Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-09-04 10:54:51 +00:00)

Compare commits (59 commits)
Commits (SHA1):
860442d5c4, ce5183ce16, 3e69b04b86, 8b9cd4f122, c0e3ccdb83, e8cc85c487, b3eff41692,
1ea63f185c, a513d5c09a, fb8216c102, 4f381d01df, de3382226e, 77be830b72, 09c0e1320f,
cc4ee59542, 1f448744f3, ee2c257057, be8439d4ac, 981f2b193c, 39087e09ce, 59960efb9c,
5a53bb5981, a67fe69cbb, 9ce2b0765f, 2e53a48504, 8e4db0c3ec, 4072b06faf, a2cf7ece70,
734fe3afde, 7f3bc91c1d, 9c2c95757d, b5ed6c586a, 35033d1f76, 9e41d0c5b0, 62e92fada9,
ae0a1a657f, 81e511ba8e, d89cb91c8c, dc31b6e6fe, 930a32de1a, e40f2ed8e3, abbd3d1078,
63c9948456, b6c81d779a, 2480c83169, 334cc66cf6, 3cf189ad94, 6ffb94a0f5, 3593826441,
0a0a62f238, 41ce9913d2, b77c42384d, 138bb12f98, 4fe2859f4e, 0768b2b4bc, e6f1772a93,
5374b2b3b9, 1196788856, 9f3f47eb80
@@ -1,8 +1,8 @@
 FROM python:3.7

-WORKDIR /workspace
+WORKDIR /workspaces

-# install Node/Yarn for Frontent
+# Install Node/Yarn for Frontent
 RUN apt-get update && apt-get install -y --no-install-recommends \
     curl \
     git \
@@ -17,8 +17,24 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
     && rm -rf /var/lib/apt/lists/*
 ENV NVM_DIR /root/.nvm

+# Install docker
+# https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    apt-transport-https \
+    ca-certificates \
+    curl \
+    software-properties-common \
+    gpg-agent \
+    && curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add - \
+    && add-apt-repository "deb https://download.docker.com/linux/debian $(lsb_release -cs) stable" \
+    && apt-get update && apt-get install -y --no-install-recommends \
+    docker-ce \
+    docker-ce-cli \
+    containerd.io \
+    && rm -rf /var/lib/apt/lists/*
+
 # Install Python dependencies from requirements.txt if it exists
-COPY requirements.txt requirements_tests.txt /workspace/
+COPY requirements.txt requirements_tests.txt /workspaces/
 RUN pip install -r requirements.txt \
     && pip3 install -r requirements_tests.txt \
     && pip install black tox
@@ -3,9 +3,11 @@
     "name": "Hass.io dev",
     "context": "..",
     "dockerFile": "Dockerfile",
+    "appPort": "9123:8123",
     "runArgs": [
         "-e",
-        "GIT_EDTIOR='code --wait'"
+        "GIT_EDITOR=\"code --wait\"",
+        "--privileged"
     ],
     "extensions": [
         "ms-python.python"
@@ -18,3 +18,6 @@ venv/
 home-assistant-polymer/*
 misc/*
 script/*
+
+# Test ENV
+data/
.vscode/tasks.json (vendored), 75 lines changed

@@ -1,10 +1,38 @@
 {
     "version": "2.0.0",
     "tasks": [
+        {
+            "label": "Run Testenv",
+            "type": "shell",
+            "command": "./scripts/test_env.sh",
+            "group": {
+                "kind": "test",
+                "isDefault": true,
+            },
+            "presentation": {
+                "reveal": "always",
+                "panel": "new"
+            },
+            "problemMatcher": []
+        },
+        {
+            "label": "Run Testenv CLI",
+            "type": "shell",
+            "command": "docker run --rm -ti -v /etc/machine-id:/etc/machine-id --network=hassio --add-host hassio:172.30.32.2 homeassistant/amd64-hassio-cli:dev",
+            "group": {
+                "kind": "test",
+                "isDefault": true,
+            },
+            "presentation": {
+                "reveal": "always",
+                "panel": "new"
+            },
+            "problemMatcher": []
+        },
         {
             "label": "Update UI",
             "type": "shell",
-            "command": "./script/update-frontend.sh",
+            "command": "./scripts/update-frontend.sh",
             "group": {
                 "kind": "build",
                 "isDefault": true
@@ -14,6 +42,51 @@
                 "panel": "new"
             },
             "problemMatcher": []
+        },
+        {
+            "label": "Pytest",
+            "type": "shell",
+            "command": "pytest --timeout=10 tests",
+            "group": {
+                "kind": "test",
+                "isDefault": true,
+            },
+            "presentation": {
+                "reveal": "always",
+                "panel": "new"
+            },
+            "problemMatcher": []
+        },
+        {
+            "label": "Flake8",
+            "type": "shell",
+            "command": "flake8 hassio tests",
+            "group": {
+                "kind": "test",
+                "isDefault": true,
+            },
+            "presentation": {
+                "reveal": "always",
+                "panel": "new"
+            },
+            "problemMatcher": []
+        },
+        {
+            "label": "Pylint",
+            "type": "shell",
+            "command": "pylint hassio",
+            "dependsOn": [
+                "Install all Requirements"
+            ],
+            "group": {
+                "kind": "test",
+                "isDefault": true,
+            },
+            "presentation": {
+                "reveal": "always",
+                "panel": "new"
+            },
+            "problemMatcher": []
         }
     ]
 }
API.md, 51 lines changed

@@ -105,6 +105,7 @@ Output is the raw docker log.
     "cpu_percent": 0.0,
     "memory_usage": 283123,
     "memory_limit": 329392,
+    "memory_percent": 1.4,
     "network_tx": 0,
     "network_rx": 0,
     "blk_read": 0,
@@ -421,6 +422,7 @@ Proxy to real websocket instance.
     "cpu_percent": 0.0,
     "memory_usage": 283123,
     "memory_limit": 329392,
+    "memory_percent": 1.4,
     "network_tx": 0,
     "network_rx": 0,
     "blk_read": 0,
@@ -473,6 +475,8 @@ Get all available addons.
     {
         "name": "xy bla",
         "slug": "xdssd_xybla",
+        "hostname": "xdssd-xybla",
+        "dns": [],
         "description": "description",
         "long_description": "null|markdown",
         "auto_update": "bool",
@@ -498,6 +502,7 @@ Get all available addons.
     "privileged": ["NET_ADMIN", "SYS_ADMIN"],
     "apparmor": "disable|default|profile",
     "devices": ["/dev/xy"],
+    "udev": "bool",
     "auto_uart": "bool",
     "icon": "bool",
     "logo": "bool",
@@ -593,6 +598,7 @@ Write data to add-on stdin
     "cpu_percent": 0.0,
     "memory_usage": 283123,
     "memory_limit": 329392,
+    "memory_percent": 1.4,
     "network_tx": 0,
     "network_rx": 0,
     "blk_read": 0,
@@ -739,6 +745,51 @@ return:
 }
 ```
+
+### DNS
+
+- GET `/dns/info`
+```json
+{
+    "host": "ip-address",
+    "version": "1",
+    "latest_version": "2",
+    "servers": ["dns://8.8.8.8"],
+    "locals": ["dns://xy"]
+}
+```
+
+- POST `/dns/options`
+```json
+{
+    "servers": ["dns://8.8.8.8"]
+}
+```
+
+- POST `/dns/update`
+```json
+{
+    "version": "VERSION"
+}
+```
+
+- POST `/dns/restart`
+
+- GET `/dns/logs`
+
+- GET `/dns/stats`
+```json
+{
+    "cpu_percent": 0.0,
+    "memory_usage": 283123,
+    "memory_limit": 329392,
+    "memory_percent": 1.4,
+    "network_tx": 0,
+    "network_rx": 0,
+    "blk_read": 0,
+    "blk_write": 0
+}
+```

 ### Auth / SSO API

 You can use the user system on homeassistant. We handle this auth system on
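The new `/dns` endpoints documented above follow the same request/response conventions as the rest of the Supervisor API. As a rough, non-authoritative sketch of how an add-on could exercise them (the `http://hassio` base URL and the `X-Hassio-Key` header with a placeholder token are assumptions for illustration, not part of this change set):

```python
"""Sketch only: query and reconfigure the new DNS plugin endpoints."""
import asyncio

import aiohttp

BASE = "http://hassio"                 # assumed API base URL
HEADERS = {"X-Hassio-Key": "<token>"}  # assumed auth header and placeholder token


async def main() -> None:
    async with aiohttp.ClientSession(headers=HEADERS) as session:
        # GET /dns/info: current version, upstream servers and local DNS servers
        async with session.get(f"{BASE}/dns/info") as resp:
            print(await resp.json())

        # POST /dns/options: replace the upstream server list
        payload = {"servers": ["dns://8.8.8.8"]}
        async with session.post(f"{BASE}/dns/options", json=payload) as resp:
            resp.raise_for_status()


asyncio.run(main())
```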
@@ -6,7 +6,7 @@ import sys

 from hassio import bootstrap

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 def initialize_event_loop():
@@ -10,14 +10,16 @@ from ..coresys import CoreSys, CoreSysAttributes
 from ..exceptions import (
     AddonsError,
     AddonsNotSupportedError,
+    CoreDNSError,
     DockerAPIError,
+    HomeAssistantAPIError,
     HostAppArmorError,
 )
 from ..store.addon import AddonStore
 from .addon import Addon
 from .data import AddonsData

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)

 AnyAddon = Union[Addon, AddonStore]

@@ -73,6 +75,9 @@ class AddonManager(CoreSysAttributes):
         if tasks:
             await asyncio.wait(tasks)

+        # Sync DNS
+        await self.sync_dns()
+
     async def boot(self, stage: str) -> None:
         """Boot add-ons with mode auto."""
         tasks = []
@@ -155,8 +160,15 @@ class AddonManager(CoreSysAttributes):
         with suppress(HostAppArmorError):
             await addon.uninstall_apparmor()

+        # Cleanup Ingress panel from sidebar
+        if addon.ingress_panel:
+            addon.ingress_panel = False
+            with suppress(HomeAssistantAPIError):
+                await self.sys_ingress.update_hass_panel(addon)
+
         # Cleanup internal data
         addon.remove_discovery()

         self.data.uninstall(addon)
         self.local.pop(slug)
+
@@ -291,3 +303,17 @@ class AddonManager(CoreSysAttributes):
             _LOGGER.error("Can't repair %s", addon.slug)
             with suppress(AddonsError):
                 await self.uninstall(addon.slug)
+
+    async def sync_dns(self) -> None:
+        """Sync add-ons DNS names."""
+        # Update hosts
+        for addon in self.installed:
+            if not await addon.instance.is_running():
+                continue
+            self.sys_dns.add_host(
+                ipv4=addon.ip_address, names=[addon.hostname], write=False
+            )
+
+        # Write hosts files
+        with suppress(CoreDNSError):
+            self.sys_dns.write_hosts()
@@ -1,7 +1,7 @@
 """Init file for Hass.io add-ons."""
 from contextlib import suppress
 from copy import deepcopy
-from ipaddress import IPv4Address, ip_address
+from ipaddress import IPv4Address
 import logging
 from pathlib import Path, PurePath
 import re
@@ -9,7 +9,7 @@ import secrets
 import shutil
 import tarfile
 from tempfile import TemporaryDirectory
-from typing import Any, Awaitable, Dict, Optional
+from typing import Any, Awaitable, Dict, List, Optional

 import voluptuous as vol
 from voluptuous.humanize import humanize_error
@@ -35,7 +35,7 @@ from ..const import (
     ATTR_USER,
     ATTR_UUID,
     ATTR_VERSION,
-    STATE_NONE,
+    DNS_SUFFIX,
     STATE_STARTED,
     STATE_STOPPED,
 )
@@ -55,7 +55,7 @@ from .model import AddonModel, Data
 from .utils import remove_data
 from .validate import SCHEMA_ADDON_SNAPSHOT, validate_options

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)

 RE_WEBUI = re.compile(
     r"^(?:(?P<s_prefix>https?)|\[PROTO:(?P<t_proto>\w+)\])"
@@ -80,8 +80,6 @@ class Addon(AddonModel):
     @property
     def ip_address(self) -> IPv4Address:
         """Return IP of Add-on instance."""
-        if not self.is_installed:
-            return ip_address("0.0.0.0")
         return self.instance.ip_address

     @property
@@ -119,6 +117,11 @@ class Addon(AddonModel):
         """Return installed version."""
         return self.persist[ATTR_VERSION]

+    @property
+    def dns(self) -> List[str]:
+        """Return list of DNS name for that add-on."""
+        return [f"{self.hostname}.{DNS_SUFFIX}"]
+
     @property
     def options(self) -> Dict[str, Any]:
         """Return options with local changes."""
@@ -447,9 +450,6 @@ class Addon(AddonModel):

     async def state(self) -> str:
         """Return running state of add-on."""
-        if not self.is_installed:
-            return STATE_NONE
-
         if await self.instance.is_running():
             return STATE_STARTED
         return STATE_STOPPED
@@ -17,7 +17,7 @@ from ..store.addon import AddonStore
 from .addon import Addon
 from .validate import SCHEMA_ADDONS_FILE

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)

 Config = Dict[str, Any]

@@ -51,6 +51,7 @@ from ..const import (
     ATTR_STDIN,
     ATTR_TIMEOUT,
     ATTR_TMPFS,
+    ATTR_UDEV,
     ATTR_URL,
     ATTR_VERSION,
     ATTR_WEBUI,
@@ -109,6 +110,16 @@ class AddonModel(CoreSysAttributes):
         """Return name of add-on."""
         return self.data[ATTR_NAME]

+    @property
+    def hostname(self) -> str:
+        """Return slug/id of add-on."""
+        return self.slug.replace("_", "-")
+
+    @property
+    def dns(self) -> List[str]:
+        """Return list of DNS name for that add-on."""
+        return []
+
     @property
     def timeout(self) -> int:
         """Return timeout of addon for docker stop."""
@@ -333,6 +344,11 @@ class AddonModel(CoreSysAttributes):
         """Return True if the add-on access to GPIO interface."""
         return self.data[ATTR_GPIO]

+    @property
+    def with_udev(self) -> bool:
+        """Return True if the add-on have his own udev."""
+        return self.data[ATTR_UDEV]
+
     @property
     def with_kernel_modules(self) -> bool:
         """Return True if the add-on access to kernel modules."""
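Combined with the `DNS_SUFFIX` constant added to the `const` module (see the constants diff further down), the new `hostname` and `dns` properties give every installed add-on a predictable DNS name. A small worked example, using the slug from the API.md sample above:

```python
DNS_SUFFIX = "local.hass.io"              # value added to the const module in this change set

slug = "xdssd_xybla"                      # example slug from the API.md sample
hostname = slug.replace("_", "-")         # what AddonModel.hostname returns
dns_names = [f"{hostname}.{DNS_SUFFIX}"]  # what Addon.dns returns for an installed add-on

print(dns_names)                          # ['xdssd-xybla.local.hass.io']
```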
@@ -22,7 +22,7 @@ from ..const import (
 if TYPE_CHECKING:
     from .model import AddonModel

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 def rating_security(addon: AddonModel) -> int:
@@ -68,6 +68,7 @@ from ..const import (
     ATTR_SYSTEM,
     ATTR_TIMEOUT,
     ATTR_TMPFS,
+    ATTR_UDEV,
     ATTR_URL,
     ATTR_USER,
     ATTR_UUID,
@@ -94,7 +95,7 @@ from ..validate import (
     UUID_MATCH,
 )

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")
@@ -186,6 +187,7 @@ SCHEMA_ADDON_CONFIG = vol.Schema(
         vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
         vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
         vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
+        vol.Optional(ATTR_UDEV, default=False): vol.Boolean(),
         vol.Optional(ATTR_TMPFS): vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
         vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
         vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
@@ -9,6 +9,7 @@ from ..coresys import CoreSys, CoreSysAttributes
 from .addons import APIAddons
 from .auth import APIAuth
 from .discovery import APIDiscovery
+from .dns import APICoreDNS
 from .hardware import APIHardware
 from .hassos import APIHassOS
 from .homeassistant import APIHomeAssistant
@@ -21,7 +22,7 @@ from .services import APIServices
 from .snapshots import APISnapshots
 from .supervisor import APISupervisor

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 class RestAPI(CoreSysAttributes):
@@ -55,6 +56,7 @@ class RestAPI(CoreSysAttributes):
         self._register_services()
         self._register_info()
         self._register_auth()
+        self._register_dns()

     def _register_host(self) -> None:
         """Register hostcontrol functions."""
@@ -264,6 +266,22 @@ class RestAPI(CoreSysAttributes):
             ]
         )

+    def _register_dns(self) -> None:
+        """Register DNS functions."""
+        api_dns = APICoreDNS()
+        api_dns.coresys = self.coresys
+
+        self.webapp.add_routes(
+            [
+                web.get("/dns/info", api_dns.info),
+                web.get("/dns/stats", api_dns.stats),
+                web.get("/dns/logs", api_dns.logs),
+                web.post("/dns/update", api_dns.update),
+                web.post("/dns/options", api_dns.options),
+                web.post("/dns/restart", api_dns.restart),
+            ]
+        )
+
     def _register_panel(self) -> None:
         """Register panel for Home Assistant."""
         panel_dir = Path(__file__).parent.joinpath("panel")
@@ -8,6 +8,7 @@ import voluptuous as vol
 from voluptuous.humanize import humanize_error

 from ..addons import AnyAddon
+from ..docker.stats import DockerStats
 from ..addons.utils import rating_security
 from ..const import (
     ATTR_ADDONS,
@@ -30,6 +31,7 @@ from ..const import (
     ATTR_DEVICES,
     ATTR_DEVICETREE,
     ATTR_DISCOVERY,
+    ATTR_DNS,
     ATTR_DOCKER_API,
     ATTR_FULL_ACCESS,
     ATTR_GPIO,
@@ -41,6 +43,7 @@ from ..const import (
     ATTR_HOST_IPC,
     ATTR_HOST_NETWORK,
     ATTR_HOST_PID,
+    ATTR_HOSTNAME,
     ATTR_ICON,
     ATTR_INGRESS,
     ATTR_INGRESS_ENTRY,
@@ -56,6 +59,7 @@ from ..const import (
     ATTR_MACHINE,
     ATTR_MAINTAINER,
     ATTR_MEMORY_LIMIT,
+    ATTR_MEMORY_PERCENT,
     ATTR_MEMORY_USAGE,
     ATTR_NAME,
     ATTR_NETWORK,
@@ -73,6 +77,7 @@ from ..const import (
     ATTR_SOURCE,
     ATTR_STATE,
     ATTR_STDIN,
+    ATTR_UDEV,
     ATTR_URL,
     ATTR_VERSION,
     ATTR_WEBUI,
@@ -89,7 +94,7 @@ from ..exceptions import APIError
 from ..validate import ALSA_DEVICE, DOCKER_PORTS
 from .utils import api_process, api_process_raw, api_validate

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)

 SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})

@@ -116,7 +121,7 @@ class APIAddons(CoreSysAttributes):
         self, request: web.Request, check_installed: bool = True
     ) -> AnyAddon:
         """Return addon, throw an exception it it doesn't exist."""
-        addon_slug = request.match_info.get("addon")
+        addon_slug: str = request.match_info.get("addon")

         # Lookup itself
         if addon_slug == "self":
@@ -175,11 +180,13 @@ class APIAddons(CoreSysAttributes):
     @api_process
     async def info(self, request: web.Request) -> Dict[str, Any]:
         """Return add-on information."""
-        addon = self._extract_addon(request, check_installed=False)
+        addon: AnyAddon = self._extract_addon(request, check_installed=False)

         data = {
             ATTR_NAME: addon.name,
             ATTR_SLUG: addon.slug,
+            ATTR_HOSTNAME: addon.hostname,
+            ATTR_DNS: addon.dns,
             ATTR_DESCRIPTON: addon.description,
             ATTR_LONG_DESCRIPTION: addon.long_description,
             ATTR_AUTO_UPDATE: None,
@@ -220,6 +227,7 @@ class APIAddons(CoreSysAttributes):
             ATTR_GPIO: addon.with_gpio,
             ATTR_KERNEL_MODULES: addon.with_kernel_modules,
             ATTR_DEVICETREE: addon.with_devicetree,
+            ATTR_UDEV: addon.with_udev,
             ATTR_DOCKER_API: addon.access_docker_api,
             ATTR_AUDIO: addon.with_audio,
             ATTR_AUDIO_INPUT: None,
@@ -256,12 +264,12 @@ class APIAddons(CoreSysAttributes):
     @api_process
     async def options(self, request: web.Request) -> None:
         """Store user options for add-on."""
-        addon = self._extract_addon(request)
+        addon: AnyAddon = self._extract_addon(request)

         addon_schema = SCHEMA_OPTIONS.extend(
             {vol.Optional(ATTR_OPTIONS): vol.Any(None, addon.schema)}
         )
-        body = await api_validate(addon_schema, request)
+        body: Dict[str, Any] = await api_validate(addon_schema, request)

         if ATTR_OPTIONS in body:
             addon.options = body[ATTR_OPTIONS]
@@ -284,8 +292,8 @@ class APIAddons(CoreSysAttributes):
     @api_process
     async def security(self, request: web.Request) -> None:
         """Store security options for add-on."""
-        addon = self._extract_addon(request)
-        body = await api_validate(SCHEMA_SECURITY, request)
+        addon: AnyAddon = self._extract_addon(request)
+        body: Dict[str, Any] = await api_validate(SCHEMA_SECURITY, request)

         if ATTR_PROTECTED in body:
             _LOGGER.warning("Protected flag changing for %s!", addon.slug)
@@ -296,13 +304,14 @@ class APIAddons(CoreSysAttributes):
     @api_process
     async def stats(self, request: web.Request) -> Dict[str, Any]:
         """Return resource information."""
-        addon = self._extract_addon(request)
-        stats = await addon.stats()
+        addon: AnyAddon = self._extract_addon(request)
+        stats: DockerStats = await addon.stats()

         return {
             ATTR_CPU_PERCENT: stats.cpu_percent,
             ATTR_MEMORY_USAGE: stats.memory_usage,
             ATTR_MEMORY_LIMIT: stats.memory_limit,
+            ATTR_MEMORY_PERCENT: stats.memory_percent,
             ATTR_NETWORK_RX: stats.network_rx,
             ATTR_NETWORK_TX: stats.network_tx,
             ATTR_BLK_READ: stats.blk_read,
@@ -312,19 +321,19 @@ class APIAddons(CoreSysAttributes):
     @api_process
     def install(self, request: web.Request) -> Awaitable[None]:
         """Install add-on."""
-        addon = self._extract_addon(request, check_installed=False)
+        addon: AnyAddon = self._extract_addon(request, check_installed=False)
         return asyncio.shield(addon.install())

     @api_process
     def uninstall(self, request: web.Request) -> Awaitable[None]:
         """Uninstall add-on."""
-        addon = self._extract_addon(request)
+        addon: AnyAddon = self._extract_addon(request)
         return asyncio.shield(addon.uninstall())

     @api_process
     def start(self, request: web.Request) -> Awaitable[None]:
         """Start add-on."""
-        addon = self._extract_addon(request)
+        addon: AnyAddon = self._extract_addon(request)

         # check options
         options = addon.options
@@ -338,13 +347,13 @@ class APIAddons(CoreSysAttributes):
     @api_process
     def stop(self, request: web.Request) -> Awaitable[None]:
         """Stop add-on."""
-        addon = self._extract_addon(request)
+        addon: AnyAddon = self._extract_addon(request)
         return asyncio.shield(addon.stop())

     @api_process
     def update(self, request: web.Request) -> Awaitable[None]:
         """Update add-on."""
-        addon = self._extract_addon(request)
+        addon: AnyAddon = self._extract_addon(request)

         if addon.latest_version == addon.version:
             raise APIError("No update available!")
@@ -354,13 +363,13 @@ class APIAddons(CoreSysAttributes):
     @api_process
     def restart(self, request: web.Request) -> Awaitable[None]:
         """Restart add-on."""
-        addon = self._extract_addon(request)
+        addon: AnyAddon = self._extract_addon(request)
         return asyncio.shield(addon.restart())

     @api_process
     def rebuild(self, request: web.Request) -> Awaitable[None]:
         """Rebuild local build add-on."""
-        addon = self._extract_addon(request)
+        addon: AnyAddon = self._extract_addon(request)
         if not addon.need_build:
             raise APIError("Only local build addons are supported")

@@ -369,13 +378,13 @@ class APIAddons(CoreSysAttributes):
     @api_process_raw(CONTENT_TYPE_BINARY)
     def logs(self, request: web.Request) -> Awaitable[bytes]:
         """Return logs from add-on."""
-        addon = self._extract_addon(request)
+        addon: AnyAddon = self._extract_addon(request)
         return addon.logs()

     @api_process_raw(CONTENT_TYPE_PNG)
     async def icon(self, request: web.Request) -> bytes:
         """Return icon from add-on."""
-        addon = self._extract_addon(request, check_installed=False)
+        addon: AnyAddon = self._extract_addon(request, check_installed=False)
         if not addon.with_icon:
             raise APIError("No icon found!")

@@ -385,7 +394,7 @@ class APIAddons(CoreSysAttributes):
     @api_process_raw(CONTENT_TYPE_PNG)
     async def logo(self, request: web.Request) -> bytes:
         """Return logo from add-on."""
-        addon = self._extract_addon(request, check_installed=False)
+        addon: AnyAddon = self._extract_addon(request, check_installed=False)
         if not addon.with_logo:
             raise APIError("No logo found!")

@@ -395,7 +404,7 @@ class APIAddons(CoreSysAttributes):
     @api_process_raw(CONTENT_TYPE_TEXT)
     async def changelog(self, request: web.Request) -> str:
         """Return changelog from add-on."""
-        addon = self._extract_addon(request, check_installed=False)
+        addon: AnyAddon = self._extract_addon(request, check_installed=False)
         if not addon.with_changelog:
             raise APIError("No changelog found!")

@@ -405,7 +414,7 @@ class APIAddons(CoreSysAttributes):
     @api_process
     async def stdin(self, request: web.Request) -> None:
         """Write to stdin of add-on."""
-        addon = self._extract_addon(request)
+        addon: AnyAddon = self._extract_addon(request)
         if not addon.with_stdin:
             raise APIError("STDIN not supported by add-on")

@@ -10,7 +10,7 @@ from ..const import REQUEST_FROM, CONTENT_TYPE_JSON, CONTENT_TYPE_URL
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIForbidden

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 class APIAuth(CoreSysAttributes):
hassio/api/dns.py (new file, 97 lines)

@@ -0,0 +1,97 @@
+"""Init file for Hass.io DNS RESTful API."""
+import asyncio
+import logging
+from typing import Any, Awaitable, Dict
+
+from aiohttp import web
+import voluptuous as vol
+
+from ..const import (
+    ATTR_BLK_READ,
+    ATTR_BLK_WRITE,
+    ATTR_CPU_PERCENT,
+    ATTR_HOST,
+    ATTR_LATEST_VERSION,
+    ATTR_LOCALS,
+    ATTR_MEMORY_LIMIT,
+    ATTR_MEMORY_PERCENT,
+    ATTR_MEMORY_USAGE,
+    ATTR_NETWORK_RX,
+    ATTR_NETWORK_TX,
+    ATTR_SERVERS,
+    ATTR_VERSION,
+    CONTENT_TYPE_BINARY,
+)
+from ..coresys import CoreSysAttributes
+from ..exceptions import APIError
+from ..validate import DNS_SERVER_LIST
+from .utils import api_process, api_process_raw, api_validate
+
+_LOGGER: logging.Logger = logging.getLogger(__name__)
+
+# pylint: disable=no-value-for-parameter
+SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_SERVERS): DNS_SERVER_LIST})
+
+SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
+
+
+class APICoreDNS(CoreSysAttributes):
+    """Handle RESTful API for DNS functions."""
+
+    @api_process
+    async def info(self, request: web.Request) -> Dict[str, Any]:
+        """Return DNS information."""
+        return {
+            ATTR_VERSION: self.sys_dns.version,
+            ATTR_LATEST_VERSION: self.sys_dns.latest_version,
+            ATTR_HOST: str(self.sys_docker.network.dns),
+            ATTR_SERVERS: self.sys_dns.servers,
+            ATTR_LOCALS: self.sys_host.network.dns_servers,
+        }
+
+    @api_process
+    async def options(self, request: web.Request) -> None:
+        """Set DNS options."""
+        body = await api_validate(SCHEMA_OPTIONS, request)
+
+        if ATTR_SERVERS in body:
+            self.sys_dns.servers = body[ATTR_SERVERS]
+            self.sys_create_task(self.sys_dns.restart())
+
+        self.sys_dns.save_data()
+
+    @api_process
+    async def stats(self, request: web.Request) -> Dict[str, Any]:
+        """Return resource information."""
+        stats = await self.sys_dns.stats()
+
+        return {
+            ATTR_CPU_PERCENT: stats.cpu_percent,
+            ATTR_MEMORY_USAGE: stats.memory_usage,
+            ATTR_MEMORY_LIMIT: stats.memory_limit,
+            ATTR_MEMORY_PERCENT: stats.memory_percent,
+            ATTR_NETWORK_RX: stats.network_rx,
+            ATTR_NETWORK_TX: stats.network_tx,
+            ATTR_BLK_READ: stats.blk_read,
+            ATTR_BLK_WRITE: stats.blk_write,
+        }
+
+    @api_process
+    async def update(self, request: web.Request) -> None:
+        """Update DNS plugin."""
+        body = await api_validate(SCHEMA_VERSION, request)
+        version = body.get(ATTR_VERSION, self.sys_dns.latest_version)
+
+        if version == self.sys_dns.version:
+            raise APIError("Version {} is already in use".format(version))
+        await asyncio.shield(self.sys_dns.update(version))
+
+    @api_process_raw(CONTENT_TYPE_BINARY)
+    def logs(self, request: web.Request) -> Awaitable[bytes]:
+        """Return DNS Docker logs."""
+        return self.sys_dns.logs()
+
+    @api_process
+    def restart(self, request: web.Request) -> Awaitable[None]:
+        """Restart CoreDNS plugin."""
+        return asyncio.shield(self.sys_dns.restart())
@@ -12,7 +12,7 @@ from ..const import (
 )
 from ..coresys import CoreSysAttributes

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 class APIHardware(CoreSysAttributes):
@@ -22,7 +22,9 @@ class APIHardware(CoreSysAttributes):
     async def info(self, request):
         """Show hardware info."""
         return {
-            ATTR_SERIAL: list(self.sys_hardware.serial_devices),
+            ATTR_SERIAL: list(
+                self.sys_hardware.serial_devices | self.sys_hardware.serial_by_id
+            ),
             ATTR_INPUT: list(self.sys_hardware.input_devices),
             ATTR_DISK: list(self.sys_hardware.disk_devices),
             ATTR_GPIO: list(self.sys_hardware.gpio_devices),
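The `|` in the reworked `ATTR_SERIAL` entry is a set union, so `/hardware/info` now returns one merged list that presumably covers both the raw serial device nodes and the stable `/dev/serial/by-id` links. A minimal illustration with made-up paths (both sets are hypothetical; the real values come from `self.sys_hardware`):

```python
# Hypothetical contents; serial_devices and serial_by_id are assumed to be set-like,
# as the "|" union operator in the diff implies.
serial_devices = {"/dev/ttyUSB0", "/dev/ttyACM0"}
serial_by_id = {"/dev/serial/by-id/usb-1a86_USB_Serial-if00-port0"}

# Same shape as the new API response value: a single merged list of both path kinds.
print(list(serial_devices | serial_by_id))
```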
@@ -16,7 +16,7 @@ from ..const import (
 from ..coresys import CoreSysAttributes
 from .utils import api_process, api_validate

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)

 SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})

@@ -18,6 +18,7 @@ from ..const import (
     ATTR_MACHINE,
     ATTR_MEMORY_LIMIT,
     ATTR_MEMORY_USAGE,
+    ATTR_MEMORY_PERCENT,
     ATTR_NETWORK_RX,
     ATTR_NETWORK_TX,
     ATTR_PASSWORD,
@@ -35,7 +36,7 @@ from ..exceptions import APIError
 from ..validate import DOCKER_IMAGE, NETWORK_PORT
 from .utils import api_process, api_process_raw, api_validate

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)

 # pylint: disable=no-value-for-parameter
 SCHEMA_OPTIONS = vol.Schema(
@@ -121,6 +122,7 @@ class APIHomeAssistant(CoreSysAttributes):
             ATTR_CPU_PERCENT: stats.cpu_percent,
             ATTR_MEMORY_USAGE: stats.memory_usage,
             ATTR_MEMORY_LIMIT: stats.memory_limit,
+            ATTR_MEMORY_PERCENT: stats.memory_percent,
             ATTR_NETWORK_RX: stats.network_rx,
             ATTR_NETWORK_TX: stats.network_tx,
             ATTR_BLK_READ: stats.blk_read,
@@ -20,7 +20,7 @@ from ..const import (
 )
 from ..coresys import CoreSysAttributes

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)

 SERVICE = "service"

@@ -19,7 +19,7 @@ from ..const import (
 from ..coresys import CoreSysAttributes
 from .utils import api_process

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 class APIInfo(CoreSysAttributes):
@@ -28,7 +28,7 @@ from ..const import (
 from ..coresys import CoreSysAttributes
 from .utils import api_process

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 class APIIngress(CoreSysAttributes):
@@ -14,7 +14,7 @@ from ..const import HEADER_HA_ACCESS
 from ..coresys import CoreSysAttributes
 from ..exceptions import HomeAssistantAuthError, HomeAssistantAPIError, APIError

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 class APIProxy(CoreSysAttributes):
@@ -16,7 +16,7 @@ from ..const import (
 )
 from ..coresys import CoreSysAttributes

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)

 # fmt: off

@@ -67,6 +67,7 @@ ADDONS_ROLE_ACCESS = {
     ),
     ROLE_MANAGER: re.compile(
         r"^(?:"
+        r"|/dns/.*"
         r"|/homeassistant/.+"
         r"|/host/.+"
         r"|/hardware/.+"
@@ -28,7 +28,7 @@ from ..const import (
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 # pylint: disable=no-value-for-parameter
@@ -25,6 +25,7 @@ from ..const import (
     ATTR_LOGO,
     ATTR_MEMORY_LIMIT,
     ATTR_MEMORY_USAGE,
+    ATTR_MEMORY_PERCENT,
     ATTR_NAME,
     ATTR_NETWORK_RX,
     ATTR_NETWORK_TX,
@@ -43,7 +44,7 @@ from ..utils.validate import validate_timezone
 from ..validate import CHANNELS, LOG_LEVEL, REPOSITORIES, WAIT_BOOT
 from .utils import api_process, api_process_raw, api_validate

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)

 # pylint: disable=no-value-for-parameter
 SCHEMA_OPTIONS = vol.Schema(
@@ -140,6 +141,7 @@ class APISupervisor(CoreSysAttributes):
             ATTR_CPU_PERCENT: stats.cpu_percent,
             ATTR_MEMORY_USAGE: stats.memory_usage,
             ATTR_MEMORY_LIMIT: stats.memory_limit,
+            ATTR_MEMORY_PERCENT: stats.memory_percent,
             ATTR_NETWORK_RX: stats.network_rx,
             ATTR_NETWORK_TX: stats.network_tx,
             ATTR_BLK_READ: stats.blk_read,
@@ -16,7 +16,7 @@ from ..const import (
 )
 from ..exceptions import HassioError, APIError, APIForbidden

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 def json_loads(data):
@@ -8,7 +8,9 @@ from .coresys import CoreSys, CoreSysAttributes
 from .exceptions import HassioArchNotFound, JsonFileError
 from .utils.json import read_json_file

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)
+
+ARCH_JSON: Path = Path(__file__).parent.joinpath("data/arch.json")

 MAP_CPU = {
     "armv7": "armv7",
@@ -47,7 +49,7 @@ class CpuArch(CoreSysAttributes):
     async def load(self) -> None:
         """Load data and initialize default arch."""
         try:
-            arch_data = read_json_file(Path(__file__).parent.joinpath("arch.json"))
+            arch_data = read_json_file(ARCH_JSON)
         except JsonFileError:
             _LOGGER.warning("Can't read arch json")
             return
@@ -8,7 +8,7 @@ from .utils.json import JsonConfig
 from .validate import SCHEMA_AUTH_CONFIG
 from .exceptions import AuthError, HomeAssistantAPIError

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 class Auth(JsonConfig, CoreSysAttributes):
@@ -11,11 +11,12 @@ from .addons import AddonManager
 from .api import RestAPI
 from .arch import CpuArch
 from .auth import Auth
-from .const import SOCKET_DOCKER
+from .const import CHANNEL_DEV, SOCKET_DOCKER
 from .core import HassIO
 from .coresys import CoreSys
 from .dbus import DBusManager
 from .discovery import Discovery
+from .dns import CoreDNS
 from .hassos import HassOS
 from .homeassistant import HomeAssistant
 from .host import HostManager
@@ -28,7 +29,7 @@ from .tasks import Tasks
 from .updater import Updater
 from .utils.dt import fetch_timezone

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)

 ENV_SHARE = "SUPERVISOR_SHARE"
 ENV_NAME = "SUPERVISOR_NAME"
@@ -43,6 +44,7 @@ async def initialize_coresys():

     # Initialize core objects
     coresys.core = HassIO(coresys)
+    coresys.dns = CoreDNS(coresys)
     coresys.arch = CpuArch(coresys)
     coresys.auth = Auth(coresys)
     coresys.updater = Updater(coresys)
@@ -127,9 +129,21 @@ def initialize_system_data(coresys: CoreSys):
         _LOGGER.info("Create Hass.io Apparmor folder %s", config.path_apparmor)
         config.path_apparmor.mkdir()

+    # dns folder
+    if not config.path_dns.is_dir():
+        _LOGGER.info("Create Hass.io DNS folder %s", config.path_dns)
+        config.path_dns.mkdir()
+
     # Update log level
     coresys.config.modify_log_level()

+    # Check if ENV is in development mode
+    if bool(os.environ.get("SUPERVISOR_DEV", 0)):
+        _LOGGER.warning("SUPERVISOR_DEV is set")
+        coresys.updater.channel = CHANNEL_DEV
+        coresys.config.logging = "debug"
+        coresys.config.debug = True
+

 def migrate_system_env(coresys: CoreSys):
     """Cleanup some stuff after update."""
@@ -19,7 +19,7 @@ from .utils.dt import parse_datetime
 from .utils.json import JsonConfig
 from .validate import SCHEMA_HASSIO_CONFIG

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)

 HOMEASSISTANT_CONFIG = PurePath("homeassistant")

@@ -34,6 +34,7 @@ BACKUP_DATA = PurePath("backup")
 SHARE_DATA = PurePath("share")
 TMP_DATA = PurePath("tmp")
 APPARMOR_DATA = PurePath("apparmor")
+DNS_DATA = PurePath("dns")

 DEFAULT_BOOT_TIME = datetime.utcfromtimestamp(0).isoformat()

@@ -99,7 +100,7 @@ class CoreConfig(JsonConfig):
     def modify_log_level(self) -> None:
         """Change log level."""
         lvl = getattr(logging, self.logging.upper())
-        logging.basicConfig(level=lvl)
+        logging.getLogger("hassio").setLevel(lvl)

     @property
     def last_boot(self):
@@ -211,6 +212,16 @@ class CoreConfig(JsonConfig):
         """Return root share data folder external for Docker."""
         return PurePath(self.path_extern_hassio, SHARE_DATA)

+    @property
+    def path_extern_dns(self):
+        """Return dns path external for Docker."""
+        return str(PurePath(self.path_extern_hassio, DNS_DATA))
+
+    @property
+    def path_dns(self):
+        """Return dns path inside supervisor."""
+        return Path(HASSIO_DATA, DNS_DATA)
+
     @property
     def addons_repositories(self):
         """Return list of custom Add-on repositories."""
@@ -3,7 +3,7 @@ from pathlib import Path
|
|||||||
from ipaddress import ip_network
|
from ipaddress import ip_network
|
||||||
|
|
||||||
|
|
||||||
HASSIO_VERSION = "173"
|
HASSIO_VERSION = "182"
|
||||||
|
|
||||||
URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"
|
URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"
|
||||||
URL_HASSIO_VERSION = "https://version.home-assistant.io/{channel}.json"
|
URL_HASSIO_VERSION = "https://version.home-assistant.io/{channel}.json"
|
||||||
@@ -24,6 +24,7 @@ FILE_HASSIO_UPDATER = Path(HASSIO_DATA, "updater.json")
|
|||||||
FILE_HASSIO_SERVICES = Path(HASSIO_DATA, "services.json")
|
FILE_HASSIO_SERVICES = Path(HASSIO_DATA, "services.json")
|
||||||
FILE_HASSIO_DISCOVERY = Path(HASSIO_DATA, "discovery.json")
|
FILE_HASSIO_DISCOVERY = Path(HASSIO_DATA, "discovery.json")
|
||||||
FILE_HASSIO_INGRESS = Path(HASSIO_DATA, "ingress.json")
|
FILE_HASSIO_INGRESS = Path(HASSIO_DATA, "ingress.json")
|
||||||
|
FILE_HASSIO_DNS = Path(HASSIO_DATA, "dns.json")
|
||||||
|
|
||||||
SOCKET_DOCKER = Path("/var/run/docker.sock")
|
SOCKET_DOCKER = Path("/var/run/docker.sock")
|
||||||
|
|
||||||
@@ -31,6 +32,9 @@ DOCKER_NETWORK = "hassio"
|
|||||||
DOCKER_NETWORK_MASK = ip_network("172.30.32.0/23")
|
DOCKER_NETWORK_MASK = ip_network("172.30.32.0/23")
|
||||||
DOCKER_NETWORK_RANGE = ip_network("172.30.33.0/24")
|
DOCKER_NETWORK_RANGE = ip_network("172.30.33.0/24")
|
||||||
|
|
||||||
|
DNS_SERVERS = ["dns://1.1.1.1", "dns://9.9.9.9"]
|
||||||
|
DNS_SUFFIX = "local.hass.io"
|
||||||
|
|
||||||
LABEL_VERSION = "io.hass.version"
|
LABEL_VERSION = "io.hass.version"
|
||||||
LABEL_ARCH = "io.hass.arch"
|
LABEL_ARCH = "io.hass.arch"
|
||||||
LABEL_TYPE = "io.hass.type"
|
LABEL_TYPE = "io.hass.type"
|
||||||
@@ -86,6 +90,7 @@ ATTR_VERSION_LATEST = "version_latest"
|
|||||||
ATTR_AUTO_UART = "auto_uart"
|
ATTR_AUTO_UART = "auto_uart"
|
||||||
ATTR_LAST_BOOT = "last_boot"
|
ATTR_LAST_BOOT = "last_boot"
|
||||||
ATTR_LAST_VERSION = "last_version"
|
ATTR_LAST_VERSION = "last_version"
|
||||||
|
ATTR_LATEST_VERSION = "latest_version"
|
||||||
ATTR_CHANNEL = "channel"
|
ATTR_CHANNEL = "channel"
|
||||||
ATTR_NAME = "name"
|
ATTR_NAME = "name"
|
||||||
ATTR_SLUG = "slug"
|
ATTR_SLUG = "slug"
|
||||||
@@ -159,6 +164,7 @@ ATTR_NETWORK_RX = "network_rx"
|
|||||||
ATTR_NETWORK_TX = "network_tx"
|
ATTR_NETWORK_TX = "network_tx"
|
||||||
ATTR_MEMORY_LIMIT = "memory_limit"
|
ATTR_MEMORY_LIMIT = "memory_limit"
|
||||||
ATTR_MEMORY_USAGE = "memory_usage"
|
ATTR_MEMORY_USAGE = "memory_usage"
|
||||||
|
ATTR_MEMORY_PERCENT = "memory_percent"
|
||||||
ATTR_BLK_READ = "blk_read"
|
ATTR_BLK_READ = "blk_read"
|
||||||
ATTR_BLK_WRITE = "blk_write"
|
ATTR_BLK_WRITE = "blk_write"
|
||||||
ATTR_ADDON = "addon"
|
ATTR_ADDON = "addon"
|
||||||
@@ -210,6 +216,10 @@ ATTR_ADMIN = "admin"
|
|||||||
ATTR_PANELS = "panels"
|
ATTR_PANELS = "panels"
|
||||||
ATTR_DEBUG = "debug"
|
ATTR_DEBUG = "debug"
|
||||||
ATTR_DEBUG_BLOCK = "debug_block"
|
ATTR_DEBUG_BLOCK = "debug_block"
|
||||||
|
ATTR_DNS = "dns"
|
||||||
|
ATTR_SERVERS = "servers"
|
||||||
|
ATTR_LOCALS = "locals"
|
||||||
|
ATTR_UDEV = "udev"
|
||||||
|
|
||||||
PROVIDE_SERVICE = "provide"
|
PROVIDE_SERVICE = "provide"
|
||||||
NEED_SERVICE = "need"
|
NEED_SERVICE = "need"
|
||||||
@@ -14,7 +14,7 @@ from .const import (
 )
 from .exceptions import HassioError, HomeAssistantError, SupervisorUpdateError
 
-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)
 
 
 class HassIO(CoreSysAttributes):
@@ -36,6 +36,9 @@ class HassIO(CoreSysAttributes):
         # Load Host
         await self.sys_host.load()
 
+        # Load CoreDNS
+        await self.sys_dns.load()
+
         # Load Home Assistant
         await self.sys_homeassistant.load()
 
@@ -69,9 +72,6 @@ class HassIO(CoreSysAttributes):
         # Load ingress
         await self.sys_ingress.load()
 
-        # start dns forwarding
-        self.sys_create_task(self.sys_dns.start())
-
     async def start(self):
         """Start Hass.io orchestration."""
         await self.sys_api.start()
@@ -142,10 +142,10 @@ class HassIO(CoreSysAttributes):
             await asyncio.wait(
                 [
                     self.sys_api.stop(),
-                    self.sys_dns.stop(),
                     self.sys_websession.close(),
                     self.sys_websession_ssl.close(),
                     self.sys_ingress.unload(),
+                    self.sys_dns.unload(),
                 ]
             )
         except asyncio.TimeoutError:
@@ -176,6 +176,7 @@ class HassIO(CoreSysAttributes):
         await self.sys_run_in_executor(self.sys_docker.repair)
 
         # Restore core functionality
+        await self.sys_dns.repair()
         await self.sys_addons.repair()
         await self.sys_homeassistant.repair()
 
@@ -1,14 +1,13 @@
 """Handle core shared data."""
 from __future__ import annotations
 import asyncio
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Optional
 
 import aiohttp
 
 from .config import CoreConfig
 from .const import CHANNEL_DEV
 from .docker import DockerAPI
-from .misc.dns import DNSForward
 from .misc.hardware import Hardware
 from .misc.scheduler import Scheduler
 
@@ -20,6 +19,7 @@ if TYPE_CHECKING:
     from .core import HassIO
     from .dbus import DBusManager
     from .discovery import Discovery
+    from .dns import CoreDNS
     from .hassos import HassOS
     from .homeassistant import HomeAssistant
     from .host import HostManager
@@ -52,26 +52,26 @@ class CoreSys:
         self._hardware: Hardware = Hardware()
         self._docker: DockerAPI = DockerAPI()
         self._scheduler: Scheduler = Scheduler()
-        self._dns: DNSForward = DNSForward()
 
         # Internal objects pointers
-        self._core: HassIO = None
-        self._arch: CpuArch = None
-        self._auth: Auth = None
-        self._homeassistant: HomeAssistant = None
-        self._supervisor: Supervisor = None
-        self._addons: AddonManager = None
-        self._api: RestAPI = None
-        self._updater: Updater = None
-        self._snapshots: SnapshotManager = None
-        self._tasks: Tasks = None
-        self._host: HostManager = None
-        self._ingress: Ingress = None
-        self._dbus: DBusManager = None
-        self._hassos: HassOS = None
-        self._services: ServiceManager = None
-        self._store: StoreManager = None
-        self._discovery: Discovery = None
+        self._core: Optional[HassIO] = None
+        self._arch: Optional[CpuArch] = None
+        self._auth: Optional[Auth] = None
+        self._dns: Optional[CoreDNS] = None
+        self._homeassistant: Optional[HomeAssistant] = None
+        self._supervisor: Optional[Supervisor] = None
+        self._addons: Optional[AddonManager] = None
+        self._api: Optional[RestAPI] = None
+        self._updater: Optional[Updater] = None
+        self._snapshots: Optional[SnapshotManager] = None
+        self._tasks: Optional[Tasks] = None
+        self._host: Optional[HostManager] = None
+        self._ingress: Optional[Ingress] = None
+        self._dbus: Optional[DBusManager] = None
+        self._hassos: Optional[HassOS] = None
+        self._services: Optional[ServiceManager] = None
+        self._store: Optional[StoreManager] = None
+        self._discovery: Optional[Discovery] = None
 
     @property
     def machine(self) -> str:
@@ -125,11 +125,6 @@ class CoreSys:
         """Return Scheduler object."""
         return self._scheduler
 
-    @property
-    def dns(self) -> DNSForward:
-        """Return DNSForward object."""
-        return self._dns
-
     @property
     def core(self) -> HassIO:
         """Return HassIO object."""
@@ -298,6 +293,18 @@ class CoreSys:
             raise RuntimeError("DBusManager already set!")
         self._dbus = value
 
+    @property
+    def dns(self) -> CoreDNS:
+        """Return CoreDNS object."""
+        return self._dns
+
+    @dns.setter
+    def dns(self, value: CoreDNS):
+        """Set a CoreDNS object."""
+        if self._dns:
+            raise RuntimeError("CoreDNS already set!")
+        self._dns = value
+
     @property
     def host(self) -> HostManager:
         """Return HostManager object."""
@@ -395,11 +402,6 @@ class CoreSysAttributes:
         """Return Scheduler object."""
         return self.coresys.scheduler
 
-    @property
-    def sys_dns(self) -> DNSForward:
-        """Return DNSForward object."""
-        return self.coresys.dns
-
     @property
     def sys_core(self) -> HassIO:
         """Return HassIO object."""
@@ -470,6 +472,11 @@ class CoreSysAttributes:
         """Return DBusManager object."""
         return self.coresys.dbus
 
+    @property
+    def sys_dns(self) -> CoreDNS:
+        """Return CoreDNS object."""
+        return self.coresys.dns
+
     @property
     def sys_host(self) -> HostManager:
         """Return HostManager object."""
hassio/data/coredns.tmpl (new file, 15 lines)
@@ -0,0 +1,15 @@
.:53 {
    log
    errors
    hosts /config/hosts {
        fallthrough
    }
    template ANY AAAA local.hass.io hassio {
        rcode NOERROR
    }
    forward . $servers {
        except local.hass.io
        policy sequential
        health_check 10s
    }
}
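The $servers placeholder above is not CoreDNS syntax; the Supervisor fills it with string.Template before writing the corefile (see _write_corefile in hassio/dns.py further down in this diff). A runnable sketch of that substitution, with an assumed, already-merged server list:

    from string import Template

    # Assumed one-line excerpt of the template above; the real text is read from data/coredns.tmpl.
    corefile_template = Template("forward . $servers {")
    servers = ["dns://192.168.1.1", "dns://1.1.1.1", "dns://9.9.9.9"]
    print(corefile_template.safe_substitute(servers=" ".join(servers)))
    # forward . dns://192.168.1.1 dns://1.1.1.1 dns://9.9.9.9 {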
hassio/data/hosts.tmpl (new file, 2 lines)
@@ -0,0 +1,2 @@
$supervisor hassio supervisor.local.hass.io hassio.local.hass.io
$homeassistant homeassistant homeassistant.local.hass.io home-assistant.local.hass.io
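In the code shown later in this diff, the hosts file consumed by CoreDNS is regenerated at runtime by CoreDNS.write_hosts() from HostEntry records, with add_host() appending a local.hass.io alias for every name. A sketch of the default Supervisor line it produces:

    from ipaddress import IPv4Address

    def supervisor_hosts_line():
        """Sketch: default Supervisor entry created by CoreDNS._init_hosts()/add_host()."""
        ip = IPv4Address("172.30.32.2")  # DOCKER_NETWORK_MASK[2], the supervisor address
        names = ["hassio", "hassio.local.hass.io", "supervisor", "supervisor.local.hass.io"]
        return f"{ip!s} {' '.join(names)}"  # the exact format write_hosts() emits per entry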
@@ -1,39 +1,57 @@
 """D-Bus interface objects."""
+import logging
+
 from .systemd import Systemd
 from .hostname import Hostname
 from .rauc import Rauc
-from ..coresys import CoreSysAttributes
+from .nmi_dns import NMIDnsManager
+from ..coresys import CoreSysAttributes, CoreSys
+from ..exceptions import DBusNotConnectedError
+
+_LOGGER: logging.Logger = logging.getLogger(__name__)
 
 
 class DBusManager(CoreSysAttributes):
     """A DBus Interface handler."""
 
-    def __init__(self, coresys):
+    def __init__(self, coresys: CoreSys) -> None:
         """Initialize D-Bus interface."""
-        self.coresys = coresys
+        self.coresys: CoreSys = coresys
 
-        self._systemd = Systemd()
-        self._hostname = Hostname()
-        self._rauc = Rauc()
+        self._systemd: Systemd = Systemd()
+        self._hostname: Hostname = Hostname()
+        self._rauc: Rauc = Rauc()
+        self._nmi_dns: NMIDnsManager = NMIDnsManager()
 
     @property
-    def systemd(self):
+    def systemd(self) -> Systemd:
         """Return the systemd interface."""
         return self._systemd
 
     @property
-    def hostname(self):
+    def hostname(self) -> Hostname:
         """Return the hostname interface."""
         return self._hostname
 
     @property
-    def rauc(self):
+    def rauc(self) -> Rauc:
         """Return the rauc interface."""
         return self._rauc
 
-    async def load(self):
+    @property
+    def nmi_dns(self) -> NMIDnsManager:
+        """Return NetworkManager DNS interface."""
+        return self._nmi_dns
+
+    async def load(self) -> None:
         """Connect interfaces to D-Bus."""
-        await self.systemd.connect()
-        await self.hostname.connect()
-        await self.rauc.connect()
+
+        try:
+            await self.systemd.connect()
+            await self.hostname.connect()
+            await self.rauc.connect()
+            await self.nmi_dns.connect()
+        except DBusNotConnectedError:
+            _LOGGER.error(
+                "No DBus support from Host. Disabled any kind of host control!"
+            )
@@ -1,12 +1,13 @@
 """D-Bus interface for hostname."""
 import logging
+from typing import Optional
 
 from .interface import DBusInterface
 from .utils import dbus_connected
-from ..exceptions import DBusError
+from ..exceptions import DBusError, DBusInterfaceError
 from ..utils.gdbus import DBus
 
-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)
 
 DBUS_NAME = "org.freedesktop.hostname1"
 DBUS_OBJECT = "/org/freedesktop/hostname1"
@@ -15,12 +16,55 @@ DBUS_OBJECT = "/org/freedesktop/hostname1"
 class Hostname(DBusInterface):
     """Handle D-Bus interface for hostname/system."""
 
+    def __init__(self):
+        """Initialize Properties."""
+        self._hostname: Optional[str] = None
+        self._chassis: Optional[str] = None
+        self._deployment: Optional[str] = None
+        self._kernel: Optional[str] = None
+        self._operating_system: Optional[str] = None
+        self._cpe: Optional[str] = None
+
     async def connect(self):
         """Connect to system's D-Bus."""
         try:
             self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
         except DBusError:
             _LOGGER.warning("Can't connect to hostname")
+        except DBusInterfaceError:
+            _LOGGER.warning(
+                "No hostname support on the host. Hostname functions have been disabled."
+            )
+
+    @property
+    def hostname(self) -> Optional[str]:
+        """Return local hostname."""
+        return self._hostname
+
+    @property
+    def chassis(self) -> Optional[str]:
+        """Return local chassis type."""
+        return self._chassis
+
+    @property
+    def deployment(self) -> Optional[str]:
+        """Return local deployment type."""
+        return self._deployment
+
+    @property
+    def kernel(self) -> Optional[str]:
+        """Return local kernel version."""
+        return self._kernel
+
+    @property
+    def operating_system(self) -> Optional[str]:
+        """Return local operating system."""
+        return self._operating_system
+
+    @property
+    def cpe(self) -> Optional[str]:
+        """Return local CPE."""
+        return self._cpe
+
     @dbus_connected
     def set_static_hostname(self, hostname):
@@ -31,9 +75,16 @@ class Hostname(DBusInterface):
         return self.dbus.SetStaticHostname(hostname, False)
 
     @dbus_connected
-    def get_properties(self):
-        """Return local host informations.
-
-        Return a coroutine.
-        """
-        return self.dbus.get_properties(DBUS_NAME)
+    async def update(self):
+        """Update Properties."""
+        data = await self.dbus.get_properties(DBUS_NAME)
+        if not data:
+            _LOGGER.warning("Can't get properties for Hostname")
+            return
+
+        self._hostname = data.get("StaticHostname")
+        self._chassis = data.get("Chassis")
+        self._deployment = data.get("Deployment")
+        self._kernel = data.get("KernelRelease")
+        self._operating_system = data.get("OperatingSystemPrettyName")
+        self._cpe = data.get("OperatingSystemCPEName")
@@ -1,12 +1,13 @@
 """Interface class for D-Bus wrappers."""
+from typing import Optional
+
+from ..utils.gdbus import DBus
 
 
 class DBusInterface:
     """Handle D-Bus interface for hostname/system."""
 
-    def __init__(self):
-        """Initialize systemd."""
-        self.dbus = None
+    dbus: Optional[DBus] = None
 
     @property
     def is_connected(self):
hassio/dbus/nmi_dns.py (new file, 85 lines)
@@ -0,0 +1,85 @@
"""D-Bus interface for hostname."""
import logging
from typing import Optional, List

import attr

from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError, DBusInterfaceError
from ..utils.gdbus import DBus

_LOGGER: logging.Logger = logging.getLogger(__name__)

DBUS_NAME = "org.freedesktop.NetworkManager"
DBUS_OBJECT = "/org/freedesktop/NetworkManager/DnsManager"


@attr.s
class DNSConfiguration:
    """NMI DnsManager configuration Object."""

    nameservers: List[str] = attr.ib()
    domains: List[str] = attr.ib()
    interface: str = attr.ib()
    priority: int = attr.ib()
    vpn: bool = attr.ib()


class NMIDnsManager(DBusInterface):
    """Handle D-Bus interface for NMI DnsManager."""

    def __init__(self) -> None:
        """Initialize Properties."""
        self._mode: Optional[str] = None
        self._rc_manager: Optional[str] = None
        self._configuration: List[DNSConfiguration] = []

    @property
    def mode(self) -> Optional[str]:
        """Return Propertie mode."""
        return self._mode

    @property
    def rc_manager(self) -> Optional[str]:
        """Return Propertie RcManager."""
        return self._rc_manager

    @property
    def configuration(self) -> List[DNSConfiguration]:
        """Return Propertie configuraton."""
        return self._configuration

    async def connect(self) -> None:
        """Connect to system's D-Bus."""
        try:
            self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
        except DBusError:
            _LOGGER.warning("Can't connect to DnsManager")
        except DBusInterfaceError:
            _LOGGER.warning(
                "No DnsManager support on the host. Local DNS functions have been disabled."
            )

    @dbus_connected
    async def update(self):
        """Update Properties."""
        data = await self.dbus.get_properties(f"{DBUS_NAME}.DnsManager")
        if not data:
            _LOGGER.warning("Can't get properties for NMI DnsManager")
            return

        self._mode = data.get("Mode")
        self._rc_manager = data.get("RcManager")

        # Parse configuraton
        self._configuration.clear()
        for config in data.get("Configuration", []):
            dns = DNSConfiguration(
                config.get("nameservers"),
                config.get("domains"),
                config.get("interface"),
                config.get("priority"),
                config.get("vpn"),
            )
            self._configuration.append(dns)
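A short usage sketch of the new interface, assuming it is reached through the DBusManager wired up earlier in this diff (coresys.dbus.nmi_dns) and that the host connection succeeded; error handling is omitted:

    async def log_host_dns(coresys):
        """Sketch: read the host DNS setup via NetworkManager's DnsManager object."""
        nmi_dns = coresys.dbus.nmi_dns
        await nmi_dns.update()  # fills mode, rc_manager and configuration
        for cfg in nmi_dns.configuration:
            print(cfg.interface, cfg.nameservers, cfg.priority, cfg.vpn)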
@@ -3,10 +3,10 @@ import logging
 
 from .interface import DBusInterface
 from .utils import dbus_connected
-from ..exceptions import DBusError
+from ..exceptions import DBusError, DBusInterfaceError
 from ..utils.gdbus import DBus
 
-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)
 
 DBUS_NAME = "de.pengutronix.rauc"
 DBUS_OBJECT = "/"
@@ -21,6 +21,8 @@ class Rauc(DBusInterface):
             self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
         except DBusError:
             _LOGGER.warning("Can't connect to rauc")
+        except DBusInterfaceError:
+            _LOGGER.warning("Host has no rauc support. OTA updates have been disabled.")
 
     @dbus_connected
     def install(self, raucb_file):
@@ -3,10 +3,10 @@ import logging
 
 from .interface import DBusInterface
 from .utils import dbus_connected
-from ..exceptions import DBusError
+from ..exceptions import DBusError, DBusInterfaceError
 from ..utils.gdbus import DBus
 
-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)
 
 DBUS_NAME = "org.freedesktop.systemd1"
 DBUS_OBJECT = "/org/freedesktop/systemd1"
@@ -21,6 +21,10 @@ class Systemd(DBusInterface):
             self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
         except DBusError:
             _LOGGER.warning("Can't connect to systemd")
+        except DBusInterfaceError:
+            _LOGGER.warning(
+                "No systemd support on the host. Host control has been disabled."
+            )
 
     @dbus_connected
     def reboot(self):
@@ -19,7 +19,7 @@ from .validate import SCHEMA_DISCOVERY_CONFIG, valid_discovery_config
 if TYPE_CHECKING:
     from ..addons.addon import Addon
 
-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)
 
 CMD_NEW = "post"
 CMD_DEL = "delete"
hassio/dns.py (new file, 375 lines)
@@ -0,0 +1,375 @@
"""Home Assistant control object."""
import asyncio
from contextlib import suppress
from ipaddress import IPv4Address
import logging
from pathlib import Path
from string import Template
from typing import Awaitable, List, Optional

import attr
import voluptuous as vol

from .const import ATTR_SERVERS, ATTR_VERSION, DNS_SERVERS, DNS_SUFFIX, FILE_HASSIO_DNS
from .coresys import CoreSys, CoreSysAttributes
from .docker.dns import DockerDNS
from .docker.stats import DockerStats
from .exceptions import CoreDNSError, CoreDNSUpdateError, DockerAPIError
from .misc.forwarder import DNSForward
from .utils.json import JsonConfig
from .validate import DNS_URL, SCHEMA_DNS_CONFIG

_LOGGER: logging.Logger = logging.getLogger(__name__)

COREDNS_TMPL: Path = Path(__file__).parents[0].joinpath("data/coredns.tmpl")
RESOLV_CONF: Path = Path("/etc/resolv.conf")


@attr.s
class HostEntry:
    """Single entry in hosts."""

    ip_address: IPv4Address = attr.ib()
    names: List[str] = attr.ib()


class CoreDNS(JsonConfig, CoreSysAttributes):
    """Home Assistant core object for handle it."""

    def __init__(self, coresys: CoreSys):
        """Initialize hass object."""
        super().__init__(FILE_HASSIO_DNS, SCHEMA_DNS_CONFIG)
        self.coresys: CoreSys = coresys
        self.instance: DockerDNS = DockerDNS(coresys)
        self.forwarder: DNSForward = DNSForward()

        self._hosts: List[HostEntry] = []

    @property
    def corefile(self) -> Path:
        """Return Path to corefile."""
        return Path(self.sys_config.path_dns, "corefile")

    @property
    def hosts(self) -> Path:
        """Return Path to corefile."""
        return Path(self.sys_config.path_dns, "hosts")

    @property
    def servers(self) -> List[str]:
        """Return list of DNS servers."""
        return self._data[ATTR_SERVERS]

    @servers.setter
    def servers(self, value: List[str]) -> None:
        """Return list of DNS servers."""
        self._data[ATTR_SERVERS] = value

    @property
    def version(self) -> Optional[str]:
        """Return current version of DNS."""
        return self._data.get(ATTR_VERSION)

    @version.setter
    def version(self, value: str) -> None:
        """Return current version of DNS."""
        self._data[ATTR_VERSION] = value

    @property
    def latest_version(self) -> Optional[str]:
        """Return latest version of CoreDNS."""
        return self.sys_updater.version_dns

    @property
    def in_progress(self) -> bool:
        """Return True if a task is in progress."""
        return self.instance.in_progress

    @property
    def need_update(self) -> bool:
        """Return True if an update is available."""
        return self.version != self.latest_version

    async def load(self) -> None:
        """Load DNS setup."""
        self._init_hosts()

        # Check CoreDNS state
        try:
            # Evaluate Version if we lost this information
            if not self.version:
                self.version = await self.instance.get_latest_version(key=int)

            await self.instance.attach(tag=self.version)
        except DockerAPIError:
            _LOGGER.info(
                "No CoreDNS plugin Docker image %s found.", self.instance.image
            )

            # Install CoreDNS
            with suppress(CoreDNSError):
                await self.install()
        else:
            self.version = self.instance.version
            self.save_data()

        # Start DNS forwarder
        self.sys_create_task(self.forwarder.start(self.sys_docker.network.dns))

        with suppress(CoreDNSError):
            self._update_local_resolv()

        # Start is not Running
        if await self.instance.is_running():
            await self.restart()
        else:
            await self.start()

    async def unload(self) -> None:
        """Unload DNS forwarder."""
        await self.forwarder.stop()

    async def install(self) -> None:
        """Install CoreDNS."""
        _LOGGER.info("Setup CoreDNS plugin")
        while True:
            # read homeassistant tag and install it
            if not self.latest_version:
                await self.sys_updater.reload()

            if self.latest_version:
                with suppress(DockerAPIError):
                    await self.instance.install(self.latest_version)
                    break
            _LOGGER.warning("Error on install CoreDNS plugin. Retry in 30sec")
            await asyncio.sleep(30)

        _LOGGER.info("CoreDNS plugin now installed")
        self.version = self.instance.version
        self.save_data()

        # Init Hosts / Run server
        self.write_hosts()
        await self.start()

    async def update(self, version: Optional[str] = None) -> None:
        """Update CoreDNS plugin."""
        version = version or self.latest_version

        if version == self.version:
            _LOGGER.warning("Version %s is already installed for CoreDNS", version)
            return

        try:
            await self.instance.update(version)
        except DockerAPIError:
            _LOGGER.error("CoreDNS update fails")
            raise CoreDNSUpdateError() from None
        else:
            # Cleanup
            with suppress(DockerAPIError):
                await self.instance.cleanup()

        self.version = version
        self.save_data()

        # Start CoreDNS
        await self.start()

    async def restart(self) -> None:
        """Restart CoreDNS plugin."""
        self._write_corefile()
        with suppress(DockerAPIError):
            await self.instance.restart()

    async def start(self) -> None:
        """Run CoreDNS."""
        self._write_corefile()

        # Start Instance
        _LOGGER.info("Start CoreDNS plugin")
        try:
            await self.instance.run()
        except DockerAPIError:
            _LOGGER.error("Can't start CoreDNS plugin")
            raise CoreDNSError() from None

    async def reset(self) -> None:
        """Reset Config / Hosts."""
        self.servers = DNS_SERVERS

        # Resets hosts
        with suppress(OSError):
            self.hosts.unlink()
        self._init_hosts()

        await self.sys_addons.sync_dns()

    def _write_corefile(self) -> None:
        """Write CoreDNS config."""
        try:
            corefile_template: Template = Template(COREDNS_TMPL.read_text())
        except OSError as err:
            _LOGGER.error("Can't read coredns template file: %s", err)
            raise CoreDNSError() from None

        # Prepare DNS serverlist: Prio 1 Local, Prio 2 Manual, Prio 3 Fallback
        dns_servers = []
        for server in self.sys_host.network.dns_servers + self.servers + DNS_SERVERS:
            try:
                DNS_URL(server)
                if server not in dns_servers:
                    dns_servers.append(server)
            except vol.Invalid:
                _LOGGER.warning("Ignore invalid DNS Server: %s", server)

        # Generate config file
        data = corefile_template.safe_substitute(servers=" ".join(dns_servers))

        try:
            self.corefile.write_text(data)
        except OSError as err:
            _LOGGER.error("Can't update corefile: %s", err)
            raise CoreDNSError() from None

    def _init_hosts(self) -> None:
        """Import hosts entry."""
        # Generate Default
        self.add_host(IPv4Address("127.0.0.1"), ["localhost"], write=False)
        self.add_host(
            self.sys_docker.network.supervisor, ["hassio", "supervisor"], write=False
        )
        self.add_host(
            self.sys_docker.network.gateway,
            ["homeassistant", "home-assistant"],
            write=False,
        )
        self.add_host(self.sys_docker.network.dns, ["dns"], write=False)

    def write_hosts(self) -> None:
        """Write hosts from memory to file."""
        try:
            with self.hosts.open("w") as hosts:
                for entry in self._hosts:
                    hosts.write(f"{entry.ip_address!s} {' '.join(entry.names)}\n")
        except OSError as err:
            _LOGGER.error("Can't write hosts file: %s", err)
            raise CoreDNSError() from None

    def add_host(self, ipv4: IPv4Address, names: List[str], write: bool = True) -> None:
        """Add a new host entry."""
        if not ipv4 or ipv4 == IPv4Address("0.0.0.0"):
            return

        hostnames: List[str] = []
        for name in names:
            hostnames.append(name)
            hostnames.append(f"{name}.{DNS_SUFFIX}")

        # Generate host entry
        entry = HostEntry(ipv4, hostnames)
        old = self._search_host(hostnames)

        if old:
            _LOGGER.debug("Update Host entry %s -> %s", ipv4, hostnames)
            self._hosts.remove(old)
        else:
            _LOGGER.debug("Add Host entry %s -> %s", ipv4, hostnames)
        self._hosts.append(entry)

        # Update hosts file
        if write:
            self.write_hosts()

    def delete_host(self, host: str, write: bool = True) -> None:
        """Remove a entry from hosts."""
        entry = self._search_host([host])

        # No match on hosts
        if not entry:
            _LOGGER.debug("Can't remove Host entry: %s", host)
            return

        _LOGGER.debug("Remove Host entry %s - %s", entry.ip_address, entry.names)
        self._hosts.remove(entry)

        # Update hosts file
        if write:
            self.write_hosts()

    def _search_host(self, names: List[str]) -> Optional[HostEntry]:
        """Search a host entry."""
        for entry in self._hosts:
            for name in names:
                if name not in entry.names:
                    continue
                return entry

    def logs(self) -> Awaitable[bytes]:
        """Get CoreDNS docker logs.

        Return Coroutine.
        """
        return self.instance.logs()

    async def stats(self) -> DockerStats:
        """Return stats of CoreDNS."""
        try:
            return await self.instance.stats()
        except DockerAPIError:
            raise CoreDNSError() from None

    def is_running(self) -> Awaitable[bool]:
        """Return True if Docker container is running.

        Return a coroutine.
        """
        return self.instance.is_running()

    def is_fails(self) -> Awaitable[bool]:
        """Return True if a Docker container is fails state.

        Return a coroutine.
        """
        return self.instance.is_fails()

    async def repair(self) -> None:
        """Repair CoreDNS plugin."""
        if await self.instance.exists():
            return

        _LOGGER.info("Repair CoreDNS %s", self.version)
        try:
            await self.instance.install(self.version)
        except DockerAPIError:
            _LOGGER.error("Repairing of CoreDNS fails")

    def _update_local_resolv(self) -> None:
        """Update local resolv file."""
        resolv_lines: List[str] = []
        nameserver = f"nameserver {self.sys_docker.network.dns!s}"

        # Read resolv config
        try:
            with RESOLV_CONF.open("r") as resolv:
                for line in resolv.readlines():
                    if not line:
                        continue
                    resolv_lines.append(line.strip())
        except OSError as err:
            _LOGGER.warning("Can't read local resolv: %s", err)
            raise CoreDNSError() from None

        if nameserver in resolv_lines:
            return
        _LOGGER.info("Update resolv from Supervisor")

        # Write config back to resolv
        resolv_lines.append(nameserver)
        try:
            with RESOLV_CONF.open("w") as resolv:
                for line in resolv_lines:
                    resolv.write(f"{line}\n")
        except OSError as err:
            _LOGGER.warning("Can't write local resolv: %s", err)
            raise CoreDNSError() from None
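The server handling in _write_corefile above encodes the priority policy: host-provided servers first, then user-configured servers, then the DNS_SERVERS fallback, validated and de-duplicated in that order. A standalone sketch of just that merge (the validator argument stands in for hassio.validate.DNS_URL):

    import voluptuous as vol

    def merge_dns_servers(local, configured, fallback, validator):
        """Sketch of the priority merge used in CoreDNS._write_corefile."""
        merged = []
        for server in local + configured + fallback:
            try:
                validator(server)
            except vol.Invalid:
                continue  # the real code logs and skips invalid entries
            if server not in merged:
                merged.append(server)
        return merged

    # merge_dns_servers(["dns://192.168.1.1"], [], ["dns://1.1.1.1", "dns://9.9.9.9"], DNS_URL)
    # -> ["dns://192.168.1.1", "dns://1.1.1.1", "dns://9.9.9.9"]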
@@ -1,16 +1,17 @@
 """Init file for Hass.io Docker object."""
-import logging
 from contextlib import suppress
+from ipaddress import IPv4Address
+import logging
 from typing import Any, Dict, Optional
 
 import attr
 import docker
 
-from ..const import SOCKET_DOCKER
+from ..const import SOCKET_DOCKER, DNS_SUFFIX
 from ..exceptions import DockerAPIError
 from .network import DockerNetwork
 
-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)
 
 
 @attr.s(frozen=True)
@@ -50,7 +51,11 @@ class DockerAPI:
         return self.docker.api
 
     def run(
-        self, image: str, version: str = "latest", **kwargs: Dict[str, Any]
+        self,
+        image: str,
+        version: str = "latest",
+        ipv4: Optional[IPv4Address] = None,
+        **kwargs: Dict[str, Any],
     ) -> docker.models.containers.Container:
         """"Create a Docker container and run it.
 
@@ -60,12 +65,13 @@ class DockerAPI:
         network_mode: str = kwargs.get("network_mode")
         hostname: str = kwargs.get("hostname")
 
+        # Setup DNS
+        kwargs["dns"] = [str(self.network.dns)]
+        kwargs["dns_search"] = [DNS_SUFFIX]
+        kwargs["domainname"] = DNS_SUFFIX
+
         # Setup network
-        kwargs["dns_search"] = ["."]
-        if network_mode:
-            kwargs["dns"] = [str(self.network.supervisor)]
-            kwargs["dns_opt"] = ["ndots:0"]
-        else:
+        if not network_mode:
             kwargs["network"] = None
 
         # Create container
@@ -81,7 +87,7 @@ class DockerAPI:
         if not network_mode:
             alias = [hostname] if hostname else None
             try:
-                self.network.attach_container(container, alias=alias)
+                self.network.attach_container(container, alias=alias, ipv4=ipv4)
             except DockerAPIError:
                 _LOGGER.warning("Can't attach %s to hassio-net!", name)
             else:
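With these defaults, every container started through DockerAPI.run() now resolves names through the embedded DNS plugin instead of per-container dns/dns_search tweaks. A sketch of the values the method injects, using the constants from this diff:

    def dns_run_defaults(network):
        """Sketch: DNS kwargs DockerAPI.run() adds before creating a container."""
        return {
            "dns": [str(network.dns)],        # 172.30.32.3, DOCKER_NETWORK_MASK[3]
            "dns_search": ["local.hass.io"],  # DNS_SUFFIX
            "domainname": "local.hass.io",
        }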
@@ -32,9 +32,10 @@ if TYPE_CHECKING:
     from ..addons.addon import Addon
 
 
-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)
 
 AUDIO_DEVICE = "/dev/snd:/dev/snd:rwm"
+NO_ADDDRESS = ip_address("0.0.0.0")
 
 
 class DockerAddon(DockerInterface):
@@ -62,7 +63,7 @@ class DockerAddon(DockerInterface):
                 self._meta["NetworkSettings"]["Networks"]["hassio"]["IPAddress"]
             )
         except (KeyError, TypeError, ValueError):
-            return ip_address("0.0.0.0")
+            return NO_ADDDRESS
 
     @property
     def timeout(self) -> int:
@@ -100,11 +101,6 @@ class DockerAddon(DockerInterface):
         """Return True if full access is enabled."""
         return not self.addon.protected and self.addon.with_full_access
 
-    @property
-    def hostname(self) -> str:
-        """Return slug/id of add-on."""
-        return self.addon.slug.replace("_", "-")
-
     @property
     def environment(self) -> Dict[str, str]:
         """Return environment for Docker add-on."""
@@ -139,7 +135,14 @@ class DockerAddon(DockerInterface):
 
         # Auto mapping UART devices
         if self.addon.auto_uart:
-            for device in self.sys_hardware.serial_devices:
+            if self.addon.with_udev:
+                serial_devs = self.sys_hardware.serial_devices
+            else:
+                serial_devs = (
+                    self.sys_hardware.serial_devices | self.sys_hardware.serial_by_id
+                )
+
+            for device in serial_devs:
                 devices.append(f"{device}:{device}:rwm")
 
         # Return None if no devices is present
@@ -186,10 +189,7 @@ class DockerAddon(DockerInterface):
     @property
     def network_mapping(self) -> Dict[str, str]:
         """Return hosts mapping."""
-        return {
-            "homeassistant": self.sys_docker.network.gateway,
-            "hassio": self.sys_docker.network.supervisor,
-        }
+        return {"hassio": self.sys_docker.network.supervisor}
 
     @property
     def network_mode(self) -> Optional[str]:
@@ -329,7 +329,7 @@ class DockerAddon(DockerInterface):
             self.image,
             version=self.addon.version,
             name=self.name,
-            hostname=self.hostname,
+            hostname=self.addon.hostname,
             detach=True,
             init=True,
             privileged=self.full_access,
@@ -350,6 +350,9 @@ class DockerAddon(DockerInterface):
         self._meta = docker_container.attrs
         _LOGGER.info("Start Docker add-on %s with version %s", self.image, self.version)
 
+        # Write data to DNS server
+        self.sys_dns.add_host(ipv4=self.ip_address, names=[self.addon.hostname])
+
     def _install(
         self, tag: str, image: Optional[str] = None, latest: bool = False
     ) -> None:
@@ -467,3 +470,12 @@ class DockerAddon(DockerInterface):
         except OSError as err:
             _LOGGER.error("Can't write to %s stdin: %s", self.name, err)
             raise DockerAPIError() from None
+
+    def _stop(self, remove_container=True) -> None:
+        """Stop/remove Docker container.
+
+        Need run inside executor.
+        """
+        if self.ip_address != NO_ADDDRESS:
+            self.sys_dns.delete_host(self.addon.hostname)
+        super()._stop(remove_container)
hassio/docker/dns.py (new file, 56 lines)
@@ -0,0 +1,56 @@
"""HassOS Cli docker object."""
from contextlib import suppress
import logging

from ..const import ENV_TIME
from ..coresys import CoreSysAttributes
from ..exceptions import DockerAPIError
from .interface import DockerInterface

_LOGGER: logging.Logger = logging.getLogger(__name__)

DNS_DOCKER_NAME: str = "hassio_dns"


class DockerDNS(DockerInterface, CoreSysAttributes):
    """Docker Hass.io wrapper for Hass.io DNS."""

    @property
    def image(self) -> str:
        """Return name of Hass.io DNS image."""
        return f"homeassistant/{self.sys_arch.supervisor}-hassio-dns"

    @property
    def name(self) -> str:
        """Return name of Docker container."""
        return DNS_DOCKER_NAME

    def _run(self) -> None:
        """Run Docker image.

        Need run inside executor.
        """
        if self._is_running():
            return

        # Cleanup
        with suppress(DockerAPIError):
            self._stop()

        # Create & Run container
        docker_container = self.sys_docker.run(
            self.image,
            version=self.sys_dns.version,
            ipv4=self.sys_docker.network.dns,
            name=self.name,
            hostname=self.name.replace("_", "-"),
            detach=True,
            init=True,
            environment={ENV_TIME: self.sys_timezone},
            volumes={
                str(self.sys_config.path_extern_dns): {"bind": "/config", "mode": "ro"}
            },
        )

        self._meta = docker_container.attrs
        _LOGGER.info("Start DNS %s with version %s", self.image, self.version)
@@ -6,7 +6,7 @@ import docker
 from ..coresys import CoreSysAttributes
 from .interface import DockerInterface
 
-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)
 
 
 class DockerHassOSCli(DockerInterface, CoreSysAttributes):
@@ -1,10 +1,8 @@
 """Init file for Hass.io Docker object."""
-from distutils.version import StrictVersion
 from contextlib import suppress
 from ipaddress import IPv4Address
 import logging
-import re
-from typing import Awaitable, List, Optional
+from typing import Awaitable, Optional
 
 import docker
 
@@ -12,10 +10,9 @@ from ..const import ENV_TIME, ENV_TOKEN, LABEL_MACHINE
 from ..exceptions import DockerAPIError
 from .interface import CommandReturn, DockerInterface
 
-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)
 
 HASS_DOCKER_NAME = "homeassistant"
-RE_VERSION = re.compile(r"(?P<version>\d+\.\d+\.\d+(?:b\d+|d\d+)?)")
 
 
 class DockerHomeAssistant(DockerInterface):
@@ -139,33 +136,3 @@ class DockerHomeAssistant(DockerInterface):
             return False
 
         return True
-
-    def get_latest_version(self) -> Awaitable[str]:
-        """Return latest version of local Home Asssistant image."""
-        return self.sys_run_in_executor(self._get_latest_version)
-
-    def _get_latest_version(self) -> str:
-        """Return latest version of local Home Asssistant image.
-
-        Need run inside executor.
-        """
-        available_version: List[str] = []
-        try:
-            for image in self.sys_docker.images.list(self.image):
-                for tag in image.tags:
-                    match = RE_VERSION.search(tag)
-                    if not match:
-                        continue
-                    available_version.append(match.group("version"))
-
-            assert available_version
-
-        except (docker.errors.DockerException, AssertionError):
-            _LOGGER.warning("No local HA version found")
-            raise DockerAPIError()
-        else:
-            _LOGGER.debug("Found HA versions: %s", available_version)
-
-        # Sort version and return latest version
-        available_version.sort(key=StrictVersion, reverse=True)
-        return available_version[0]
@@ -2,18 +2,18 @@
 import asyncio
 from contextlib import suppress
 import logging
-from typing import Any, Dict, Optional, Awaitable
+from typing import Any, Awaitable, Dict, List, Optional
 
 import docker
 
+from . import CommandReturn
 from ..const import LABEL_ARCH, LABEL_VERSION
 from ..coresys import CoreSys, CoreSysAttributes
 from ..exceptions import DockerAPIError
 from ..utils import process_lock
 from .stats import DockerStats
-from . import CommandReturn
 
-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)
 
 
 class DockerInterface(CoreSysAttributes):
@@ -50,7 +50,10 @@ class DockerInterface(CoreSysAttributes):
     @property
     def image(self) -> Optional[str]:
         """Return name of Docker image."""
-        return self.meta_config.get("Image")
+        try:
+            return self.meta_config["Image"].partition(":")[0]
+        except KeyError:
+            return None
 
     @property
     def version(self) -> Optional[str]:
@@ -80,7 +83,6 @@ class DockerInterface(CoreSysAttributes):
         Need run inside executor.
         """
         image = image or self.image
-        image = image.partition(":")[0]  # remove potential tag
 
         _LOGGER.info("Pull image %s tag %s.", image, tag)
         try:
@@ -248,7 +250,7 @@ class DockerInterface(CoreSysAttributes):
         self, tag: str, image: Optional[str] = None, latest: bool = False
     ) -> Awaitable[None]:
         """Update a Docker image."""
-        return self.sys_run_in_executor(self._update, tag, image)
+        return self.sys_run_in_executor(self._update, tag, image, latest)
 
     def _update(
         self, tag: str, image: Optional[str] = None, latest: bool = False
@@ -264,7 +266,7 @@ class DockerInterface(CoreSysAttributes):
         )
 
         # Update docker image
-        self._install(tag, image, latest)
+        self._install(tag, image=image, latest=latest)
 
         # Stop container & cleanup
         with suppress(DockerAPIError):
@@ -397,3 +399,35 @@ class DockerInterface(CoreSysAttributes):
             return True
 
         return False
+
+    def get_latest_version(self, key: Any = int) -> Awaitable[str]:
+        """Return latest version of local Home Asssistant image."""
+        return self.sys_run_in_executor(self._get_latest_version, key)
+
+    def _get_latest_version(self, key: Any = int) -> str:
+        """Return latest version of local Home Asssistant image.
+
+        Need run inside executor.
+        """
+        available_version: List[str] = []
+        try:
+            for image in self.sys_docker.images.list(self.image):
+                for tag in image.tags:
+                    version = tag.partition(":")[2]
+                    try:
+                        key(version)
+                    except (AttributeError, ValueError):
+                        continue
+                    available_version.append(version)
+
+            assert available_version
+
+        except (docker.errors.DockerException, AssertionError):
+            _LOGGER.debug("No version found for %s", self.image)
+            raise DockerAPIError()
+        else:
+            _LOGGER.debug("Found HA versions: %s", available_version)
+
+        # Sort version and return latest version
+        available_version.sort(key=key, reverse=True)
+        return available_version[0]
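The generic get_latest_version() moved here from DockerHomeAssistant filters local image tags through the key callable and sorts with it, so the caller picks the version scheme; CoreDNS calls it with key=int because its tags are plain integers. A minimal sketch of the same filter-and-sort over an assumed tag list:

    def latest_tag(tags, key=int):
        """Sketch of the tag selection in DockerInterface._get_latest_version."""
        versions = []
        for tag in tags:
            version = tag.partition(":")[2]
            try:
                key(version)  # drop tags the key cannot parse, e.g. "latest" or untagged names
            except (AttributeError, ValueError):
                continue
            versions.append(version)
        versions.sort(key=key, reverse=True)
        return versions[0]  # raises IndexError if nothing matched; the real code asserts instead

    # latest_tag(["img:1", "img:latest", "img:2"]) -> "2"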
@@ -8,7 +8,7 @@ import docker
 from ..const import DOCKER_NETWORK, DOCKER_NETWORK_MASK, DOCKER_NETWORK_RANGE
 from ..exceptions import DockerAPIError
 
-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)
 
 
 class DockerNetwork:
@@ -42,6 +42,11 @@ class DockerNetwork:
         """Return supervisor of the network."""
        return DOCKER_NETWORK_MASK[2]
 
+    @property
+    def dns(self) -> IPv4Address:
+        """Return dns of the network."""
+        return DOCKER_NETWORK_MASK[3]
+
     def _get_network(self) -> docker.models.networks.Network:
         """Get HassIO network."""
         try:
@@ -20,6 +20,12 @@ class DockerStats:
         self._memory_usage = 0
         self._memory_limit = 0

+        # Calculate percent usage
+        if self._memory_limit != 0:
+            self._memory_percent = self._memory_usage / self._memory_limit * 100.0
+        else:
+            self._memory_percent = 0
+
         with suppress(KeyError):
             self._calc_cpu_percent(stats)

@@ -39,13 +45,12 @@ class DockerStats:
             stats["cpu_stats"]["system_cpu_usage"]
             - stats["precpu_stats"]["system_cpu_usage"]
         )
+        online_cpu = stats["cpu_stats"]["online_cpus"]
+
+        if online_cpu == 0.0:
+            online_cpu = len(stats["cpu_stats"]["cpu_usage"]["percpu_usage"])
         if system_delta > 0.0 and cpu_delta > 0.0:
-            self._cpu = (
-                (cpu_delta / system_delta)
-                * len(stats["cpu_stats"]["cpu_usage"]["percpu_usage"])
-                * 100.0
-            )
+            self._cpu = (cpu_delta / system_delta) * online_cpu * 100.0

     def _calc_network(self, networks):
         """Calculate Network IO stats."""
@@ -64,7 +69,7 @@ class DockerStats:
     @property
     def cpu_percent(self):
         """Return CPU percent."""
-        return self._cpu
+        return round(self._cpu, 2)

     @property
     def memory_usage(self):
@@ -76,6 +81,11 @@ class DockerStats:
         """Return memory limit."""
         return self._memory_limit

+    @property
+    def memory_percent(self):
+        """Return memory usage in percent."""
+        return round(self._memory_percent, 2)
+
     @property
     def network_rx(self):
         """Return network rx stats."""
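Illustrative sketch (not part of the commit above): the CPU change reworks the formula so the delta of container and system CPU time between two samples from Docker's stats JSON is scaled by the reported number of online CPUs, falling back to the per-CPU list when `online_cpus` is zero. The same calculation in isolation:

def docker_cpu_percent(stats: dict) -> float:
    """Minimal standalone version of the CPU% formula (Docker stats JSON layout assumed)."""
    cpu_delta = (
        stats["cpu_stats"]["cpu_usage"]["total_usage"]
        - stats["precpu_stats"]["cpu_usage"]["total_usage"]
    )
    system_delta = (
        stats["cpu_stats"]["system_cpu_usage"]
        - stats["precpu_stats"]["system_cpu_usage"]
    )
    # Prefer the reported CPU count; fall back to counting per-CPU entries
    online_cpu = stats["cpu_stats"].get("online_cpus", 0.0)
    if online_cpu == 0.0:
        online_cpu = len(stats["cpu_stats"]["cpu_usage"]["percpu_usage"])
    if system_delta > 0.0 and cpu_delta > 0.0:
        return round((cpu_delta / system_delta) * online_cpu * 100.0, 2)
    return 0.0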
@@ -10,7 +10,7 @@ from ..coresys import CoreSysAttributes
 from ..exceptions import DockerAPIError
 from .interface import DockerInterface

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 class DockerSupervisor(DockerInterface, CoreSysAttributes):
@@ -38,7 +38,9 @@ class DockerSupervisor(DockerInterface, CoreSysAttributes):

         self._meta = docker_container.attrs
         _LOGGER.info(
-            "Attach to Supervisor %s with version %s", self.image, self.version
+            "Attach to Supervisor %s with version %s",
+            self.image,
+            self.sys_supervisor.version,
         )

         # If already attach
@@ -54,6 +54,17 @@ class HassOSNotSupportedError(HassioNotSupportedError):
     """Function not supported by HassOS."""


+# DNS
+
+
+class CoreDNSError(HassioError):
+    """CoreDNS exception."""
+
+
+class CoreDNSUpdateError(CoreDNSError):
+    """Error on update of a CoreDNS."""
+
+
 # Addons


@@ -138,6 +149,10 @@ class DBusNotConnectedError(HostNotSupportedError):
     """DBus is not connected and call a method."""


+class DBusInterfaceError(HassioNotSupportedError):
+    """DBus interface not connected."""
+
+
 class DBusFatalError(DBusError):
     """DBus call going wrong."""

@@ -18,7 +18,7 @@ from .exceptions import (
     DockerAPIError,
 )

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 class HassOS(CoreSysAttributes):
@@ -188,13 +188,13 @@ class HassOS(CoreSysAttributes):

         try:
             await self.instance.update(version, latest=True)
-
-            # Cleanup
-            with suppress(DockerAPIError):
-                await self.instance.cleanup()
         except DockerAPIError:
             _LOGGER.error("HassOS CLI update fails")
             raise HassOSUpdateError() from None
+        else:
+            # Cleanup
+            with suppress(DockerAPIError):
+                await self.instance.cleanup()

     async def repair_cli(self) -> None:
         """Repair CLI container."""
@@ -2,6 +2,7 @@
 import asyncio
 from contextlib import asynccontextmanager, suppress
 from datetime import datetime, timedelta
+from distutils.version import StrictVersion
 from ipaddress import IPv4Address
 import logging
 import os
@@ -46,7 +47,7 @@ from .utils import check_port, convert_to_ascii, process_lock
 from .utils.json import JsonConfig
 from .validate import SCHEMA_HASS_CONFIG

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)

 RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml")

@@ -79,7 +80,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
         try:
             # Evaluate Version if we lost this information
             if not self.version:
-                self.version = await self.instance.get_latest_version()
+                self.version = await self.instance.get_latest_version(key=StrictVersion)

             await self.instance.attach(tag=self.version)
         except DockerAPIError:
@@ -7,6 +7,7 @@ from .apparmor import AppArmorControl
 from .control import SystemControl
 from .info import InfoCenter
 from .services import ServiceManager
+from .network import NetworkManager
 from ..const import (
     FEATURES_REBOOT,
     FEATURES_SHUTDOWN,
@@ -14,49 +15,56 @@ from ..const import (
     FEATURES_SERVICES,
     FEATURES_HASSOS,
 )
-from ..coresys import CoreSysAttributes
+from ..coresys import CoreSysAttributes, CoreSys
 from ..exceptions import HassioError

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 class HostManager(CoreSysAttributes):
     """Manage supported function from host."""

-    def __init__(self, coresys):
+    def __init__(self, coresys: CoreSys):
         """Initialize Host manager."""
-        self.coresys = coresys
-        self._alsa = AlsaAudio(coresys)
-        self._apparmor = AppArmorControl(coresys)
-        self._control = SystemControl(coresys)
-        self._info = InfoCenter(coresys)
-        self._services = ServiceManager(coresys)
+        self.coresys: CoreSys = coresys
+        self._alsa: AlsaAudio = AlsaAudio(coresys)
+        self._apparmor: AppArmorControl = AppArmorControl(coresys)
+        self._control: SystemControl = SystemControl(coresys)
+        self._info: InfoCenter = InfoCenter(coresys)
+        self._services: ServiceManager = ServiceManager(coresys)
+        self._network: NetworkManager = NetworkManager(coresys)

     @property
-    def alsa(self):
+    def alsa(self) -> AlsaAudio:
         """Return host ALSA handler."""
         return self._alsa

     @property
-    def apparmor(self):
+    def apparmor(self) -> AppArmorControl:
         """Return host AppArmor handler."""
         return self._apparmor

     @property
-    def control(self):
+    def control(self) -> SystemControl:
         """Return host control handler."""
         return self._control

     @property
-    def info(self):
+    def info(self) -> InfoCenter:
         """Return host info handler."""
         return self._info

     @property
-    def services(self):
+    def services(self) -> ServiceManager:
         """Return host services handler."""
         return self._services

+    @property
+    def network(self) -> NetworkManager:
+        """Return host NetworkManager handler."""
+        return self._network
+
     @property
     def supperted_features(self):
         """Return a list of supported host features."""
@@ -81,6 +89,9 @@ class HostManager(CoreSysAttributes):
         if self.sys_dbus.systemd.is_connected:
             await self.services.update()

+        if self.sys_dbus.nmi_dns.is_connected:
+            await self.network.update()
+
     async def load(self):
         """Load host information."""
         with suppress(HassioError):
@@ -9,12 +9,16 @@ import attr
 from ..const import ATTR_INPUT, ATTR_OUTPUT, ATTR_DEVICES, ATTR_NAME, CHAN_ID, CHAN_TYPE
 from ..coresys import CoreSysAttributes

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)

 # pylint: disable=invalid-name
 DefaultConfig = attr.make_class("DefaultConfig", ["input", "output"])

+
+AUDIODB_JSON: Path = Path(__file__).parents[1].joinpath("data/audiodb.json")
+ASOUND_TMPL: Path = Path(__file__).parents[1].joinpath("data/asound.tmpl")
+

 class AlsaAudio(CoreSysAttributes):
     """Handle Audio ALSA host data."""

@@ -82,12 +86,8 @@ class AlsaAudio(CoreSysAttributes):
     @staticmethod
     def _audio_database():
         """Read local json audio data into dict."""
-        json_file = Path(__file__).parent.joinpath("data/audiodb.json")
-
         try:
-            # pylint: disable=no-member
-            with json_file.open("r") as database:
-                return json.loads(database.read())
+            return json.loads(AUDIODB_JSON.read_text())
         except (ValueError, OSError) as err:
             _LOGGER.warning("Can't read audio DB: %s", err)

@@ -122,11 +122,8 @@ class AlsaAudio(CoreSysAttributes):
         alsa_output = alsa_output or self.default.output

         # Read Template
-        asound_file = Path(__file__).parent.joinpath("data/asound.tmpl")
         try:
-            # pylint: disable=no-member
-            with asound_file.open("r") as asound:
-                asound_data = asound.read()
+            asound_data = ASOUND_TMPL.read_text()
         except OSError as err:
             _LOGGER.error("Can't read asound.tmpl: %s", err)
             return ""
@@ -7,7 +7,7 @@ from ..coresys import CoreSysAttributes
 from ..exceptions import DBusError, HostAppArmorError
 from ..utils.apparmor import validate_profile

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)

 SYSTEMD_SERVICES = {"hassos-apparmor.service", "hassio-apparmor.service"}

@@ -4,7 +4,7 @@ import logging
 from ..coresys import CoreSysAttributes
 from ..exceptions import HostNotSupportedError

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)

 MANAGER = "manager"
 HOSTNAME = "hostname"
@@ -1,10 +1,11 @@
 """Info control for host."""
 import logging
+from typing import Optional

 from ..coresys import CoreSysAttributes
-from ..exceptions import HassioError, HostNotSupportedError
+from ..exceptions import HostNotSupportedError, DBusNotConnectedError, DBusError

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 class InfoCenter(CoreSysAttributes):
@@ -13,46 +14,44 @@ class InfoCenter(CoreSysAttributes):
     def __init__(self, coresys):
         """Initialize system center handling."""
         self.coresys = coresys
-        self._data = {}

     @property
-    def hostname(self):
+    def hostname(self) -> Optional[str]:
         """Return local hostname."""
-        return self._data.get("StaticHostname") or None
+        return self.sys_dbus.hostname.hostname

     @property
-    def chassis(self):
+    def chassis(self) -> Optional[str]:
         """Return local chassis type."""
-        return self._data.get("Chassis") or None
+        return self.sys_dbus.hostname.chassis

     @property
-    def deployment(self):
+    def deployment(self) -> Optional[str]:
         """Return local deployment type."""
-        return self._data.get("Deployment") or None
+        return self.sys_dbus.hostname.deployment

     @property
-    def kernel(self):
+    def kernel(self) -> Optional[str]:
         """Return local kernel version."""
-        return self._data.get("KernelRelease") or None
+        return self.sys_dbus.hostname.kernel

     @property
-    def operating_system(self):
+    def operating_system(self) -> Optional[str]:
         """Return local operating system."""
-        return self._data.get("OperatingSystemPrettyName") or None
+        return self.sys_dbus.hostname.operating_system

     @property
-    def cpe(self):
+    def cpe(self) -> Optional[str]:
         """Return local CPE."""
-        return self._data.get("OperatingSystemCPEName") or None
+        return self.sys_dbus.hostname.cpe

     async def update(self):
         """Update properties over dbus."""
-        if not self.sys_dbus.hostname.is_connected:
-            _LOGGER.error("No hostname D-Bus connection available")
-            raise HostNotSupportedError()
-
         _LOGGER.info("Update local host information")
         try:
-            self._data = await self.sys_dbus.hostname.get_properties()
-        except HassioError:
+            await self.sys_dbus.hostname.update()
+        except DBusError:
             _LOGGER.warning("Can't update host system information!")
+        except DBusNotConnectedError:
+            _LOGGER.error("No hostname D-Bus connection available")
+            raise HostNotSupportedError() from None
hassio/host/network.py  (new file, 39 lines)
@@ -0,0 +1,39 @@
+"""Info control for host."""
+import logging
+from typing import List, Set
+
+from ..coresys import CoreSysAttributes, CoreSys
+from ..exceptions import HostNotSupportedError, DBusNotConnectedError, DBusError
+
+_LOGGER: logging.Logger = logging.getLogger(__name__)
+
+
+class NetworkManager(CoreSysAttributes):
+    """Handle local network setup."""
+
+    def __init__(self, coresys: CoreSys):
+        """Initialize system center handling."""
+        self.coresys: CoreSys = coresys
+
+    @property
+    def dns_servers(self) -> List[str]:
+        """Return a list of local DNS servers."""
+        # Read all local dns servers
+        servers: Set[str] = set()
+        for config in self.sys_dbus.nmi_dns.configuration:
+            if config.vpn or not config.nameservers:
+                continue
+            servers |= set(config.nameservers)
+
+        return [f"dns://{server}" for server in servers]
+
+    async def update(self):
+        """Update properties over dbus."""
+        _LOGGER.info("Update local network DNS information")
+        try:
+            await self.sys_dbus.nmi_dns.update()
+        except DBusError:
+            _LOGGER.warning("Can't update host DNS system information!")
+        except DBusNotConnectedError:
+            _LOGGER.error("No hostname D-Bus connection available")
+            raise HostNotSupportedError() from None
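Illustrative sketch (not part of the commit above): the new `dns_servers` property reduces NetworkManager's DnsManager configuration to a de-duplicated list of `dns://` URLs, skipping VPN entries. The same filtering in isolation, with a hypothetical `DnsConfig` tuple standing in for the D-Bus data:

from typing import List, NamedTuple, Set

class DnsConfig(NamedTuple):
    # Stand-in for one DnsManager configuration entry
    nameservers: List[str]
    vpn: bool

def dns_urls(configs: List[DnsConfig]) -> List[str]:
    """Collect unique non-VPN nameservers as dns:// URLs."""
    servers: Set[str] = set()
    for config in configs:
        if config.vpn or not config.nameservers:
            continue
        servers |= set(config.nameservers)
    return [f"dns://{server}" for server in servers]

# Example: dns_urls([DnsConfig(["192.168.1.1"], False)]) -> ["dns://192.168.1.1"]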
@@ -6,7 +6,7 @@ import attr
 from ..coresys import CoreSysAttributes
 from ..exceptions import HassioError, HostNotSupportedError, HostServiceError

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)

 MOD_REPLACE = "replace"

@@ -12,7 +12,7 @@ from .utils.dt import utc_from_timestamp, utcnow
 from .utils.json import JsonConfig
 from .validate import SCHEMA_INGRESS_CONFIG

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 class Ingress(JsonConfig, CoreSysAttributes):
@@ -2,12 +2,14 @@
 import asyncio
 import logging
 import shlex
+from ipaddress import IPv4Address
+from typing import Optional

 import async_timeout

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)

-COMMAND = "socat UDP-RECVFROM:53,fork UDP-SENDTO:127.0.0.11:53"
+COMMAND = "socat UDP-RECVFROM:53,fork UDP-SENDTO:{!s}:53"


 class DNSForward:
@@ -15,13 +17,13 @@ class DNSForward:

     def __init__(self):
         """Initialize DNS forwarding."""
-        self.proc = None
+        self.proc: Optional[asyncio.Process] = None

-    async def start(self):
+    async def start(self, dns_server: IPv4Address) -> None:
         """Start DNS forwarding."""
         try:
             self.proc = await asyncio.create_subprocess_exec(
-                *shlex.split(COMMAND),
+                *shlex.split(COMMAND.format(dns_server)),
                 stdin=asyncio.subprocess.DEVNULL,
                 stdout=asyncio.subprocess.DEVNULL,
                 stderr=asyncio.subprocess.DEVNULL,
@@ -29,9 +31,9 @@ class DNSForward:
         except OSError as err:
             _LOGGER.error("Can't start DNS forwarding: %s", err)
         else:
-            _LOGGER.info("Start DNS port forwarding for host add-ons")
+            _LOGGER.info("Start DNS port forwarding to %s", dns_server)

-    async def stop(self):
+    async def stop(self) -> None:
         """Stop DNS forwarding."""
         if not self.proc:
             _LOGGER.warning("DNS forwarding is not running!")
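Illustrative sketch (not part of the commit above): with this change the socat relay target is no longer hard-coded to Docker's embedded resolver (127.0.0.11) but is formatted from the `dns_server` passed to `start()`. How the command string expands, with an example address:

from ipaddress import IPv4Address
import shlex

COMMAND = "socat UDP-RECVFROM:53,fork UDP-SENDTO:{!s}:53"

dns_server = IPv4Address("172.30.32.3")  # example value only
print(shlex.split(COMMAND.format(dns_server)))
# ['socat', 'UDP-RECVFROM:53,fork', 'UDP-SENDTO:172.30.32.3:53']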
@@ -3,25 +3,26 @@ from datetime import datetime
 import logging
 from pathlib import Path
 import re
+from typing import Any, Dict, Optional, Set

 import pyudev

-from ..const import ATTR_NAME, ATTR_TYPE, ATTR_DEVICES, CHAN_ID, CHAN_TYPE
+from ..const import ATTR_DEVICES, ATTR_NAME, ATTR_TYPE, CHAN_ID, CHAN_TYPE

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)

-ASOUND_CARDS = Path("/proc/asound/cards")
-RE_CARDS = re.compile(r"(\d+) \[(\w*) *\]: (.*\w)")
+ASOUND_CARDS: Path = Path("/proc/asound/cards")
+RE_CARDS: re.Pattern = re.compile(r"(\d+) \[(\w*) *\]: (.*\w)")

-ASOUND_DEVICES = Path("/proc/asound/devices")
-RE_DEVICES = re.compile(r"\[.*(\d+)- (\d+).*\]: ([\w ]*)")
+ASOUND_DEVICES: Path = Path("/proc/asound/devices")
+RE_DEVICES: re.Pattern = re.compile(r"\[.*(\d+)- (\d+).*\]: ([\w ]*)")

-PROC_STAT = Path("/proc/stat")
-RE_BOOT_TIME = re.compile(r"btime (\d+)")
+PROC_STAT: Path = Path("/proc/stat")
+RE_BOOT_TIME: re.Pattern = re.compile(r"btime (\d+)")

-GPIO_DEVICES = Path("/sys/class/gpio")
-SOC_DEVICES = Path("/sys/devices/platform/soc")
-RE_TTY = re.compile(r"tty[A-Z]+")
+GPIO_DEVICES: Path = Path("/sys/class/gpio")
+SOC_DEVICES: Path = Path("/sys/devices/platform/soc")
+RE_TTY: re.Pattern = re.compile(r"tty[A-Z]+")


 class Hardware:
@@ -32,13 +33,21 @@ class Hardware:
         self.context = pyudev.Context()

     @property
-    def serial_devices(self):
+    def serial_devices(self) -> Set[str]:
         """Return all serial and connected devices."""
-        dev_list = set()
+        dev_list: Set[str] = set()
         for device in self.context.list_devices(subsystem="tty"):
             if "ID_VENDOR" in device.properties or RE_TTY.search(device.device_node):
                 dev_list.add(device.device_node)

+        return dev_list
+
+    @property
+    def serial_by_id(self) -> Set[str]:
+        """Return all /dev/serial/by-id for serial devices."""
+        dev_list: Set[str] = set()
+        for device in self.context.list_devices(subsystem="tty"):
+            if "ID_VENDOR" in device.properties or RE_TTY.search(device.device_node):
                 # Add /dev/serial/by-id devlink for current device
                 for dev_link in device.device_links:
                     if not dev_link.startswith("/dev/serial/by-id"):
@@ -48,9 +57,9 @@ class Hardware:
         return dev_list

     @property
-    def input_devices(self):
+    def input_devices(self) -> Set[str]:
         """Return all input devices."""
-        dev_list = set()
+        dev_list: Set[str] = set()
         for device in self.context.list_devices(subsystem="input"):
             if "NAME" in device.properties:
                 dev_list.add(device.properties["NAME"].replace('"', ""))
@@ -58,9 +67,9 @@ class Hardware:
         return dev_list

     @property
-    def disk_devices(self):
+    def disk_devices(self) -> Set[str]:
         """Return all disk devices."""
-        dev_list = set()
+        dev_list: Set[str] = set()
         for device in self.context.list_devices(subsystem="block"):
             if "ID_NAME" in device.properties:
                 dev_list.add(device.device_node)
@@ -68,15 +77,15 @@ class Hardware:
         return dev_list

     @property
-    def support_audio(self):
+    def support_audio(self) -> bool:
         """Return True if the system have audio support."""
         return bool(self.audio_devices)

     @property
-    def audio_devices(self):
+    def audio_devices(self) -> Dict[str, Any]:
         """Return all available audio interfaces."""
         if not ASOUND_CARDS.exists():
-            _LOGGER.debug("No audio devices found")
+            _LOGGER.info("No audio devices found")
             return {}

         try:
@@ -86,7 +95,7 @@ class Hardware:
             _LOGGER.error("Can't read asound data: %s", err)
             return {}

-        audio_list = {}
+        audio_list: Dict[str, Any] = {}

         # parse cards
         for match in RE_CARDS.finditer(cards):
@@ -109,31 +118,31 @@ class Hardware:
         return audio_list

     @property
-    def support_gpio(self):
+    def support_gpio(self) -> bool:
         """Return True if device support GPIOs."""
         return SOC_DEVICES.exists() and GPIO_DEVICES.exists()

     @property
-    def gpio_devices(self):
+    def gpio_devices(self) -> Set[str]:
         """Return list of GPIO interface on device."""
-        dev_list = set()
+        dev_list: Set[str] = set()
         for interface in GPIO_DEVICES.glob("gpio*"):
             dev_list.add(interface.name)

         return dev_list

     @property
-    def last_boot(self):
+    def last_boot(self) -> Optional[str]:
         """Return last boot time."""
         try:
             with PROC_STAT.open("r") as stat_file:
-                stats = stat_file.read()
+                stats: str = stat_file.read()
         except OSError as err:
             _LOGGER.error("Can't read stat data: %s", err)
             return None

         # parse stat file
-        found = RE_BOOT_TIME.search(stats)
+        found: Optional[re.Match] = RE_BOOT_TIME.search(stats)
         if not found:
             _LOGGER.error("Can't found last boot time!")
             return None
@@ -3,7 +3,7 @@ import asyncio
 from datetime import date, datetime, time, timedelta
 import logging

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)

 INTERVAL = "interval"
 REPEAT = "repeat"
@@ -19,7 +19,7 @@ from ..const import (
 )
 from ..interface import ServiceInterface

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 # pylint: disable=no-value-for-parameter
@@ -9,7 +9,7 @@ from ..const import FOLDER_HOMEASSISTANT, SNAPSHOT_FULL, SNAPSHOT_PARTIAL
 from ..coresys import CoreSysAttributes
 from ..utils.dt import utcnow

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 class SnapshotManager(CoreSysAttributes):
@@ -45,7 +45,7 @@ from ..utils.tar import SecureTarFile
 from .utils import key_to_iv, password_for_validating, password_to_key, remove_folder
 from .validate import ALL_FOLDERS, SCHEMA_SNAPSHOT

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 class Snapshot(CoreSysAttributes):
@@ -9,7 +9,7 @@ from .addon import AddonStore
 from .data import StoreData
 from .repository import Repository

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)

 BUILTIN_REPOSITORIES = set((REPOSITORY_CORE, REPOSITORY_LOCAL))

@@ -4,7 +4,7 @@ import logging
 from ..coresys import CoreSys
 from ..addons.model import AddonModel, Data

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 class AddonStore(AddonModel):
@@ -20,7 +20,7 @@ from ..utils.json import read_json_file
 from .utils import extract_hash_from_path
 from .validate import SCHEMA_REPOSITORY_CONFIG

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 class StoreData(CoreSysAttributes):
@@ -12,7 +12,7 @@ from ..const import URL_HASSIO_ADDONS, ATTR_URL, ATTR_BRANCH
 from ..coresys import CoreSysAttributes
 from ..validate import RE_REPOSITORY

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 class GitRepo(CoreSysAttributes):
@@ -5,7 +5,7 @@ from pathlib import Path
 import re

 RE_SHA1 = re.compile(r"[a-f0-9]{8}")
-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 def get_hash_from_repository(name: str) -> str:
@@ -20,7 +20,7 @@ from .exceptions import (
     SupervisorUpdateError,
 )

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 class Supervisor(CoreSysAttributes):
@@ -109,7 +109,7 @@ class Supervisor(CoreSysAttributes):

         _LOGGER.info("Update Supervisor to version %s", version)
         try:
-            await self.instance.install(version, latest=True)
+            await self.instance.update(version, latest=True)
         except DockerAPIError:
             _LOGGER.error("Update of Hass.io fails!")
             raise SupervisorUpdateError() from None
@@ -3,25 +3,28 @@ import asyncio
 import logging

 from .coresys import CoreSysAttributes
-from .exceptions import HomeAssistantError
+from .exceptions import HomeAssistantError, CoreDNSError

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)

 HASS_WATCHDOG_API = "HASS_WATCHDOG_API"

 RUN_UPDATE_SUPERVISOR = 29100
 RUN_UPDATE_ADDONS = 57600
-RUN_UPDATE_HASSOSCLI = 29100
+RUN_UPDATE_HASSOSCLI = 28100
+RUN_UPDATE_DNS = 30100

-RUN_RELOAD_ADDONS = 21600
+RUN_RELOAD_ADDONS = 10800
 RUN_RELOAD_SNAPSHOTS = 72000
 RUN_RELOAD_HOST = 72000
-RUN_RELOAD_UPDATER = 21600
+RUN_RELOAD_UPDATER = 7200
 RUN_RELOAD_INGRESS = 930

 RUN_WATCHDOG_HOMEASSISTANT_DOCKER = 15
 RUN_WATCHDOG_HOMEASSISTANT_API = 300

+RUN_WATCHDOG_DNS_DOCKER = 20
+

 class Tasks(CoreSysAttributes):
     """Handle Tasks inside Hass.io."""
@@ -48,6 +51,9 @@ class Tasks(CoreSysAttributes):
                 self._update_hassos_cli, RUN_UPDATE_HASSOSCLI
             )
         )
+        self.jobs.add(
+            self.sys_scheduler.register_task(self._update_dns, RUN_UPDATE_DNS)
+        )

         # Reload
         self.jobs.add(
@@ -83,6 +89,11 @@ class Tasks(CoreSysAttributes):
                 self._watchdog_homeassistant_api, RUN_WATCHDOG_HOMEASSISTANT_API
             )
         )
+        self.jobs.add(
+            self.sys_scheduler.register_task(
+                self._watchdog_dns_docker, RUN_WATCHDOG_DNS_DOCKER
+            )
+        )

         _LOGGER.info("All core tasks are scheduled")

@@ -194,3 +205,32 @@ class Tasks(CoreSysAttributes):

         _LOGGER.info("Found new HassOS CLI version")
         await self.sys_hassos.update_cli()
+
+    async def _update_dns(self):
+        """Check and run update of CoreDNS plugin."""
+        if not self.sys_dns.need_update:
+            return
+
+        # don't perform an update on dev channel
+        if self.sys_dev:
+            _LOGGER.warning("Ignore CoreDNS update on dev channel!")
+            return
+
+        _LOGGER.info("Found new CoreDNS plugin version")
+        await self.sys_dns.update()
+
+    async def _watchdog_dns_docker(self):
+        """Check running state of Docker and start if they is close."""
+        # if Home Assistant is active
+        if await self.sys_dns.is_running():
+            return
+        _LOGGER.warning("Watchdog found a problem with CoreDNS plugin!")
+
+        if await self.sys_dns.is_fails():
+            _LOGGER.warning("CoreDNS plugin is in fails state / Reset config")
+            await self.sys_dns.reset()
+
+        try:
+            await self.sys_dns.start()
+        except CoreDNSError:
+            _LOGGER.error("Watchdog CoreDNS reanimation fails!")
@@ -4,25 +4,27 @@ from contextlib import suppress
 from datetime import timedelta
 import json
 import logging
+from typing import Optional

 import aiohttp

 from .const import (
-    URL_HASSIO_VERSION,
-    FILE_HASSIO_UPDATER,
-    ATTR_HOMEASSISTANT,
-    ATTR_HASSIO,
     ATTR_CHANNEL,
+    ATTR_DNS,
+    ATTR_HASSIO,
     ATTR_HASSOS,
     ATTR_HASSOS_CLI,
+    ATTR_HOMEASSISTANT,
+    FILE_HASSIO_UPDATER,
+    URL_HASSIO_VERSION,
 )
 from .coresys import CoreSysAttributes
+from .exceptions import HassioUpdaterError
 from .utils import AsyncThrottle
 from .utils.json import JsonConfig
 from .validate import SCHEMA_UPDATER_CONFIG
-from .exceptions import HassioUpdaterError

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 class Updater(JsonConfig, CoreSysAttributes):
@@ -33,43 +35,48 @@ class Updater(JsonConfig, CoreSysAttributes):
         super().__init__(FILE_HASSIO_UPDATER, SCHEMA_UPDATER_CONFIG)
         self.coresys = coresys

-    async def load(self):
+    async def load(self) -> None:
         """Update internal data."""
         with suppress(HassioUpdaterError):
             await self.fetch_data()

-    async def reload(self):
+    async def reload(self) -> None:
         """Update internal data."""
         with suppress(HassioUpdaterError):
             await self.fetch_data()

     @property
-    def version_homeassistant(self):
-        """Return last version of Home Assistant."""
+    def version_homeassistant(self) -> Optional[str]:
+        """Return latest version of Home Assistant."""
         return self._data.get(ATTR_HOMEASSISTANT)

     @property
-    def version_hassio(self):
-        """Return last version of Hass.io."""
+    def version_hassio(self) -> Optional[str]:
+        """Return latest version of Hass.io."""
         return self._data.get(ATTR_HASSIO)

     @property
-    def version_hassos(self):
-        """Return last version of HassOS."""
+    def version_hassos(self) -> Optional[str]:
+        """Return latest version of HassOS."""
         return self._data.get(ATTR_HASSOS)

     @property
-    def version_hassos_cli(self):
-        """Return last version of HassOS cli."""
+    def version_hassos_cli(self) -> Optional[str]:
+        """Return latest version of HassOS cli."""
         return self._data.get(ATTR_HASSOS_CLI)

     @property
-    def channel(self):
+    def version_dns(self) -> Optional[str]:
+        """Return latest version of Hass.io DNS."""
+        return self._data.get(ATTR_DNS)
+
+    @property
+    def channel(self) -> str:
         """Return upstream channel of Hass.io instance."""
         return self._data[ATTR_CHANNEL]

     @channel.setter
-    def channel(self, value):
+    def channel(self, value: str):
         """Set upstream mode."""
         self._data[ATTR_CHANNEL] = value

@@ -104,6 +111,7 @@ class Updater(JsonConfig, CoreSysAttributes):
         try:
             # update supervisor version
             self._data[ATTR_HASSIO] = data["supervisor"]
+            self._data[ATTR_DNS] = data["dns"]

             # update Home Assistant version
             self._data[ATTR_HOMEASSISTANT] = data["homeassistant"][machine]
@@ -5,7 +5,7 @@ import logging
 import re
 import socket

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)
 RE_STRING = re.compile(r"\x1b(\[.*?[@-~]|\].*?(\x07|\x1b\\))")


@@ -4,7 +4,7 @@ import re

 from ..exceptions import AppArmorFileError, AppArmorInvalidError

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)

 RE_PROFILE = re.compile(r"^profile ([^ ]+).*$")

@@ -12,7 +12,7 @@ UTC = pytz.utc

 GEOIP_URL = "http://ip-api.com/json/"

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 # Copyright (c) Django Software Foundation and individual contributors.
@@ -1,79 +1,96 @@
|
|||||||
"""DBus implementation with glib."""
|
"""DBus implementation with glib."""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import asyncio
|
import asyncio
|
||||||
import logging
|
import logging
|
||||||
import json
|
import json
|
||||||
import shlex
|
import shlex
|
||||||
import re
|
import re
|
||||||
from signal import SIGINT
|
from signal import SIGINT
|
||||||
|
from typing import Any, Dict, List, Optional, Set
|
||||||
import xml.etree.ElementTree as ET
|
import xml.etree.ElementTree as ET
|
||||||
|
|
||||||
from ..exceptions import DBusFatalError, DBusParseError
|
from ..exceptions import (
|
||||||
|
DBusFatalError,
|
||||||
|
DBusParseError,
|
||||||
|
DBusInterfaceError,
|
||||||
|
DBusNotConnectedError,
|
||||||
|
)
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
# Use to convert GVariant into json
|
# Use to convert GVariant into json
|
||||||
RE_GVARIANT_TYPE = re.compile(
|
RE_GVARIANT_TYPE: re.Match = re.compile(
|
||||||
r"(?:boolean|byte|int16|uint16|int32|uint32|handle|int64|uint64|double|"
|
r"(?:boolean|byte|int16|uint16|int32|uint32|handle|int64|uint64|double|"
|
||||||
r"string|objectpath|signature) "
|
r"string|objectpath|signature) "
|
||||||
)
|
)
|
||||||
RE_GVARIANT_VARIANT = re.compile(
|
RE_GVARIANT_VARIANT: re.Match = re.compile(r"\"[^\"\\]*(?:\\.[^\"\\]*)*\"|(<|>)")
|
||||||
r"(?<=(?: |{|\[))<((?:'|\").*?(?:'|\")|\d+(?:\.\d+)?)>(?=(?:|]|}|,))"
|
RE_GVARIANT_STRING_ESC: re.Match = re.compile(
|
||||||
|
r"(?<=(?: |{|\[|\(|<))'[^']*?\"[^']*?'(?=(?:|]|}|,|\)|>))"
|
||||||
)
|
)
|
||||||
RE_GVARIANT_STRING = re.compile(r"(?<=(?: |{|\[|\())'(.*?)'(?=(?:|]|}|,|\)))")
|
RE_GVARIANT_STRING: re.Match = re.compile(
|
||||||
RE_GVARIANT_TUPLE_O = re.compile(r"\"[^\"]*?\"|(\()")
|
r"(?<=(?: |{|\[|\(|<))'(.*?)'(?=(?:|]|}|,|\)|>))"
|
||||||
RE_GVARIANT_TUPLE_C = re.compile(r"\"[^\"]*?\"|(,?\))")
|
)
|
||||||
|
RE_GVARIANT_TUPLE_O: re.Match = re.compile(r"\"[^\"\\]*(?:\\.[^\"\\]*)*\"|(\()")
|
||||||
|
RE_GVARIANT_TUPLE_C: re.Match = re.compile(r"\"[^\"\\]*(?:\\.[^\"\\]*)*\"|(,?\))")
|
||||||
|
|
||||||
RE_MONITOR_OUTPUT = re.compile(r".+?: (?P<signal>[^ ].+) (?P<data>.*)")
|
RE_MONITOR_OUTPUT: re.Match = re.compile(r".+?: (?P<signal>[^ ].+) (?P<data>.*)")
|
||||||
|
|
||||||
|
# Map GDBus to errors
|
||||||
|
MAP_GDBUS_ERROR: Dict[str, Any] = {
|
||||||
|
"GDBus.Error:org.freedesktop.DBus.Error.ServiceUnknown": DBusInterfaceError,
|
||||||
|
"No such file or directory": DBusNotConnectedError,
|
||||||
|
}
|
||||||
|
|
||||||
# Commands for dbus
|
# Commands for dbus
|
||||||
INTROSPECT = "gdbus introspect --system --dest {bus} " "--object-path {object} --xml"
|
INTROSPECT: str = "gdbus introspect --system --dest {bus} " "--object-path {object} --xml"
|
||||||
CALL = (
|
CALL: str = (
|
||||||
"gdbus call --system --dest {bus} --object-path {object} "
|
"gdbus call --system --dest {bus} --object-path {object} "
|
||||||
"--method {method} {args}"
|
"--method {method} {args}"
|
||||||
)
|
)
|
||||||
MONITOR = "gdbus monitor --system --dest {bus}"
|
MONITOR: str = "gdbus monitor --system --dest {bus}"
|
||||||
|
|
||||||
DBUS_METHOD_GETALL = "org.freedesktop.DBus.Properties.GetAll"
|
DBUS_METHOD_GETALL: str = "org.freedesktop.DBus.Properties.GetAll"
|
||||||
|
|
||||||
|
|
||||||
class DBus:
|
class DBus:
|
||||||
"""DBus handler."""
|
"""DBus handler."""
|
||||||
|
|
||||||
def __init__(self, bus_name, object_path):
|
def __init__(self, bus_name: str, object_path: str) -> None:
|
||||||
"""Initialize dbus object."""
|
"""Initialize dbus object."""
|
||||||
self.bus_name = bus_name
|
self.bus_name: str = bus_name
|
||||||
self.object_path = object_path
|
self.object_path: str = object_path
|
||||||
self.methods = set()
|
self.methods: Set[str] = set()
|
||||||
self.signals = set()
|
self.signals: Set[str] = set()
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def connect(bus_name, object_path):
|
async def connect(bus_name: str, object_path: str) -> DBus:
|
||||||
"""Read object data."""
|
"""Read object data."""
|
||||||
self = DBus(bus_name, object_path)
|
self = DBus(bus_name, object_path)
|
||||||
await self._init_proxy() # pylint: disable=protected-access
|
|
||||||
|
# pylint: disable=protected-access
|
||||||
|
await self._init_proxy()
|
||||||
|
|
||||||
_LOGGER.info("Connect to dbus: %s - %s", bus_name, object_path)
|
_LOGGER.info("Connect to dbus: %s - %s", bus_name, object_path)
|
||||||
return self
|
return self
|
||||||
|
|
||||||
async def _init_proxy(self):
|
async def _init_proxy(self) -> None:
|
||||||
"""Read interface data."""
|
"""Read interface data."""
|
||||||
command = shlex.split(
|
command = shlex.split(
|
||||||
INTROSPECT.format(bus=self.bus_name, object=self.object_path)
|
INTROSPECT.format(bus=self.bus_name, object=self.object_path)
|
||||||
)
|
)
|
||||||
|
|
||||||
# Ask data
|
|
||||||
_LOGGER.info("Introspect %s on %s", self.bus_name, self.object_path)
|
|
||||||
data = await self._send(command)
|
|
||||||
|
|
||||||
# Parse XML
|
# Parse XML
|
||||||
|
data = await self._send(command)
|
||||||
try:
|
try:
|
||||||
xml = ET.fromstring(data)
|
xml = ET.fromstring(data)
|
||||||
except ET.ParseError as err:
|
except ET.ParseError as err:
|
||||||
_LOGGER.error("Can't parse introspect data: %s", err)
|
_LOGGER.error("Can't parse introspect data: %s", err)
|
||||||
|
_LOGGER.debug("Introspect %s on %s", self.bus_name, self.object_path)
|
||||||
raise DBusParseError() from None
|
raise DBusParseError() from None
|
||||||
|
|
||||||
# Read available methods
|
# Read available methods
|
||||||
_LOGGER.debug("data: %s", data)
|
_LOGGER.debug("Introspect XML: %s", data)
|
||||||
for interface in xml.findall("./interface"):
|
for interface in xml.findall("./interface"):
|
||||||
interface_name = interface.get("name")
|
interface_name = interface.get("name")
|
||||||
|
|
||||||
@@ -88,30 +105,36 @@ class DBus:
|
|||||||
self.signals.add(f"{interface_name}.{signal_name}")
|
self.signals.add(f"{interface_name}.{signal_name}")
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def parse_gvariant(raw):
|
def parse_gvariant(raw: str) -> Any:
|
||||||
"""Parse GVariant input to python."""
|
"""Parse GVariant input to python."""
|
||||||
raw = RE_GVARIANT_TYPE.sub("", raw)
|
json_raw: str = RE_GVARIANT_TYPE.sub("", raw)
|
||||||
raw = RE_GVARIANT_VARIANT.sub(r"\1", raw)
|
json_raw = RE_GVARIANT_STRING_ESC.sub(
|
||||||
raw = RE_GVARIANT_STRING.sub(r'"\1"', raw)
|
lambda x: x.group(0).replace('"', '\\"'), json_raw
|
||||||
raw = RE_GVARIANT_TUPLE_O.sub(
|
|
||||||
lambda x: x.group(0) if not x.group(1) else "[", raw
|
|
||||||
)
|
)
|
||||||
raw = RE_GVARIANT_TUPLE_C.sub(
|
json_raw = RE_GVARIANT_STRING.sub(r'"\1"', json_raw)
|
||||||
lambda x: x.group(0) if not x.group(1) else "]", raw
|
json_raw = RE_GVARIANT_VARIANT.sub(
|
||||||
|
lambda x: x.group(0) if not x.group(1) else "", json_raw
|
||||||
|
)
|
||||||
|
json_raw = RE_GVARIANT_TUPLE_O.sub(
|
||||||
|
lambda x: x.group(0) if not x.group(1) else "[", json_raw
|
||||||
|
)
|
||||||
|
json_raw = RE_GVARIANT_TUPLE_C.sub(
|
||||||
|
lambda x: x.group(0) if not x.group(1) else "]", json_raw
|
||||||
)
|
)
|
||||||
|
|
||||||
# No data
|
# No data
|
||||||
if raw.startswith("[]"):
|
if json_raw.startswith("[]"):
|
||||||
return []
|
return []
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return json.loads(raw)
|
return json.loads(json_raw)
|
||||||
except json.JSONDecodeError as err:
|
except json.JSONDecodeError as err:
|
||||||
_LOGGER.error("Can't parse '%s': %s", raw, err)
|
_LOGGER.error("Can't parse '%s': %s", json_raw, err)
|
||||||
|
_LOGGER.debug("GVariant data: '%s'", raw)
|
||||||
raise DBusParseError() from None
|
raise DBusParseError() from None
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def gvariant_args(args):
|
def gvariant_args(args: List[Any]) -> str:
|
||||||
"""Convert args into gvariant."""
|
"""Convert args into gvariant."""
|
||||||
gvariant = ""
|
gvariant = ""
|
||||||
for arg in args:
|
for arg in args:
|
||||||
@@ -122,11 +145,11 @@ class DBus:
|
|||||||
elif isinstance(arg, str):
|
elif isinstance(arg, str):
|
||||||
gvariant += f' "{arg}"'
|
gvariant += f' "{arg}"'
|
||||||
else:
|
else:
|
||||||
gvariant += " {}".format(str(arg))
|
gvariant += f" {arg!s}"
|
||||||
|
|
||||||
return gvariant.lstrip()
|
return gvariant.lstrip()
|
||||||
|
|
||||||
async def call_dbus(self, method, *args):
|
async def call_dbus(self, method: str, *args: List[Any]) -> str:
|
||||||
"""Call a dbus method."""
|
"""Call a dbus method."""
|
||||||
command = shlex.split(
|
command = shlex.split(
|
||||||
CALL.format(
|
CALL.format(
|
||||||
@@ -144,7 +167,7 @@ class DBus:
|
|||||||
# Parse and return data
|
# Parse and return data
|
||||||
return self.parse_gvariant(data)
|
return self.parse_gvariant(data)
|
||||||
|
|
||||||
async def get_properties(self, interface):
|
async def get_properties(self, interface: str) -> Dict[str, Any]:
|
||||||
"""Read all properties from interface."""
|
"""Read all properties from interface."""
|
||||||
try:
|
try:
|
||||||
return (await self.call_dbus(DBUS_METHOD_GETALL, interface))[0]
|
return (await self.call_dbus(DBUS_METHOD_GETALL, interface))[0]
|
||||||
@@ -152,7 +175,7 @@ class DBus:
|
|||||||
_LOGGER.error("No attributes returned for %s", interface)
|
_LOGGER.error("No attributes returned for %s", interface)
|
||||||
raise DBusFatalError from None
|
raise DBusFatalError from None
|
||||||
|
|
||||||
async def _send(self, command):
|
async def _send(self, command: List[str]) -> str:
|
||||||
"""Send command over dbus."""
|
"""Send command over dbus."""
|
||||||
# Run command
|
# Run command
|
||||||
_LOGGER.debug("Send dbus command: %s", command)
|
_LOGGER.debug("Send dbus command: %s", command)
|
||||||
@@ -170,12 +193,19 @@ class DBus:
|
|||||||
raise DBusFatalError() from None
|
raise DBusFatalError() from None
|
||||||
|
|
||||||
# Success?
|
# Success?
|
||||||
if proc.returncode != 0:
|
if proc.returncode == 0:
|
||||||
_LOGGER.error("DBus return error: %s", error)
|
return data.decode()
|
||||||
raise DBusFatalError()
|
|
||||||
|
|
||||||
# End
|
# Filter error
|
||||||
return data.decode()
|
error = error.decode()
|
||||||
|
for msg, exception in MAP_GDBUS_ERROR.items():
|
||||||
|
if msg not in error:
|
||||||
|
continue
|
||||||
|
raise exception()
|
||||||
|
|
||||||
|
# General
|
||||||
|
_LOGGER.error("DBus return error: %s", error)
|
||||||
|
raise DBusFatalError()
|
||||||
|
|
||||||
     def attach_signals(self, filters=None):
         """Generate a signals wrapper."""
@@ -188,7 +218,7 @@ class DBus:
         async for signal in signals:
             return signal

-    def __getattr__(self, name):
+    def __getattr__(self, name: str) -> DBusCallWrapper:
         """Mapping to dbus method."""
         return getattr(DBusCallWrapper(self, self.bus_name), name)

@@ -196,17 +226,17 @@
 class DBusCallWrapper:
     """Wrapper a DBus interface for a call."""

-    def __init__(self, dbus, interface):
+    def __init__(self, dbus: DBus, interface: str) -> None:
         """Initialize wrapper."""
-        self.dbus = dbus
-        self.interface = interface
+        self.dbus: DBus = dbus
+        self.interface: str = interface

-    def __call__(self):
+    def __call__(self) -> None:
         """Should never be called."""
         _LOGGER.error("DBus method %s not exists!", self.interface)
         raise DBusFatalError()

-    def __getattr__(self, name):
+    def __getattr__(self, name: str):
         """Mapping to dbus method."""
         interface = f"{self.interface}.{name}"

@@ -226,11 +256,11 @@ class DBusCallWrapper:
 class DBusSignalWrapper:
     """Process Signals."""

-    def __init__(self, dbus, signals=None):
+    def __init__(self, dbus: DBus, signals: Optional[str] = None):
         """Initialize dbus signal wrapper."""
-        self.dbus = dbus
-        self._signals = signals
-        self._proc = None
+        self.dbus: DBus = dbus
+        self._signals: Optional[str] = signals
+        self._proc: Optional[asyncio.Process] = None

     async def __aenter__(self):
         """Start monitor events."""

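DBus.__getattr__ together with DBusCallWrapper.__getattr__ turns dotted attribute access into the interface/method string that is ultimately dispatched through call_dbus. A hypothetical usage sketch (bus name, interface and method names are placeholders, and construction of the DBus object is not part of this diff):

    # Sketch only: each attribute access appends ".<name>" to the interface,
    # so the chain below resolves to "org.freedesktop.systemd1.Manager.ListUnits".
    async def list_units(systemd: DBus):
        return await systemd.Manager.ListUnits()
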
@@ -9,7 +9,7 @@ from voluptuous.humanize import humanize_error

 from ..exceptions import JsonFileError

-_LOGGER = logging.getLogger(__name__)
+_LOGGER: logging.Logger = logging.getLogger(__name__)


 def write_json_file(jsonfile: Path, data: Any) -> None:

@@ -11,6 +11,7 @@ from .const import (
     ATTR_CHANNEL,
     ATTR_DEBUG,
     ATTR_DEBUG_BLOCK,
+    ATTR_DNS,
     ATTR_HASSIO,
     ATTR_HASSOS,
     ATTR_HASSOS_CLI,
@@ -23,6 +24,7 @@ from .const import (
     ATTR_PORT,
     ATTR_PORTS,
     ATTR_REFRESH_TOKEN,
+    ATTR_SERVERS,
     ATTR_SESSION,
     ATTR_SSL,
     ATTR_TIMEZONE,
@@ -33,11 +35,13 @@ from .const import (
     CHANNEL_BETA,
     CHANNEL_DEV,
     CHANNEL_STABLE,
+    DNS_SERVERS,
 )
 from .utils.validate import validate_timezone

 RE_REPOSITORY = re.compile(r"^(?P<url>[^#]+)(?:#(?P<branch>[\w\-]+))?$")

+# pylint: disable=no-value-for-parameter
 NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))
 WAIT_BOOT = vol.All(vol.Coerce(int), vol.Range(min=1, max=60))
 DOCKER_IMAGE = vol.Match(r"^[\w{}]+/[\-\w{}]+$")
@@ -47,6 +51,8 @@ UUID_MATCH = vol.Match(r"^[0-9a-f]{32}$")
 SHA256 = vol.Match(r"^[0-9a-f]{64}$")
 TOKEN = vol.Match(r"^[0-9a-f]{32,256}$")
 LOG_LEVEL = vol.In(["debug", "info", "warning", "error", "critical"])
+DNS_URL = vol.Match(r"^dns://\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$")
+DNS_SERVER_LIST = vol.All([DNS_URL], vol.Length(max=8))


 def validate_repository(repository):
@@ -108,6 +114,7 @@ SCHEMA_UPDATER_CONFIG = vol.Schema(
         vol.Optional(ATTR_HASSIO): vol.Coerce(str),
         vol.Optional(ATTR_HASSOS): vol.Coerce(str),
         vol.Optional(ATTR_HASSOS_CLI): vol.Coerce(str),
+        vol.Optional(ATTR_DNS): vol.Coerce(str),
     },
     extra=vol.REMOVE_EXTRA,
 )
@@ -145,3 +152,12 @@ SCHEMA_INGRESS_CONFIG = vol.Schema(
     },
     extra=vol.REMOVE_EXTRA,
 )
+
+
+SCHEMA_DNS_CONFIG = vol.Schema(
+    {
+        vol.Optional(ATTR_VERSION): vol.Maybe(vol.Coerce(str)),
+        vol.Optional(ATTR_SERVERS, default=DNS_SERVERS): DNS_SERVER_LIST,
+    },
+    extra=vol.REMOVE_EXTRA,
+)

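A quick illustration of what the new DNS validators accept (values are illustrative; this assumes ATTR_SERVERS maps to the key "servers" and that DNS_SERVERS is the project's default server list, neither of which is shown in this diff):

    # Sketch only: vol.Match returns the value when the regex matches,
    # otherwise raises vol.Invalid; REMOVE_EXTRA drops unknown keys.
    DNS_URL("dns://8.8.8.8")                             # ok
    DNS_SERVER_LIST(["dns://1.1.1.1", "dns://8.8.4.4"])  # ok, at most 8 entries

    SCHEMA_DNS_CONFIG({})
    # -> {"servers": DNS_SERVERS}  (default applied, version stays unset)
    SCHEMA_DNS_CONFIG({"servers": ["dns://192.168.1.1"], "unknown": 1})
    # -> {"servers": ["dns://192.168.1.1"]}
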
@@ -6,9 +6,9 @@ colorlog==4.0.2
 cpe==1.2.1
 cryptography==2.7
 docker==4.0.2
-gitpython==2.1.13
+gitpython==3.0.2
 pytz==2019.2
 pyudev==0.21.0
 uvloop==0.12.2
-voluptuous==0.11.5
-ptvsd==4.3.0
+voluptuous==0.11.7
+ptvsd==4.3.2

@@ -1,5 +1,5 @@
 flake8==3.7.8
 pylint==2.3.1
-pytest==5.0.1
+pytest==5.1.1
 pytest-timeout==1.3.3
 pytest-aiohttp==0.3.0

scripts/test_env.sh (new executable file, 102 lines)
@@ -0,0 +1,102 @@
#!/bin/bash
set -eE

DOCKER_TIMEOUT=30
DOCKER_PID=0


function start_docker() {
    local starttime
    local endtime

    echo "Starting docker."
    dockerd 2> /dev/null &
    DOCKER_PID=$!

    echo "Waiting for docker to initialize..."
    starttime="$(date +%s)"
    endtime="$(date +%s)"
    until docker info >/dev/null 2>&1; do
        if [ $((endtime - starttime)) -le $DOCKER_TIMEOUT ]; then
            sleep 1
            endtime=$(date +%s)
        else
            echo "Timeout while waiting for docker to come up"
            exit 1
        fi
    done
    echo "Docker was initialized"
}


function stop_docker() {
    local starttime
    local endtime

    echo "Stopping in container docker..."
    if [ "$DOCKER_PID" -gt 0 ] && kill -0 "$DOCKER_PID" 2> /dev/null; then
        starttime="$(date +%s)"
        endtime="$(date +%s)"

        # Now wait for it to die
        kill "$DOCKER_PID"
        while kill -0 "$DOCKER_PID" 2> /dev/null; do
            if [ $((endtime - starttime)) -le $DOCKER_TIMEOUT ]; then
                sleep 1
                endtime=$(date +%s)
            else
                echo "Timeout while waiting for container docker to die"
                exit 1
            fi
        done
    else
        echo "Your host might have been left with unreleased resources"
    fi
}


function build_supervisor() {
    docker pull homeassistant/amd64-builder:dev

    docker run --rm --privileged \
        -v /run/docker.sock:/run/docker.sock -v "$(pwd):/data" \
        homeassistant/amd64-builder:dev \
        --supervisor 3.7-alpine3.10 --version dev \
        -t /data --test --amd64 \
        --no-cache --docker-hub homeassistant
}


function install_cli() {
    docker pull homeassistant/amd64-hassio-cli:dev
}


function setup_test_env() {
    mkdir -p /workspaces/test_hassio

    docker run --rm --privileged \
        --name hassio_supervisor \
        --security-opt seccomp=unconfined \
        --security-opt apparmor:unconfined \
        -v /run/docker.sock:/run/docker.sock \
        -v /run/dbus:/run/dbus \
        -v "/workspaces/test_hassio":/data \
        -v /etc/machine-id:/etc/machine-id:ro \
        -e SUPERVISOR_SHARE="/workspaces/test_hassio" \
        -e SUPERVISOR_NAME=hassio_supervisor \
        -e SUPERVISOR_DEV=1 \
        -e HOMEASSISTANT_REPOSITORY="homeassistant/qemux86-64-homeassistant" \
        homeassistant/amd64-hassio-supervisor:latest
}

echo "Start Test-Env"

start_docker
trap "stop_docker" ERR


build_supervisor
install_cli
setup_test_env
stop_docker
script/update-frontend.sh → scripts/update-frontend.sh (Normal file → Executable file, 0 changes)
tests/utils/test_gvariant_parser.py (new file, 302 lines)
@@ -0,0 +1,302 @@
"""Test gdbus gvariant parser."""
from hassio.utils.gdbus import DBus


def test_simple_return():
    """Test Simple return value."""
    raw = "(objectpath '/org/freedesktop/systemd1/job/35383',)"

    # parse data
    data = DBus.parse_gvariant(raw)

    assert data == ["/org/freedesktop/systemd1/job/35383"]


def test_get_property():
    """Test Property parsing."""
    raw = "({'Hostname': <'hassio'>, 'StaticHostname': <'hassio'>, 'PrettyHostname': <''>, 'IconName': <'computer-embedded'>, 'Chassis': <'embedded'>, 'Deployment': <'production'>, 'Location': <''>, 'KernelName': <'Linux'>, 'KernelRelease': <'4.14.98-v7'>, 'KernelVersion': <'#1 SMP Sat May 11 02:17:06 UTC 2019'>, 'OperatingSystemPrettyName': <'HassOS 2.12'>, 'OperatingSystemCPEName': <'cpe:2.3:o:home_assistant:hassos:2.12:*:production:*:*:*:rpi3:*'>, 'HomeURL': <'https://hass.io/'>},)"

    # parse data
    data = DBus.parse_gvariant(raw)

    assert data[0] == {
        "Hostname": "hassio",
        "StaticHostname": "hassio",
        "PrettyHostname": "",
        "IconName": "computer-embedded",
        "Chassis": "embedded",
        "Deployment": "production",
        "Location": "",
        "KernelName": "Linux",
        "KernelRelease": "4.14.98-v7",
        "KernelVersion": "#1 SMP Sat May 11 02:17:06 UTC 2019",
        "OperatingSystemPrettyName": "HassOS 2.12",
        "OperatingSystemCPEName": "cpe:2.3:o:home_assistant:hassos:2.12:*:production:*:*:*:rpi3:*",
        "HomeURL": "https://hass.io/",
    }


def test_systemd_unitlist_simple():
    """Test Systemd Unit list simple."""
    raw = "([('systemd-remount-fs.service', 'Remount Root and Kernel File Systems', 'loaded', 'active', 'exited', '', objectpath '/org/freedesktop/systemd1/unit/systemd_2dremount_2dfs_2eservice', uint32 0, '', objectpath '/'), ('sys-subsystem-net-devices-veth5714b4e.device', '/sys/subsystem/net/devices/veth5714b4e', 'loaded', 'active', 'plugged', '', '/org/freedesktop/systemd1/unit/sys_2dsubsystem_2dnet_2ddevices_2dveth5714b4e_2edevice', 0, '', '/'), ('rauc.service', 'Rauc Update Service', 'loaded', 'active', 'running', '', '/org/freedesktop/systemd1/unit/rauc_2eservice', 0, '', '/'), ('mnt-data-docker-overlay2-7493c48dd99ab0e68420e3317d93711630dd55a76d4f2a21863a220031203ac2-merged.mount', '/mnt/data/docker/overlay2/7493c48dd99ab0e68420e3317d93711630dd55a76d4f2a21863a220031203ac2/merged', 'loaded', 'active', 'mounted', '', '/org/freedesktop/systemd1/unit/mnt_2ddata_2ddocker_2doverlay2_2d7493c48dd99ab0e68420e3317d93711630dd55a76d4f2a21863a220031203ac2_2dmerged_2emount', 0, '', '/'), ('hassos-hardware.target', 'HassOS hardware targets', 'loaded', 'active', 'active', '', '/org/freedesktop/systemd1/unit/hassos_2dhardware_2etarget', 0, '', '/'), ('dev-zram1.device', '/dev/zram1', 'loaded', 'active', 'plugged', 'sys-devices-virtual-block-zram1.device', '/org/freedesktop/systemd1/unit/dev_2dzram1_2edevice', 0, '', '/'), ('sys-subsystem-net-devices-hassio.device', '/sys/subsystem/net/devices/hassio', 'loaded', 'active', 'plugged', '', '/org/freedesktop/systemd1/unit/sys_2dsubsystem_2dnet_2ddevices_2dhassio_2edevice', 0, '', '/'), ('cryptsetup.target', 'cryptsetup.target', 'not-found', 'inactive', 'dead', '', '/org/freedesktop/systemd1/unit/cryptsetup_2etarget', 0, '', '/'), ('sys-devices-virtual-net-vethd256dfa.device', '/sys/devices/virtual/net/vethd256dfa', 'loaded', 'active', 'plugged', '', '/org/freedesktop/systemd1/unit/sys_2ddevices_2dvirtual_2dnet_2dvethd256dfa_2edevice', 0, '', '/'), ('network-pre.target', 'Network (Pre)', 'loaded', 'inactive', 'dead', '', '/org/freedesktop/systemd1/unit/network_2dpre_2etarget', 0, '', '/'), ('sys-devices-virtual-net-veth5714b4e.device', '/sys/devices/virtual/net/veth5714b4e', 'loaded', 'active', 'plugged', '', '/org/freedesktop/systemd1/unit/sys_2ddevices_2dvirtual_2dnet_2dveth5714b4e_2edevice', 0, '', '/'), ('sys-kernel-debug.mount', 'Kernel Debug File System', 'loaded', 'active', 'mounted', '', '/org/freedesktop/systemd1/unit/sys_2dkernel_2ddebug_2emount', 0, '', '/'), ('slices.target', 'Slices', 'loaded', 'active', 'active', '', '/org/freedesktop/systemd1/unit/slices_2etarget', 0, '', '/'), ('etc-NetworkManager-system\x2dconnections.mount', 'NetworkManager persistent system connections', 'loaded', 'active', 'mounted', '', '/org/freedesktop/systemd1/unit/etc_2dNetworkManager_2dsystem_5cx2dconnections_2emount', 0, '', '/'), ('run-docker-netns-26ede3178729.mount', '/run/docker/netns/26ede3178729', 'loaded', 'active', 'mounted', '', '/org/freedesktop/systemd1/unit/run_2ddocker_2dnetns_2d26ede3178729_2emount', 0, '', '/'), ('dev-disk-by\x2dpath-platform\x2d3f202000.mmc\x2dpart2.device', '/dev/disk/by-path/platform-3f202000.mmc-part2', 'loaded', 'active', 'plugged', 'sys-devices-platform-soc-3f202000.mmc-mmc_host-mmc0-mmc0:e624-block-mmcblk0-mmcblk0p2.device', '/org/freedesktop/systemd1/unit/dev_2ddisk_2dby_5cx2dpath_2dplatform_5cx2d3f202000_2emmc_5cx2dpart2_2edevice', 0, '', '/')],)"

    # parse data
    data = DBus.parse_gvariant(raw)

    assert data == [
        [
            [
                "systemd-remount-fs.service",
                "Remount Root and Kernel File Systems",
                "loaded",
                "active",
                "exited",
                "",
                "/org/freedesktop/systemd1/unit/systemd_2dremount_2dfs_2eservice",
                0,
                "",
                "/",
            ],
            [
                "sys-subsystem-net-devices-veth5714b4e.device",
                "/sys/subsystem/net/devices/veth5714b4e",
                "loaded",
                "active",
                "plugged",
                "",
                "/org/freedesktop/systemd1/unit/sys_2dsubsystem_2dnet_2ddevices_2dveth5714b4e_2edevice",
                0,
                "",
                "/",
            ],
            [
                "rauc.service",
                "Rauc Update Service",
                "loaded",
                "active",
                "running",
                "",
                "/org/freedesktop/systemd1/unit/rauc_2eservice",
                0,
                "",
                "/",
            ],
            [
                "mnt-data-docker-overlay2-7493c48dd99ab0e68420e3317d93711630dd55a76d4f2a21863a220031203ac2-merged.mount",
                "/mnt/data/docker/overlay2/7493c48dd99ab0e68420e3317d93711630dd55a76d4f2a21863a220031203ac2/merged",
                "loaded",
                "active",
                "mounted",
                "",
                "/org/freedesktop/systemd1/unit/mnt_2ddata_2ddocker_2doverlay2_2d7493c48dd99ab0e68420e3317d93711630dd55a76d4f2a21863a220031203ac2_2dmerged_2emount",
                0,
                "",
                "/",
            ],
            [
                "hassos-hardware.target",
                "HassOS hardware targets",
                "loaded",
                "active",
                "active",
                "",
                "/org/freedesktop/systemd1/unit/hassos_2dhardware_2etarget",
                0,
                "",
                "/",
            ],
            [
                "dev-zram1.device",
                "/dev/zram1",
                "loaded",
                "active",
                "plugged",
                "sys-devices-virtual-block-zram1.device",
                "/org/freedesktop/systemd1/unit/dev_2dzram1_2edevice",
                0,
                "",
                "/",
            ],
            [
                "sys-subsystem-net-devices-hassio.device",
                "/sys/subsystem/net/devices/hassio",
                "loaded",
                "active",
                "plugged",
                "",
                "/org/freedesktop/systemd1/unit/sys_2dsubsystem_2dnet_2ddevices_2dhassio_2edevice",
                0,
                "",
                "/",
            ],
            [
                "cryptsetup.target",
                "cryptsetup.target",
                "not-found",
                "inactive",
                "dead",
                "",
                "/org/freedesktop/systemd1/unit/cryptsetup_2etarget",
                0,
                "",
                "/",
            ],
            [
                "sys-devices-virtual-net-vethd256dfa.device",
                "/sys/devices/virtual/net/vethd256dfa",
                "loaded",
                "active",
                "plugged",
                "",
                "/org/freedesktop/systemd1/unit/sys_2ddevices_2dvirtual_2dnet_2dvethd256dfa_2edevice",
                0,
                "",
                "/",
            ],
            [
                "network-pre.target",
                "Network (Pre)",
                "loaded",
                "inactive",
                "dead",
                "",
                "/org/freedesktop/systemd1/unit/network_2dpre_2etarget",
                0,
                "",
                "/",
            ],
            [
                "sys-devices-virtual-net-veth5714b4e.device",
                "/sys/devices/virtual/net/veth5714b4e",
                "loaded",
                "active",
                "plugged",
                "",
                "/org/freedesktop/systemd1/unit/sys_2ddevices_2dvirtual_2dnet_2dveth5714b4e_2edevice",
                0,
                "",
                "/",
            ],
            [
                "sys-kernel-debug.mount",
                "Kernel Debug File System",
                "loaded",
                "active",
                "mounted",
                "",
                "/org/freedesktop/systemd1/unit/sys_2dkernel_2ddebug_2emount",
                0,
                "",
                "/",
            ],
            [
                "slices.target",
                "Slices",
                "loaded",
                "active",
                "active",
                "",
                "/org/freedesktop/systemd1/unit/slices_2etarget",
                0,
                "",
                "/",
            ],
            [
                "etc-NetworkManager-system-connections.mount",
                "NetworkManager persistent system connections",
                "loaded",
                "active",
                "mounted",
                "",
                "/org/freedesktop/systemd1/unit/etc_2dNetworkManager_2dsystem_5cx2dconnections_2emount",
                0,
                "",
                "/",
            ],
            [
                "run-docker-netns-26ede3178729.mount",
                "/run/docker/netns/26ede3178729",
                "loaded",
                "active",
                "mounted",
                "",
                "/org/freedesktop/systemd1/unit/run_2ddocker_2dnetns_2d26ede3178729_2emount",
                0,
                "",
                "/",
            ],
            [
                "dev-disk-by-path-platform-3f202000.mmc-part2.device",
                "/dev/disk/by-path/platform-3f202000.mmc-part2",
                "loaded",
                "active",
                "plugged",
                "sys-devices-platform-soc-3f202000.mmc-mmc_host-mmc0-mmc0:e624-block-mmcblk0-mmcblk0p2.device",
                "/org/freedesktop/systemd1/unit/dev_2ddisk_2dby_5cx2dpath_2dplatform_5cx2d3f202000_2emmc_5cx2dpart2_2edevice",
                0,
                "",
                "/",
            ],
        ]
    ]


def test_systemd_unitlist_complex():
    """Test Systemd Unit list simple."""
    raw = "([('systemd-remount-fs.service', 'Remount Root and \"Kernel File Systems\"', 'loaded', 'active', 'exited', '', objectpath '/org/freedesktop/systemd1/unit/systemd_2dremount_2dfs_2eservice', uint32 0, '', objectpath '/'), ('sys-subsystem-net-devices-veth5714b4e.device', '/sys/subsystem/net/devices/veth5714b4e for \" is', 'loaded', 'active', 'plugged', '', '/org/freedesktop/systemd1/unit/sys_2dsubsystem_2dnet_2ddevices_2dveth5714b4e_2edevice', 0, '', '/')],)"

    # parse data
    data = DBus.parse_gvariant(raw)

    assert data == [
        [
            [
                "systemd-remount-fs.service",
                'Remount Root and "Kernel File Systems"',
                "loaded",
                "active",
                "exited",
                "",
                "/org/freedesktop/systemd1/unit/systemd_2dremount_2dfs_2eservice",
                0,
                "",
                "/",
            ],
            [
                "sys-subsystem-net-devices-veth5714b4e.device",
                '/sys/subsystem/net/devices/veth5714b4e for " is',
                "loaded",
                "active",
                "plugged",
                "",
                "/org/freedesktop/systemd1/unit/sys_2dsubsystem_2dnet_2ddevices_2dveth5714b4e_2edevice",
                0,
                "",
                "/",
            ],
        ]
    ]


def test_networkmanager_dns_properties():
    """Test NetworkManager DNS properties."""
    raw = "({'Mode': <'default'>, 'RcManager': <'file'>, 'Configuration': <[{'nameservers': <['192.168.23.30']>, 'domains': <['syshack.local']>, 'interface': <'eth0'>, 'priority': <100>, 'vpn': <false>}]>},)"

    # parse data
    data = DBus.parse_gvariant(raw)

    assert data == [
        {
            "Mode": "default",
            "RcManager": "file",
            "Configuration": [
                {
                    "nameservers": ["192.168.23.30"],
                    "domains": ["syshack.local"],
                    "interface": "eth0",
                    "priority": 100,
                    "vpn": False,
                }
            ],
        }
    ]