Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-07-24 09:36:31 +00:00)
Commit a1910d4135

.github/main.workflow (vendored, new file, 16 lines)
@@ -0,0 +1,16 @@
+workflow "tox" {
+  on = "push"
+  resolves = [
+    "Python 3.7",
+    "Json Files",
+  ]
+}
+
+action "Python 3.7" {
+  uses = "home-assistant/actions/py37-tox@master"
+}
+
+action "Json Files" {
+  uses = "home-assistant/actions/jq@master"
+  args = "**/*.json"
+}

@@ -1,6 +0,0 @@
-sudo: true
-dist: xenial
-install: pip install -U tox
-language: python
-python: 3.7
-script: tox

API.md (2 changed lines)
@@ -22,7 +22,7 @@ On success / Code 200:
 }
 ```

-For access to API you need set the `X-HASSIO-KEY` they will be available for Add-ons/HomeAssistant with envoriment `HASSIO_TOKEN`.
+For access to API you need set the `X-HASSIO-KEY` they will be available for Add-ons/HomeAssistant with environment `HASSIO_TOKEN`.

 ### Hass.io

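
The API.md paragraph above describes the access pattern: add-ons and Home Assistant receive the token as the `HASSIO_TOKEN` environment variable and send it back in the `X-HASSIO-KEY` header. A minimal sketch of such a call, assuming the API is reachable from an add-on at `http://hassio` and that a `/supervisor/info` endpoint exists (both the base URL and the endpoint are illustrative assumptions, not taken from this diff):

```python
# Illustrative sketch only: base URL and endpoint path are assumptions.
import os

import requests

token = os.environ["HASSIO_TOKEN"]            # provided to the add-on container
response = requests.get(
    "http://hassio/supervisor/info",          # assumed endpoint, for illustration
    headers={"X-HASSIO-KEY": token},          # header documented in API.md
)
response.raise_for_status()
print(response.json())
```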

@@ -1,7 +1,6 @@
 """Init file for Hass.io add-ons."""
 from contextlib import suppress
 from copy import deepcopy
-import json
 import logging
 from pathlib import Path, PurePath
 import re
@@ -29,7 +28,7 @@ from ..const import (
     STATE_STARTED, STATE_STOPPED)
 from ..coresys import CoreSysAttributes
 from ..docker.addon import DockerAddon
-from ..exceptions import HostAppArmorError
+from ..exceptions import HostAppArmorError, JsonFileError
 from ..utils import create_token
 from ..utils.apparmor import adjust_profile
 from ..utils.json import read_json_file, write_json_file
@@ -61,11 +60,6 @@ class Addon(CoreSysAttributes):
             return
         await self.instance.attach()

-        # NOTE: Can't be removed after soon
-        if ATTR_IMAGE not in self._data.user[self._id]:
-            self._data.user[self._id][ATTR_IMAGE] = self.image_name
-            self.save_data()
-
     @property
     def slug(self):
         """Return slug/id of add-on."""
@@ -511,19 +505,20 @@ class Addon(CoreSysAttributes):
     def image(self):
         """Return image name of add-on."""
         if self.is_installed:
-            # NOTE: cleanup
-            if ATTR_IMAGE in self._data.user[self._id]:
-                return self._data.user[self._id][ATTR_IMAGE]
-            return self.image_name
+            return self._data.user[self._id].get(ATTR_IMAGE)
+        return self.image_next

     @property
-    def image_name(self):
+    def image_next(self):
         """Return image name for install/update."""
         if self.is_detached:
             addon_data = self._data.system.get(self._id)
         else:
             addon_data = self._data.cache.get(self._id)
+        return self._get_image(addon_data)

+    def _get_image(self, addon_data) -> str:
+        """Generate image name from data."""
         # Repository with Dockerhub images
         if ATTR_IMAGE in addon_data:
             arch = self.sys_arch.match(addon_data[ATTR_ARCH])
@@ -616,8 +611,8 @@
         except vol.Invalid as ex:
             _LOGGER.error("Add-on %s have wrong options: %s", self._id,
                           humanize_error(options, ex))
-        except (OSError, json.JSONDecodeError) as err:
-            _LOGGER.error("Add-on %s can't write options: %s", self._id, err)
+        except JsonFileError:
+            _LOGGER.error("Add-on %s can't write options", self._id)
         else:
             return True

@@ -725,10 +720,10 @@
         await self._install_apparmor()

         if not await self.instance.install(
-                self.last_version, self.image_name):
+                self.last_version, self.image_next):
             return False

-        self._set_install(self.image_name, self.last_version)
+        self._set_install(self.image_next, self.last_version)
         return True

     @check_installed
@@ -806,9 +801,9 @@
             return False

         if not await self.instance.update(
-                self.last_version, self.image_name):
+                self.last_version, self.image_next):
             return False
-        self._set_update(self.image_name, self.last_version)
+        self._set_update(self.image_next, self.last_version)

         # Setup/Fix AppArmor profile
         await self._install_apparmor()
@@ -892,8 +887,8 @@
             # Store local configs/state
             try:
                 write_json_file(Path(temp, 'addon.json'), data)
-            except (OSError, json.JSONDecodeError) as err:
-                _LOGGER.error("Can't save meta for %s: %s", self._id, err)
+            except JsonFileError:
+                _LOGGER.error("Can't save meta for %s", self._id)
                 return False

             # Store AppArmor Profile
@@ -940,8 +935,8 @@
             # Read snapshot data
             try:
                 data = read_json_file(Path(temp, 'addon.json'))
-            except (OSError, json.JSONDecodeError) as err:
-                _LOGGER.error("Can't read addon.json: %s", err)
+            except JsonFileError:
+                return False

             # Validate
             try:
@@ -953,7 +948,8 @@

             # Restore data or reload add-on
             _LOGGER.info("Restore config for addon %s", self._id)
-            self._restore_data(data[ATTR_USER], data[ATTR_SYSTEM], self.image_name)
+            restore_image = self._get_image(data[ATTR_SYSTEM])
+            self._restore_data(data[ATTR_USER], data[ATTR_SYSTEM], restore_image)

             # Check version / restore image
             version = data[ATTR_VERSION]
@@ -964,7 +960,7 @@
                 if image_file.is_file():
                     await self.instance.import_image(image_file, version)
                 else:
-                    if await self.instance.install(version, self.image_name):
+                    if await self.instance.install(version, restore_image):
                         await self.instance.cleanup()
             else:
                 await self.instance.stop()
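
The hunks above rename `image_name` to `image_next` and narrow `image` to whatever was recorded in the user data at install/update time, while `_get_image` resolves the image that the next install/update would use. A heavily simplified sketch with plain dicts (the key constants and the `str.format` rendering of the Dockerhub template are assumptions for illustration; the local-build fallback is omitted):

```python
# Simplified sketch, not the real Addon class.
from typing import Optional

ATTR_IMAGE = "image"   # key names assumed for illustration
ATTR_ARCH = "arch"


def installed_image(user_data: dict) -> Optional[str]:
    """Mirror of Addon.image: only what install/update recorded."""
    return user_data.get(ATTR_IMAGE)


def next_image(addon_data: dict, native_arch: str) -> Optional[str]:
    """Mirror of Addon.image_next/_get_image for the Dockerhub case."""
    if ATTR_IMAGE in addon_data:
        # assumed: the repository template contains an {arch} placeholder
        return addon_data[ATTR_IMAGE].format(arch=native_arch)
    return None  # locally built add-ons are handled elsewhere in _get_image
```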

@@ -1,19 +1,25 @@
 """Init file for Hass.io add-on data."""
 import logging
-import json
 from pathlib import Path

 import voluptuous as vol
 from voluptuous.humanize import humanize_error

-from .utils import extract_hash_from_path
-from .validate import (
-    SCHEMA_ADDON_CONFIG, SCHEMA_ADDONS_FILE, SCHEMA_REPOSITORY_CONFIG)
 from ..const import (
-    FILE_HASSIO_ADDONS, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON,
-    REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_USER, ATTR_SYSTEM)
+    ATTR_LOCATON,
+    ATTR_REPOSITORY,
+    ATTR_SLUG,
+    ATTR_SYSTEM,
+    ATTR_USER,
+    FILE_HASSIO_ADDONS,
+    REPOSITORY_CORE,
+    REPOSITORY_LOCAL,
+)
 from ..coresys import CoreSysAttributes
+from ..exceptions import JsonFileError
 from ..utils.json import JsonConfig, read_json_file
+from .utils import extract_hash_from_path
+from .validate import SCHEMA_ADDON_CONFIG, SCHEMA_ADDONS_FILE, SCHEMA_REPOSITORY_CONFIG

 _LOGGER = logging.getLogger(__name__)

@@ -54,12 +60,10 @@ class AddonsData(JsonConfig, CoreSysAttributes):
         self._repositories = {}

         # read core repository
-        self._read_addons_folder(
-            self.sys_config.path_addons_core, REPOSITORY_CORE)
+        self._read_addons_folder(self.sys_config.path_addons_core, REPOSITORY_CORE)

         # read local repository
-        self._read_addons_folder(
-            self.sys_config.path_addons_local, REPOSITORY_LOCAL)
+        self._read_addons_folder(self.sys_config.path_addons_local, REPOSITORY_LOCAL)

         # add built-in repositories information
         self._set_builtin_repositories()
@@ -76,15 +80,12 @@ class AddonsData(JsonConfig, CoreSysAttributes):
         # exists repository json
         repository_file = Path(path, "repository.json")
         try:
-            repository_info = SCHEMA_REPOSITORY_CONFIG(
-                read_json_file(repository_file)
+            repository_info = SCHEMA_REPOSITORY_CONFIG(read_json_file(repository_file))
+        except JsonFileError:
+            _LOGGER.warning(
+                "Can't read repository information from %s", repository_file
             )
-
-        except (OSError, json.JSONDecodeError, UnicodeDecodeError):
-            _LOGGER.warning("Can't read repository information from %s",
-                            repository_file)
             return
-
         except vol.Invalid:
             _LOGGER.warning("Repository parse error %s", repository_file)
             return
@@ -98,23 +99,21 @@ class AddonsData(JsonConfig, CoreSysAttributes):
         for addon in path.glob("**/config.json"):
             try:
                 addon_config = read_json_file(addon)
-
-            except (OSError, json.JSONDecodeError, UnicodeDecodeError):
-                _LOGGER.warning("Can't read %s", addon)
+            except JsonFileError:
+                _LOGGER.warning("Can't read %s from repository %s", addon, repository)
                 continue

             # validate
             try:
                 addon_config = SCHEMA_ADDON_CONFIG(addon_config)
-
             except vol.Invalid as ex:
-                _LOGGER.warning("Can't read %s: %s", addon,
-                                humanize_error(addon_config, ex))
+                _LOGGER.warning(
+                    "Can't read %s: %s", addon, humanize_error(addon_config, ex)
+                )
                 continue

             # Generate slug
-            addon_slug = "{}_{}".format(
-                repository, addon_config[ATTR_SLUG])
+            addon_slug = "{}_{}".format(repository, addon_config[ATTR_SLUG])

             # store
             addon_config[ATTR_REPOSITORY] = repository
@@ -126,14 +125,12 @@ class AddonsData(JsonConfig, CoreSysAttributes):
         try:
             builtin_file = Path(__file__).parent.joinpath("built-in.json")
             builtin_data = read_json_file(builtin_file)
-        except (OSError, json.JSONDecodeError) as err:
-            _LOGGER.warning("Can't read built-in json: %s", err)
+        except JsonFileError:
+            _LOGGER.warning("Can't read built-in json")
             return

         # core repository
-        self._repositories[REPOSITORY_CORE] = \
-            builtin_data[REPOSITORY_CORE]
+        self._repositories[REPOSITORY_CORE] = builtin_data[REPOSITORY_CORE]

         # local repository
-        self._repositories[REPOSITORY_LOCAL] = \
-            builtin_data[REPOSITORY_LOCAL]
+        self._repositories[REPOSITORY_LOCAL] = builtin_data[REPOSITORY_LOCAL]

@@ -156,7 +156,7 @@ SCHEMA_BUILD_CONFIG = vol.Schema({
 # pylint: disable=no-value-for-parameter
 SCHEMA_ADDON_USER = vol.Schema({
     vol.Required(ATTR_VERSION): vol.Coerce(str),
-    vol.Optional(ATTR_IMAGE): vol.Maybe(vol.Coerce(str)),
+    vol.Optional(ATTR_IMAGE): vol.Coerce(str),
     vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): UUID_MATCH,
     vol.Optional(ATTR_ACCESS_TOKEN): SHA256,
     vol.Optional(ATTR_OPTIONS, default=dict): dict,

@@ -3,22 +3,27 @@
         "armhf"
     ],
     "raspberrypi2": [
+        "armv7",
         "armhf"
     ],
     "raspberrypi3": [
+        "armv7",
         "armhf"
     ],
     "raspberrypi3-64": [
         "aarch64",
+        "armv7",
         "armhf"
     ],
     "tinker": [
+        "armv7",
         "armhf"
     ],
     "odroid-c2": [
         "aarch64"
     ],
     "odroid-xu": [
+        "armv7",
         "armhf"
     ],
     "orangepi-prime": [

@@ -1,11 +1,10 @@
 """Handle Arch for underlay maschine/platforms."""
-import json
 import logging
 from typing import List
 from pathlib import Path

 from .coresys import CoreSysAttributes, CoreSys
-from .exceptions import HassioArchNotFound
+from .exceptions import HassioArchNotFound, JsonFileError
 from .utils.json import read_json_file

 _LOGGER = logging.getLogger(__name__)
@@ -38,10 +37,9 @@ class CpuArch(CoreSysAttributes):
     async def load(self) -> None:
         """Load data and initialize default arch."""
         try:
-            arch_file = Path(__file__).parent.joinpath("arch.json")
-            arch_data = read_json_file(arch_file)
-        except (OSError, json.JSONDecodeError) as err:
-            _LOGGER.warning("Can't read arch json: %s", err)
+            arch_data = read_json_file(Path(__file__).parent.joinpath("arch.json"))
+        except JsonFileError:
+            _LOGGER.warning("Can't read arch json")
             return

         # Evaluate current CPU/Platform

@@ -2,7 +2,7 @@
 from pathlib import Path
 from ipaddress import ip_network

-HASSIO_VERSION = "145"
+HASSIO_VERSION = "146"

 URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"
 URL_HASSIO_VERSION = "https://s3.amazonaws.com/hassio-version/{channel}.json"

@@ -137,3 +137,10 @@ class AppArmorFileError(AppArmorError):

 class AppArmorInvalidError(AppArmorError):
     """AppArmor profile validate error."""
+
+
+# util/json
+
+
+class JsonFileError(HassioError):
+    """Invalid json file."""
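
Because `JsonFileError` derives from `HassioError`, existing broad `except HassioError` handlers keep catching it, while the other hunks in this commit switch callers to catching it directly. A minimal sketch of that pattern (the helper name and the empty-dict fallback are invented for illustration):

```python
from pathlib import Path

from hassio.exceptions import JsonFileError
from hassio.utils.json import read_json_file


def load_or_default(path: Path) -> dict:
    """Return parsed JSON content, or an empty dict if the file is missing or broken."""
    try:
        return read_json_file(path)   # logs and raises JsonFileError on I/O or parse errors
    except JsonFileError:
        return {}
```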

@@ -1,82 +1,89 @@
 """Tools file for Hass.io."""
 import json
 import logging
+from pathlib import Path
+from typing import Any, Dict

 import voluptuous as vol
 from voluptuous.humanize import humanize_error

+from ..exceptions import JsonFileError
+
 _LOGGER = logging.getLogger(__name__)


-def write_json_file(jsonfile, data):
+def write_json_file(jsonfile: Path, data: Any) -> None:
     """Write a JSON file."""
-    json_str = json.dumps(data, indent=2)
-    with jsonfile.open('w') as conf_file:
-        conf_file.write(json_str)
+    try:
+        jsonfile.write_text(json.dumps(data, indent=2))
+    except (OSError, ValueError, TypeError) as err:
+        _LOGGER.error("Can't write %s: %s", jsonfile, err)
+        raise JsonFileError() from None


-def read_json_file(jsonfile):
+def read_json_file(jsonfile: Path) -> Any:
     """Read a JSON file and return a dict."""
-    with jsonfile.open('r') as cfile:
-        return json.loads(cfile.read())
+    try:
+        return json.loads(jsonfile.read_text())
+    except (OSError, ValueError, TypeError, UnicodeDecodeError) as err:
+        _LOGGER.error("Can't read json from %s: %s", jsonfile, err)
+        raise JsonFileError() from None


 class JsonConfig:
     """Hass core object for handle it."""

-    def __init__(self, json_file, schema):
+    def __init__(self, json_file: Path, schema: vol.Schema):
         """Initialize hass object."""
-        self._file = json_file
-        self._schema = schema
-        self._data = {}
+        self._file: Path = json_file
+        self._schema: vol.Schema = schema
+        self._data: Dict[str, Any] = {}

         self.read_data()

-    def reset_data(self):
+    def reset_data(self) -> None:
         """Reset JSON file to default."""
         try:
             self._data = self._schema({})
         except vol.Invalid as ex:
-            _LOGGER.error("Can't reset %s: %s",
-                          self._file, humanize_error(self._data, ex))
+            _LOGGER.error(
+                "Can't reset %s: %s", self._file, humanize_error(self._data, ex)
+            )

-    def read_data(self):
+    def read_data(self) -> None:
         """Read JSON file & validate."""
         if self._file.is_file():
             try:
                 self._data = read_json_file(self._file)
-            except (OSError, json.JSONDecodeError, UnicodeDecodeError):
-                _LOGGER.warning("Can't read %s", self._file)
+            except JsonFileError:
                 self._data = {}

         # Validate
         try:
             self._data = self._schema(self._data)
         except vol.Invalid as ex:
-            _LOGGER.error("Can't parse %s: %s",
-                          self._file, humanize_error(self._data, ex))
+            _LOGGER.error(
+                "Can't parse %s: %s", self._file, humanize_error(self._data, ex)
+            )

             # Reset data to default
             _LOGGER.warning("Reset %s to default", self._file)
             self._data = self._schema({})

-    def save_data(self):
+    def save_data(self) -> None:
         """Store data to configuration file."""
         # Validate
         try:
             self._data = self._schema(self._data)
         except vol.Invalid as ex:
-            _LOGGER.error("Can't parse data: %s",
-                          humanize_error(self._data, ex))
+            _LOGGER.error("Can't parse data: %s", humanize_error(self._data, ex))

             # Load last valid data
             _LOGGER.warning("Reset %s to last version", self._file)
             self.read_data()
             return
-
-        # write
-        try:
-            write_json_file(self._file, self._data)
-        except (OSError, json.JSONDecodeError) as err:
-            _LOGGER.error(
-                "Can't store configuration in %s: %s", self._file, err)
+        else:
+            # write
+            try:
+                write_json_file(self._file, self._data)
+            except JsonFileError:
+                pass
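
A hedged usage sketch for the rewritten helpers above: `JsonConfig` subclasses get schema-validated, file-backed state with the new `JsonFileError` handling built in. The schema, property, and file path below are invented for illustration; the real consumers in this commit are classes like `AddonsData`:

```python
from pathlib import Path

import voluptuous as vol

from hassio.utils.json import JsonConfig

# Invented schema for illustration.
EXAMPLE_SCHEMA = vol.Schema(
    {vol.Optional("channel", default="stable"): vol.Coerce(str)},
    extra=vol.REMOVE_EXTRA,
)


class ExampleConfig(JsonConfig):
    """Tiny config object backed by a JSON file."""

    def __init__(self, path: Path):
        super().__init__(path, EXAMPLE_SCHEMA)

    @property
    def channel(self) -> str:
        return self._data["channel"]


config = ExampleConfig(Path("/tmp/example.json"))  # missing/broken file -> schema defaults
print(config.channel)                              # "stable" unless the file overrides it
config.save_data()                                 # validates, then write_json_file()
```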

@@ -1,5 +1,5 @@
-flake8==3.7.5
-pylint==2.2.2
-pytest==4.1.1
+flake8==3.7.7
+pylint==2.3.0
+pytest==4.3.0
 pytest-timeout==1.3.3
 pytest-aiohttp==0.3.0

@@ -46,8 +46,8 @@ async def test_raspberrypi2_arch(coresys, sys_machine):
     sys_machine.return_value = "raspberrypi2"
     await coresys.arch.load()

-    assert coresys.arch.default == "armhf"
-    assert coresys.arch.supported == ["armhf"]
+    assert coresys.arch.default == "armv7"
+    assert coresys.arch.supported == ["armv7", "armhf"]


 async def test_raspberrypi3_arch(coresys, sys_machine):
@@ -55,8 +55,8 @@ async def test_raspberrypi3_arch(coresys, sys_machine):
     sys_machine.return_value = "raspberrypi3"
     await coresys.arch.load()

-    assert coresys.arch.default == "armhf"
-    assert coresys.arch.supported == ["armhf"]
+    assert coresys.arch.default == "armv7"
+    assert coresys.arch.supported == ["armv7", "armhf"]


 async def test_raspberrypi3_64_arch(coresys, sys_machine):
@@ -65,7 +65,7 @@ async def test_raspberrypi3_64_arch(coresys, sys_machine):
     await coresys.arch.load()

     assert coresys.arch.default == "aarch64"
-    assert coresys.arch.supported == ["aarch64", "armhf"]
+    assert coresys.arch.supported == ["aarch64", "armv7", "armhf"]


 async def test_tinker_arch(coresys, sys_machine):
@@ -73,8 +73,8 @@ async def test_tinker_arch(coresys, sys_machine):
     sys_machine.return_value = "tinker"
     await coresys.arch.load()

-    assert coresys.arch.default == "armhf"
-    assert coresys.arch.supported == ["armhf"]
+    assert coresys.arch.default == "armv7"
+    assert coresys.arch.supported == ["armv7", "armhf"]


 async def test_odroid_c2_arch(coresys, sys_machine):
@@ -91,8 +91,8 @@ async def test_odroid_xu_arch(coresys, sys_machine):
     sys_machine.return_value = "odroid-xu"
     await coresys.arch.load()

-    assert coresys.arch.default == "armhf"
-    assert coresys.arch.supported == ["armhf"]
+    assert coresys.arch.default == "armv7"
+    assert coresys.arch.supported == ["armv7", "armhf"]


 async def test_orangepi_prime_arch(coresys, sys_machine):