Merge pull request #1341 from home-assistant/dev

Release 190
Pascal Vizeli 2019-10-22 14:57:25 +02:00 committed by GitHub
commit b3b12d35fd
25 changed files with 244 additions and 34 deletions

View File

@@ -6,11 +6,13 @@
   "appPort": "9123:8123",
   "runArgs": [
     "-e",
-    "GIT_EDITOR=\"code --wait\"",
+    "GIT_EDITOR=code --wait",
     "--privileged"
   ],
   "extensions": [
-    "ms-python.python"
+    "ms-python.python",
+    "visualstudioexptteam.vscodeintellicode",
+    "esbenp.prettier-vscode"
   ],
   "settings": {
     "python.pythonPath": "/usr/local/bin/python",
@@ -26,4 +28,4 @@
     "editor.formatOnType": true,
     "files.trimTrailingWhitespace": true
   }
 }

View File

@@ -12,6 +12,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
 def initialize_event_loop():
     """Attempt to use uvloop."""
     try:
+        # pylint: disable=import-outside-toplevel
         import uvloop

         uvloop.install()

View File

@@ -285,6 +285,9 @@ class AddonManager(CoreSysAttributes):
         for addon in needs_repair:
             _LOGGER.info("Start repair for add-on: %s", addon.slug)
+            await self.sys_run_in_executor(
+                self.sys_docker.network.stale_cleanup, addon.instance.name
+            )

             with suppress(DockerAPIError, KeyError):
                 # Need pull a image again
@@ -293,7 +296,7 @@ class AddonManager(CoreSysAttributes):
                     continue

                 # Need local lookup
-                elif addon.need_build and not addon.is_detached:
+                if addon.need_build and not addon.is_detached:
                     store = self.store[addon.slug]
                     # If this add-on is available for rebuild
                     if addon.version == store.version:

View File

@@ -51,6 +51,7 @@ from ..exceptions import (
 )
 from ..utils.apparmor import adjust_profile
 from ..utils.json import read_json_file, write_json_file
+from ..utils.tar import exclude_filter, secure_path
 from .model import AddonModel, Data
 from .utils import remove_data
 from .validate import SCHEMA_ADDON_SNAPSHOT, validate_options
@@ -525,7 +526,7 @@ class Addon(AddonModel):
     async def snapshot(self, tar_file: tarfile.TarFile) -> None:
         """Snapshot state of an add-on."""
-        with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp:
+        with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
             # store local image
             if self.need_build:
                 try:
@@ -560,8 +561,15 @@ class Addon(AddonModel):
             def _write_tarfile():
                 """Write tar inside loop."""
                 with tar_file as snapshot:
+                    # Snapshot system
                     snapshot.add(temp, arcname=".")
-                    snapshot.add(self.path_data, arcname="data")
+
+                    # Snapshot data
+                    snapshot.add(
+                        self.path_data,
+                        arcname="data",
+                        filter=exclude_filter(self.snapshot_exclude),
+                    )

             try:
                 _LOGGER.info("Build snapshot for add-on %s", self.slug)
@@ -574,12 +582,12 @@ class Addon(AddonModel):
     async def restore(self, tar_file: tarfile.TarFile) -> None:
         """Restore state of an add-on."""
-        with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp:
+        with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
             # extract snapshot
             def _extract_tarfile():
                 """Extract tar snapshot."""
                 with tar_file as snapshot:
-                    snapshot.extractall(path=Path(temp))
+                    snapshot.extractall(path=Path(temp), members=secure_path(snapshot))

             try:
                 await self.sys_run_in_executor(_extract_tarfile)
@@ -640,7 +648,7 @@ class Addon(AddonModel):
             # Restore data
             def _restore_data():
                 """Restore data."""
-                shutil.copytree(str(Path(temp, "data")), str(self.path_data))
+                shutil.copytree(Path(temp, "data"), self.path_data)

             _LOGGER.info("Restore data for addon %s", self.slug)
             if self.path_data.is_dir():

View File

@@ -47,6 +47,7 @@ from ..const import (
     ATTR_SCHEMA,
     ATTR_SERVICES,
     ATTR_SLUG,
+    ATTR_SNAPSHOT_EXCLUDE,
     ATTR_STARTUP,
     ATTR_STDIN,
     ATTR_TIMEOUT,
@@ -324,6 +325,11 @@ class AddonModel(CoreSysAttributes):
         """Return Hass.io role for API."""
         return self.data[ATTR_HASSIO_ROLE]

+    @property
+    def snapshot_exclude(self) -> List[str]:
+        """Return Exclude list for snapshot."""
+        return self.data.get(ATTR_SNAPSHOT_EXCLUDE, [])
+
     @property
     def with_stdin(self) -> bool:
         """Return True if the add-on access use stdin input."""

View File

@@ -62,6 +62,7 @@ from ..const import (
     ATTR_SCHEMA,
     ATTR_SERVICES,
     ATTR_SLUG,
+    ATTR_SNAPSHOT_EXCLUDE,
     ATTR_SQUASH,
     ATTR_STARTUP,
     ATTR_STATE,
@@ -214,6 +215,7 @@ SCHEMA_ADDON_CONFIG = vol.Schema(
         vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
         vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
         vol.Optional(ATTR_DISCOVERY): [valid_discovery_service],
+        vol.Optional(ATTR_SNAPSHOT_EXCLUDE): [vol.Coerce(str)],
         vol.Required(ATTR_OPTIONS): dict,
         vol.Required(ATTR_SCHEMA): vol.Any(
             vol.Schema(
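For context, the new snapshot_exclude option takes a list of glob patterns. When the add-on's data directory is added to a snapshot (see the addon.py hunk above), each archived path is tested with Path.match() against these patterns and skipped on a match. A small illustrative check of what such patterns would cover; the pattern values here are hypothetical, not part of this commit:

from pathlib import Path

# Hypothetical patterns an add-on could declare in config.json under
# "snapshot_exclude". Path.match() matches from the right, so "cache/*"
# also matches "data/cache/foo".
patterns = ["*.log", "cache/*"]

assert Path("data/nginx/error.log").match("*.log")
assert Path("data/cache/thumb.png").match("cache/*")
assert not Path("data/config.yaml").match("cache/*")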

View File

@@ -236,6 +236,7 @@ def supervisor_debugger(coresys: CoreSys) -> None:
     """Setup debugger if needed."""
     if not coresys.config.debug:
         return
+    # pylint: disable=import-outside-toplevel
     import ptvsd

     _LOGGER.info("Initialize Hass.io debugger")

View File

@@ -2,7 +2,7 @@
 from pathlib import Path
 from ipaddress import ip_network

-HASSIO_VERSION = "189"
+HASSIO_VERSION = "190"

 URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"
@@ -221,6 +221,7 @@ ATTR_SERVERS = "servers"
 ATTR_LOCALS = "locals"
 ATTR_UDEV = "udev"
 ATTR_VALUE = "value"
+ATTR_SNAPSHOT_EXCLUDE = "snapshot_exclude"

 PROVIDE_SERVICE = "provide"
 NEED_SERVICE = "need"

View File

@@ -31,8 +31,8 @@ class Message:
     addon: str = attr.ib()
     service: str = attr.ib()
-    config: Dict[str, Any] = attr.ib(cmp=False)
-    uuid: UUID = attr.ib(factory=lambda: uuid4().hex, cmp=False)
+    config: Dict[str, Any] = attr.ib(eq=False)
+    uuid: UUID = attr.ib(factory=lambda: uuid4().hex, eq=False)


 class Discovery(CoreSysAttributes, JsonConfig):
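The cmp argument shown above was deprecated in attrs 19.2.0, which split it into eq and order; the switch to eq=False goes together with the attrs==19.3.0 bump later in this diff. A stripped-down sketch of the pattern, with the field list reduced for illustration:

import attr


@attr.s
class Message:
    """Sketch only: config is excluded from generated comparisons."""

    addon: str = attr.ib()
    service: str = attr.ib()
    # eq=False replaces the deprecated cmp=False: two messages with the same
    # addon and service compare equal even if their config dicts differ.
    config: dict = attr.ib(eq=False)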

View File

@@ -0,0 +1,11 @@
+"""Discovery service for Almond."""
+import voluptuous as vol
+
+from hassio.validate import NETWORK_PORT
+
+from ..const import ATTR_HOST, ATTR_PORT
+
+SCHEMA = vol.Schema(
+    {vol.Required(ATTR_HOST): vol.Coerce(str), vol.Required(ATTR_PORT): NETWORK_PORT}
+)

View File

@@ -0,0 +1,11 @@
+"""Discovery service for Home Panel."""
+import voluptuous as vol
+
+from hassio.validate import NETWORK_PORT
+
+from ..const import ATTR_HOST, ATTR_PORT
+
+SCHEMA = vol.Schema(
+    {vol.Required(ATTR_HOST): vol.Coerce(str), vol.Required(ATTR_PORT): NETWORK_PORT}
+)

View File

@@ -178,3 +178,10 @@ class DockerAPI:
             _LOGGER.debug("Volumes prune: %s", output)
         except docker.errors.APIError as err:
             _LOGGER.warning("Error for volumes prune: %s", err)
+
+        _LOGGER.info("Prune stale networks")
+        try:
+            output = self.docker.api.prune_networks()
+            _LOGGER.debug("Networks prune: %s", output)
+        except docker.errors.APIError as err:
+            _LOGGER.warning("Error for networks prune: %s", err)

View File

@@ -1,4 +1,5 @@
 """Internal network manager for Hass.io."""
+from contextlib import suppress
 from ipaddress import IPv4Address
 import logging
 from typing import List, Optional
@@ -107,3 +108,11 @@ class DockerNetwork:
         except docker.errors.APIError as err:
             _LOGGER.warning("Can't disconnect container from default: %s", err)
             raise DockerAPIError() from None
+
+    def stale_cleanup(self, container_name: str):
+        """Remove force a container from Network.
+
+        Fix: https://github.com/moby/moby/issues/23302
+        """
+        with suppress(docker.errors.APIError):
+            self.network.disconnect(container_name, force=True)

View File

@@ -575,7 +575,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
                     migration_progress = True
                     _LOGGER.info("Home Assistant record migration in progress")
                 continue
-            elif migration_progress:
+            if migration_progress:
                 migration_progress = False  # Reset start time
                 start_time = time.monotonic()
                 _LOGGER.info("Home Assistant record migration done")
@@ -586,7 +586,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
                     pip_progress = True
                     _LOGGER.info("Home Assistant pip installation in progress")
                 continue
-            elif pip_progress:
+            if pip_progress:
                 pip_progress = False  # Reset start time
                 start_time = time.monotonic()
                 _LOGGER.info("Home Assistant pip installation done")
@@ -605,6 +605,11 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
             return

         _LOGGER.info("Repair Home Assistant %s", self.version)
+        await self.sys_run_in_executor(
+            self.sys_docker.network.stale_cleanup, self.instance.name
+        )

+        # Pull image
         try:
             await self.instance.install(self.version)
         except DockerAPIError:

View File

@@ -11,8 +11,13 @@ from ..coresys import CoreSysAttributes
 _LOGGER: logging.Logger = logging.getLogger(__name__)

-# pylint: disable=invalid-name
-DefaultConfig = attr.make_class("DefaultConfig", ["input", "output"])
+
+@attr.s()
+class DefaultConfig:
+    """Default config input/output ALSA channel."""
+
+    input: str = attr.ib()
+    output: str = attr.ib()

 AUDIODB_JSON: Path = Path(__file__).parents[1].joinpath("data/audiodb.json")

View File

@@ -91,9 +91,9 @@ class ServiceManager(CoreSysAttributes):
 class ServiceInfo:
     """Represent a single Service."""

-    name = attr.ib(type=str)
-    description = attr.ib(type=str)
-    state = attr.ib(type=str)
+    name: str = attr.ib()
+    description: str = attr.ib()
+    state: str = attr.ib()

     @staticmethod
     def read_from(unit):

View File

@@ -41,7 +41,7 @@ from ..const import (
 from ..coresys import CoreSys, CoreSysAttributes
 from ..exceptions import AddonsError
 from ..utils.json import write_json_file
-from ..utils.tar import SecureTarFile
+from ..utils.tar import SecureTarFile, secure_path
 from .utils import key_to_iv, password_for_validating, password_to_key, remove_folder
 from .validate import ALL_FOLDERS, SCHEMA_SNAPSHOT
@@ -248,7 +248,7 @@ class Snapshot(CoreSysAttributes):
         def _extract_snapshot():
             """Extract a snapshot."""
             with tarfile.open(self.tarfile, "r:") as tar:
-                tar.extractall(path=self._tmp.name)
+                tar.extractall(path=self._tmp.name, members=secure_path(tar))

         await self.sys_run_in_executor(_extract_snapshot)
@@ -396,7 +396,7 @@ class Snapshot(CoreSysAttributes):
             try:
                 _LOGGER.info("Restore folder %s", name)
                 with SecureTarFile(tar_name, "r", key=self._key) as tar_file:
-                    tar_file.extractall(path=origin_dir)
+                    tar_file.extractall(path=origin_dir, members=tar_file)
                 _LOGGER.info("Restore folder %s done", name)
             except (tarfile.TarError, OSError) as err:
                 _LOGGER.warning("Can't restore folder %s: %s", name, err)

View File

@@ -42,7 +42,7 @@ def remove_folder(folder):
     for obj in folder.iterdir():
         try:
             if obj.is_dir():
-                shutil.rmtree(str(obj), ignore_errors=True)
+                shutil.rmtree(obj, ignore_errors=True)
             else:
                 obj.unlink()
         except (OSError, shutil.Error):

View File

@@ -137,7 +137,7 @@ class GitRepo(CoreSysAttributes):
             """Log error."""
             _LOGGER.warning("Can't remove %s", path)

-        shutil.rmtree(str(self.path), onerror=log_err)
+        shutil.rmtree(self.path, onerror=log_err)


 class GitRepoHassIO(GitRepo):

View File

@@ -1,19 +1,22 @@
 """Tarfile fileobject handler for encrypted files."""
 import hashlib
+import logging
 import os
 from pathlib import Path
 import tarfile
-from typing import IO, Optional
+from typing import IO, Callable, Generator, List, Optional

 from cryptography.hazmat.backends import default_backend
 from cryptography.hazmat.primitives import padding
 from cryptography.hazmat.primitives.ciphers import (
-    CipherContext,
     Cipher,
+    CipherContext,
     algorithms,
     modes,
 )

+_LOGGER: logging.Logger = logging.getLogger(__name__)
+
 BLOCK_SIZE = 16
 BLOCK_SIZE_BITS = 128
@@ -111,3 +114,39 @@ def _generate_iv(key: bytes, salt: bytes) -> bytes:
     for _ in range(100):
         temp_iv = hashlib.sha256(temp_iv).digest()
     return temp_iv[:16]
+
+
+def secure_path(tar: tarfile.TarFile) -> Generator[tarfile.TarInfo, None, None]:
+    """Security safe check of path.
+
+    Prevent ../ or absolut paths
+    """
+    for member in tar:
+        file_path = Path(member.name)
+        try:
+            assert not file_path.is_absolute()
+            Path("/fake", file_path).resolve().relative_to("/fake")
+        except (ValueError, RuntimeError, AssertionError):
+            _LOGGER.warning("Issue with file %s", file_path)
+            continue
+        else:
+            yield member
+
+
+def exclude_filter(
+    exclude_list: List[str]
+) -> Callable[[tarfile.TarInfo], Optional[tarfile.TarInfo]]:
+    """Create callable filter function to check TarInfo for add."""
+
+    def my_filter(tar: tarfile.TarInfo) -> Optional[tarfile.TarInfo]:
+        """Custom exclude filter."""
+        file_path = Path(tar.name)
+        for exclude in exclude_list:
+            if not file_path.match(exclude):
+                continue
+            _LOGGER.debug("Ignore %s because of %s", file_path, exclude)
+            return None
+        return tar
+
+    return my_filter
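A minimal usage sketch of the two new helpers against the plain tarfile API; the archive path, directory, and patterns below are hypothetical, chosen only for illustration:

import tarfile
from pathlib import Path

from hassio.utils.tar import exclude_filter, secure_path

# Writing: drop anything matching the exclude patterns while adding a tree.
with tarfile.open("/tmp/example-snapshot.tar", "w:") as tar:
    tar.add(
        "/data/example-addon",
        arcname="data",
        filter=exclude_filter(["*.log", "cache/*"]),
    )

# Reading: only yield members whose paths resolve inside the target directory,
# so absolute paths and "../" escapes are skipped with a warning.
with tarfile.open("/tmp/example-snapshot.tar", "r:") as tar:
    tar.extractall(path=Path("/tmp/restore"), members=secure_path(tar))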

View File

@@ -1,14 +1,14 @@
-aiohttp==3.6.1
+aiohttp==3.6.2
 async_timeout==3.0.1
-attrs==19.1.0
+attrs==19.3.0
 cchardet==2.1.4
 colorlog==4.0.2
 cpe==1.2.1
-cryptography==2.7
-docker==4.0.2
-gitpython==3.0.2
+cryptography==2.8
+docker==4.1.0
+gitpython==3.0.4
 packaging==19.2
-pytz==2019.2
+pytz==2019.3
 pyudev==0.21.0
 ruamel.yaml==0.15.100
 uvloop==0.13.0

View File

@@ -1,5 +1,5 @@
 flake8==3.7.8
-pylint==2.3.1
-pytest==5.1.3
+pylint==2.4.3
+pytest==5.2.1
 pytest-timeout==1.3.3
 pytest-aiohttp==0.3.0

View File

@@ -0,0 +1,19 @@
+"""Test adguard discovery."""
+
+import voluptuous as vol
+import pytest
+
+from hassio.discovery.validate import valid_discovery_config
+
+
+def test_good_config():
+    """Test good deconz config."""
+
+    valid_discovery_config("almond", {"host": "test", "port": 3812})
+
+
+def test_bad_config():
+    """Test good adguard config."""
+
+    with pytest.raises(vol.Invalid):
+        valid_discovery_config("almond", {"host": "test"})

View File

@@ -0,0 +1,19 @@
+"""Test adguard discovery."""
+
+import voluptuous as vol
+import pytest
+
+from hassio.discovery.validate import valid_discovery_config
+
+
+def test_good_config():
+    """Test good deconz config."""
+
+    valid_discovery_config("home_panel", {"host": "test", "port": 3812})
+
+
+def test_bad_config():
+    """Test good adguard config."""
+
+    with pytest.raises(vol.Invalid):
+        valid_discovery_config("home_panel", {"host": "test"})

View File

@@ -0,0 +1,61 @@
+"""Test Tarfile functions."""
+
+import attr
+import pytest
+
+from hassio.utils.tar import secure_path, exclude_filter
+
+
+@attr.s
+class TarInfo:
+    """Fake TarInfo"""
+
+    name: str = attr.ib()
+
+
+def test_secure_path():
+    """Test Secure Path."""
+    test_list = [
+        TarInfo("test.txt"),
+        TarInfo("data/xy.blob"),
+        TarInfo("bla/blu/ble"),
+        TarInfo("data/../xy.blob"),
+    ]
+    assert test_list == list(secure_path(test_list))
+
+
+def test_not_secure_path():
+    """Test Not secure path."""
+    test_list = [
+        TarInfo("/test.txt"),
+        TarInfo("data/../../xy.blob"),
+        TarInfo("/bla/blu/ble"),
+    ]
+    assert [] == list(secure_path(test_list))
+
+
+def test_exclude_filter_good():
+    """Test exclude filter."""
+    filter_funct = exclude_filter(["not/match", "/dev/xy"])
+    test_list = [
+        TarInfo("test.txt"),
+        TarInfo("data/xy.blob"),
+        TarInfo("bla/blu/ble"),
+        TarInfo("data/../xy.blob"),
+    ]
+    assert test_list == [filter_funct(result) for result in test_list]
+
+
+def test_exclude_filter_bad():
+    """Test exclude filter."""
+    filter_funct = exclude_filter(["*.txt", "data/*", "bla/blu/ble"])
+    test_list = [
+        TarInfo("test.txt"),
+        TarInfo("data/xy.blob"),
+        TarInfo("bla/blu/ble"),
+        TarInfo("data/test_files/kk.txt"),
+    ]
+
+    for info in [filter_funct(result) for result in test_list]:
+        assert info is None