Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-07-09 18:26:30 +00:00)
Replace pycryptodome with cryptography (#923)

* Replace pycryptodome with cryptography
* Fix typing
* Fix typing
* Fix lints
* Fix build
* Add musl libc
* Fix lint
* Fix lint
* Fix algo
* Add more typing, fix crypto imports v2
* Fix padding
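The change swaps pycryptodome's `AES.new(key, AES.MODE_CBC, iv=...)` plus `Padding.pad`/`Padding.unpad` for the cryptography package's `Cipher` contexts and `PKCS7` padders, as the hunks below show. A minimal sketch of that pattern, using a throwaway key and IV instead of the repository's password-derived ones:

```python
import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import padding
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

# Illustrative key/IV only; the supervisor derives both from the snapshot password.
key = os.urandom(16)
iv = os.urandom(16)
cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=default_backend())

# Encrypt: pad to the 128-bit block size, then push through an encryptor context.
padder = padding.PKCS7(128).padder()
padded = padder.update(b"secret data") + padder.finalize()
ciphertext = cipher.encryptor().update(padded)

# Decrypt: a decryptor context followed by an unpadder reverses the steps.
unpadder = padding.PKCS7(128).unpadder()
plain = unpadder.update(cipher.decryptor().update(ciphertext)) + unpadder.finalize()
assert plain == b"secret data"
```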
This commit is contained in:
parent b6df37628d
commit 7f074142bf
Dockerfile (10 changed lines)
@@ -3,6 +3,9 @@ FROM $BUILD_FROM

# Install base
RUN apk add --no-cache \
openssl \
libffi \
musl \
git \
socat \
glib \

@@ -12,8 +15,11 @@ RUN apk add --no-cache \
# Install requirements
COPY requirements.txt /usr/src/
RUN apk add --no-cache --virtual .build-dependencies \
make \
g++ \
make \
g++ \
openssl-dev \
libffi-dev \
musl-dev \
&& export MAKEFLAGS="-j$(nproc)" \
&& pip3 install --no-cache-dir -r /usr/src/requirements.txt \
&& apk del .build-dependencies \
@@ -1,5 +1,6 @@
"""Util add-ons functions."""
from __future__ import annotations

import asyncio
import hashlib
import logging

@@ -7,10 +8,18 @@ from pathlib import Path
import re
from typing import TYPE_CHECKING

from ..const import (PRIVILEGED_DAC_READ_SEARCH, PRIVILEGED_NET_ADMIN,
PRIVILEGED_SYS_ADMIN, PRIVILEGED_SYS_MODULE,
PRIVILEGED_SYS_PTRACE, PRIVILEGED_SYS_RAWIO, ROLE_ADMIN,
ROLE_MANAGER, SECURITY_DISABLE, SECURITY_PROFILE)
from ..const import (
PRIVILEGED_DAC_READ_SEARCH,
PRIVILEGED_NET_ADMIN,
PRIVILEGED_SYS_ADMIN,
PRIVILEGED_SYS_MODULE,
PRIVILEGED_SYS_PTRACE,
PRIVILEGED_SYS_RAWIO,
ROLE_ADMIN,
ROLE_MANAGER,
SECURITY_DISABLE,
SECURITY_PROFILE,
)

if TYPE_CHECKING:
from .addon import Addon

@@ -38,16 +47,17 @@ def rating_security(addon: Addon) -> int:
rating += 1

# Privileged options
# pylint: disable=bad-continuation
if any(
privilege in addon.privileged for privilege in (
PRIVILEGED_NET_ADMIN,
PRIVILEGED_SYS_ADMIN,
PRIVILEGED_SYS_RAWIO,
PRIVILEGED_SYS_PTRACE,
PRIVILEGED_SYS_MODULE,
PRIVILEGED_DAC_READ_SEARCH,
)):
privilege in addon.privileged
for privilege in (
PRIVILEGED_NET_ADMIN,
PRIVILEGED_SYS_ADMIN,
PRIVILEGED_SYS_RAWIO,
PRIVILEGED_SYS_PTRACE,
PRIVILEGED_SYS_MODULE,
PRIVILEGED_DAC_READ_SEARCH,
)
):
rating += -1

# API Hass.io role

@@ -107,7 +117,8 @@ async def remove_data(folder: Path) -> None:
"""Remove folder and reset privileged."""
try:
proc = await asyncio.create_subprocess_exec(
"rm", "-rf", str(folder), stdout=asyncio.subprocess.DEVNULL)
"rm", "-rf", str(folder), stdout=asyncio.subprocess.DEVNULL
)

_, error_msg = await proc.communicate()
except OSError as err:
@@ -144,8 +144,7 @@ class APIProxy(CoreSysAttributes):
return client

# Renew the Token is invalid
if (data.get('type') == 'invalid_auth' and
self.sys_homeassistant.refresh_token):
if data.get('type') == 'invalid_auth' and self.sys_homeassistant.refresh_token:
self.sys_homeassistant.access_token = None
return await self._websocket_client()

@@ -175,8 +174,7 @@ class APIProxy(CoreSysAttributes):

# Check API access
response = await server.receive_json()
hassio_token = (response.get('api_password') or
response.get('access_token'))
hassio_token = response.get('api_password') or response.get('access_token')
addon = self.sys_addons.from_token(hassio_token)

if not addon or not addon.access_homeassistant_api:
@@ -369,8 +369,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):

async def ensure_access_token(self):
"""Ensures there is an access token."""
if (self.access_token is not None and
self._access_token_expires > datetime.utcnow()):
if self.access_token is not None and self._access_token_expires > datetime.utcnow():
return

with suppress(asyncio.TimeoutError, aiohttp.ClientError):
@@ -6,24 +6,43 @@ import logging
from pathlib import Path
import tarfile
from tempfile import TemporaryDirectory
from typing import Any, Dict, Optional

from Crypto.Cipher import AES
from Crypto.Util import Padding
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import padding
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
import voluptuous as vol
from voluptuous.humanize import humanize_error

from .validate import SCHEMA_SNAPSHOT, ALL_FOLDERS
from .utils import (
remove_folder, password_to_key, password_for_validating, key_to_iv)
from ..const import (
ATTR_SLUG, ATTR_NAME, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_VERSION, ATTR_TYPE, ATTR_IMAGE,
ATTR_PORT, ATTR_SSL, ATTR_PASSWORD, ATTR_WATCHDOG, ATTR_BOOT, ATTR_CRYPTO,
ATTR_LAST_VERSION, ATTR_PROTECTED, ATTR_WAIT_BOOT, ATTR_SIZE,
ATTR_REFRESH_TOKEN, CRYPTO_AES128)
from ..coresys import CoreSysAttributes
ATTR_ADDONS,
ATTR_BOOT,
ATTR_CRYPTO,
ATTR_DATE,
ATTR_FOLDERS,
ATTR_HOMEASSISTANT,
ATTR_IMAGE,
ATTR_LAST_VERSION,
ATTR_NAME,
ATTR_PASSWORD,
ATTR_PORT,
ATTR_PROTECTED,
ATTR_REFRESH_TOKEN,
ATTR_REPOSITORIES,
ATTR_SIZE,
ATTR_SLUG,
ATTR_SSL,
ATTR_TYPE,
ATTR_VERSION,
ATTR_WAIT_BOOT,
ATTR_WATCHDOG,
CRYPTO_AES128,
)
from ..coresys import CoreSys, CoreSysAttributes
from ..utils.json import write_json_file
from ..utils.tar import SecureTarFile
from .utils import key_to_iv, password_for_validating, password_to_key, remove_folder
from .validate import ALL_FOLDERS, SCHEMA_SNAPSHOT

_LOGGER = logging.getLogger(__name__)

@@ -31,14 +50,14 @@ _LOGGER = logging.getLogger(__name__)
class Snapshot(CoreSysAttributes):
"""A single Hass.io snapshot."""

def __init__(self, coresys, tar_file):
def __init__(self, coresys: CoreSys, tar_file: Path):
"""Initialize a snapshot."""
self.coresys = coresys
self._tarfile = tar_file
self._data = {}
self.coresys: CoreSys = coresys
self._tarfile: Path = tar_file
self._data: Dict[str, Any] = {}
self._tmp = None
self._key = None
self._aes = None
self._key: Optional[bytes] = None
self._aes: Optional[Cipher] = None

@property
def slug(self):

@@ -130,13 +149,11 @@ class Snapshot(CoreSysAttributes):

# Set password
if password:
self._key = password_to_key(password)
self._aes = AES.new(
self._key, AES.MODE_CBC, iv=key_to_iv(self._key))
self._init_password(password)
self._data[ATTR_PROTECTED] = password_for_validating(password)
self._data[ATTR_CRYPTO] = CRYPTO_AES128

def set_password(self, password):
def set_password(self, password: str) -> bool:
"""Set the password for an existing snapshot."""
if not password:
return False

@@ -145,25 +162,39 @@ class Snapshot(CoreSysAttributes):
if validating != self._data[ATTR_PROTECTED]:
return False

self._key = password_to_key(password)
self._aes = AES.new(self._key, AES.MODE_CBC, iv=key_to_iv(self._key))
self._init_password(password)
return True

def _encrypt_data(self, data):
def _init_password(self, password: str) -> None:
"""Set password + init aes cipher."""
self._key = password_to_key(password)
self._aes = Cipher(
algorithms.AES(self._key),
modes.CBC(key_to_iv(self._key)),
backend=default_backend(),
)

def _encrypt_data(self, data: str) -> str:
"""Make data secure."""
if not self._key or data is None:
return data

return b64encode(
self._aes.encrypt(Padding.pad(data.encode(), 16))).decode()
encrypt = self._aes.encryptor()
padder = padding.PKCS7(128).padder()

def _decrypt_data(self, data):
data = padder.update(data.encode()) + padder.finalize()
return b64encode(encrypt.update(data)).decode()

def _decrypt_data(self, data: str) -> str:
"""Make data readable."""
if not self._key or data is None:
return data

return Padding.unpad(
self._aes.decrypt(b64decode(data)), 16).decode()
decrypt = self._aes.decryptor()
padder = padding.PKCS7(128).unpadder()

data = padder.update(decrypt.update(b64decode(data))) + padder.finalize()
return data.decode()

async def load(self):
"""Read snapshot.json from tar file."""
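For reference, a standalone round-trip in the style of `_init_password`, `_encrypt_data` and `_decrypt_data` above. The `derive_key`/`derive_iv` helpers are simplified stand-ins for the repository's `password_to_key`/`key_to_iv`, which this diff does not show:

```python
import hashlib
from base64 import b64decode, b64encode

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import padding
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes


def derive_key(password: str) -> bytes:
    """Hypothetical stand-in for password_to_key(): 16-byte key from a password."""
    return hashlib.sha256(password.encode()).digest()[:16]


def derive_iv(key: bytes) -> bytes:
    """Hypothetical stand-in for key_to_iv(): 16-byte IV derived from the key."""
    return hashlib.sha256(key).digest()[:16]


def encrypt_data(aes: Cipher, data: str) -> str:
    """PKCS7-pad, encrypt and base64-encode a string, mirroring _encrypt_data."""
    padder = padding.PKCS7(128).padder()
    padded = padder.update(data.encode()) + padder.finalize()
    return b64encode(aes.encryptor().update(padded)).decode()


def decrypt_data(aes: Cipher, data: str) -> str:
    """Base64-decode, decrypt and unpad a string, mirroring _decrypt_data."""
    unpadder = padding.PKCS7(128).unpadder()
    raw = aes.decryptor().update(b64decode(data))
    return (unpadder.update(raw) + unpadder.finalize()).decode()


key = derive_key("snapshot-password")
aes = Cipher(algorithms.AES(key), modes.CBC(derive_iv(key)), backend=default_backend())
token = encrypt_data(aes, "refresh-token-value")
assert decrypt_data(aes, token) == "refresh-token-value"
```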
@@ -2,14 +2,36 @@
import voluptuous as vol

from ..const import (
ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_NAME, ATTR_SLUG, ATTR_DATE,
ATTR_VERSION, ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_TYPE, ATTR_IMAGE,
ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG, ATTR_BOOT, ATTR_SIZE,
ATTR_LAST_VERSION, ATTR_WAIT_BOOT, ATTR_PROTECTED, ATTR_CRYPTO,
ATTR_ADDONS,
ATTR_BOOT,
ATTR_CRYPTO,
ATTR_DATE,
ATTR_FOLDERS,
ATTR_HOMEASSISTANT,
ATTR_IMAGE,
ATTR_LAST_VERSION,
ATTR_NAME,
ATTR_PASSWORD,
ATTR_PORT,
ATTR_PROTECTED,
ATTR_REFRESH_TOKEN,
FOLDER_SHARE, FOLDER_HOMEASSISTANT, FOLDER_ADDONS, FOLDER_SSL,
SNAPSHOT_FULL, SNAPSHOT_PARTIAL, CRYPTO_AES128)
from ..validate import NETWORK_PORT, REPOSITORIES, DOCKER_IMAGE
ATTR_REPOSITORIES,
ATTR_SIZE,
ATTR_SLUG,
ATTR_SSL,
ATTR_TYPE,
ATTR_VERSION,
ATTR_WAIT_BOOT,
ATTR_WATCHDOG,
CRYPTO_AES128,
FOLDER_ADDONS,
FOLDER_HOMEASSISTANT,
FOLDER_SHARE,
FOLDER_SSL,
SNAPSHOT_FULL,
SNAPSHOT_PARTIAL,
)
from ..validate import DOCKER_IMAGE, NETWORK_PORT, REPOSITORIES

ALL_FOLDERS = [FOLDER_HOMEASSISTANT, FOLDER_SHARE, FOLDER_ADDONS, FOLDER_SSL]

@@ -24,34 +46,51 @@ def unique_addons(addons_list):


# pylint: disable=no-value-for-parameter
SCHEMA_SNAPSHOT = vol.Schema({
vol.Required(ATTR_SLUG): vol.Coerce(str),
vol.Required(ATTR_TYPE): vol.In([SNAPSHOT_FULL, SNAPSHOT_PARTIAL]),
vol.Required(ATTR_NAME): vol.Coerce(str),
vol.Required(ATTR_DATE): vol.Coerce(str),
vol.Inclusive(ATTR_PROTECTED, 'encrypted'):
vol.All(vol.Coerce(str), vol.Length(min=1, max=1)),
vol.Inclusive(ATTR_CRYPTO, 'encrypted'): CRYPTO_AES128,
vol.Optional(ATTR_HOMEASSISTANT, default=dict): vol.Schema({
vol.Optional(ATTR_VERSION): vol.Coerce(str),
vol.Inclusive(ATTR_IMAGE, 'custom_hass'): DOCKER_IMAGE,
vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'): vol.Coerce(str),
vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
vol.Optional(ATTR_PASSWORD): vol.Maybe(vol.Coerce(str)),
vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(vol.Coerce(str)),
vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
vol.Optional(ATTR_WAIT_BOOT, default=600):
vol.All(vol.Coerce(int), vol.Range(min=60)),
}, extra=vol.REMOVE_EXTRA),
vol.Optional(ATTR_FOLDERS, default=list):
vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
vol.Optional(ATTR_ADDONS, default=list): vol.All([vol.Schema({
SCHEMA_SNAPSHOT = vol.Schema(
{
vol.Required(ATTR_SLUG): vol.Coerce(str),
vol.Required(ATTR_TYPE): vol.In([SNAPSHOT_FULL, SNAPSHOT_PARTIAL]),
vol.Required(ATTR_NAME): vol.Coerce(str),
vol.Required(ATTR_VERSION): vol.Coerce(str),
vol.Optional(ATTR_SIZE, default=0): vol.Coerce(float),
}, extra=vol.REMOVE_EXTRA)], unique_addons),
vol.Optional(ATTR_REPOSITORIES, default=list): REPOSITORIES,
}, extra=vol.ALLOW_EXTRA)
vol.Required(ATTR_DATE): vol.Coerce(str),
vol.Inclusive(ATTR_PROTECTED, "encrypted"): vol.All(
vol.Coerce(str), vol.Length(min=1, max=1)
),
vol.Inclusive(ATTR_CRYPTO, "encrypted"): CRYPTO_AES128,
vol.Optional(ATTR_HOMEASSISTANT, default=dict): vol.Schema(
{
vol.Optional(ATTR_VERSION): vol.Coerce(str),
vol.Inclusive(ATTR_IMAGE, "custom_hass"): DOCKER_IMAGE,
vol.Inclusive(ATTR_LAST_VERSION, "custom_hass"): vol.Coerce(str),
vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
vol.Optional(ATTR_PASSWORD): vol.Maybe(vol.Coerce(str)),
vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(vol.Coerce(str)),
vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
vol.Optional(ATTR_WAIT_BOOT, default=600): vol.All(
vol.Coerce(int), vol.Range(min=60)
),
},
extra=vol.REMOVE_EXTRA,
),
vol.Optional(ATTR_FOLDERS, default=list): vol.All(
[vol.In(ALL_FOLDERS)], vol.Unique()
),
vol.Optional(ATTR_ADDONS, default=list): vol.All(
[
vol.Schema(
{
vol.Required(ATTR_SLUG): vol.Coerce(str),
vol.Required(ATTR_NAME): vol.Coerce(str),
vol.Required(ATTR_VERSION): vol.Coerce(str),
vol.Optional(ATTR_SIZE, default=0): vol.Coerce(float),
},
extra=vol.REMOVE_EXTRA,
)
],
unique_addons,
),
vol.Optional(ATTR_REPOSITORIES, default=list): REPOSITORIES,
},
extra=vol.ALLOW_EXTRA,
)
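One detail of the schema above worth calling out: `vol.Inclusive(..., "encrypted")` ties `ATTR_PROTECTED` and `ATTR_CRYPTO` into a group that must be supplied together or not at all. A trimmed-down sketch, where plain key names and the literal "aes128" stand in for the ATTR_* and CRYPTO_AES128 constants:

```python
import voluptuous as vol

# Minimal illustration of the "encrypted" inclusion group used in SCHEMA_SNAPSHOT.
schema = vol.Schema(
    {
        vol.Required("slug"): vol.Coerce(str),
        vol.Inclusive("protected", "encrypted"): vol.All(
            vol.Coerce(str), vol.Length(min=1, max=1)
        ),
        vol.Inclusive("crypto", "encrypted"): "aes128",
    }
)

schema({"slug": "abc"})  # valid: the encrypted group is omitted entirely
schema({"slug": "abc", "protected": "x", "crypto": "aes128"})  # valid: group complete
try:
    schema({"slug": "abc", "protected": "x"})  # invalid: "crypto" is missing
except vol.MultipleInvalid as err:
    print(err)
```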
@@ -1,7 +1,8 @@
"""Tools file for Hass.io."""
from datetime import datetime, timedelta, timezone
from datetime import datetime, timedelta, timezone, tzinfo
import logging
import re
from typing import Any, Dict, Optional

import pytz

@@ -14,10 +15,10 @@ _LOGGER = logging.getLogger(__name__)
# All rights reserved.
# https://github.com/django/django/blob/master/LICENSE
DATETIME_RE = re.compile(
r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})'
r'[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})'
r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?'
r'(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$'
r"(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})"
r"[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})"
r"(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?"
r"(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$"
)

@@ -35,25 +36,25 @@ def parse_datetime(dt_str):
match = DATETIME_RE.match(dt_str)
if not match:
return None
kws = match.groupdict() # type: Dict[str, Any]
if kws['microsecond']:
kws['microsecond'] = kws['microsecond'].ljust(6, '0')
tzinfo_str = kws.pop('tzinfo')
kws: Dict[str, Any] = match.groupdict()
if kws["microsecond"]:
kws["microsecond"] = kws["microsecond"].ljust(6, "0")
tzinfo_str = kws.pop("tzinfo")

tzinfo = None # type: Optional[dt.tzinfo]
if tzinfo_str == 'Z':
tzinfo = UTC
tzinfo_val: Optional[tzinfo] = None
if tzinfo_str == "Z":
tzinfo_val = UTC
elif tzinfo_str is not None:
offset_mins = int(tzinfo_str[-2:]) if len(tzinfo_str) > 3 else 0
offset_hours = int(tzinfo_str[1:3])
offset = timedelta(hours=offset_hours, minutes=offset_mins)
if tzinfo_str[0] == '-':
if tzinfo_str[0] == "-":
offset = -offset
tzinfo = timezone(offset)
tzinfo_val = timezone(offset)
else:
tzinfo = None
tzinfo_val = None
kws = {k: int(v) for k, v in kws.items() if v is not None}
kws['tzinfo'] = tzinfo
kws["tzinfo"] = tzinfo_val
return datetime(**kws)
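The offset handling in `parse_datetime` above reads hours from characters 1-3 of the tz string and minutes from the last two characters when the string is longer than three characters. A small standalone sketch of just that step; the helper name `parse_offset` is hypothetical and not part of the module:

```python
from datetime import timedelta, timezone


def parse_offset(tzinfo_str: str) -> timezone:
    """Turn a string such as '+01:00' or '-0530' into a timezone object."""
    offset_mins = int(tzinfo_str[-2:]) if len(tzinfo_str) > 3 else 0
    offset_hours = int(tzinfo_str[1:3])
    offset = timedelta(hours=offset_hours, minutes=offset_mins)
    if tzinfo_str[0] == "-":
        offset = -offset
    return timezone(offset)


print(parse_offset("+01:00"))  # UTC+01:00
print(parse_offset("-0530"))   # UTC-05:30
```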
@@ -1,35 +1,50 @@
"""Tarfile fileobject handler for encrypted files."""
import tarfile
import hashlib
import os
from pathlib import Path
import tarfile
from typing import IO, Optional

from Crypto.Cipher import AES
from Crypto.Random import get_random_bytes
from Crypto.Util.Padding import pad
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import padding
from cryptography.hazmat.primitives.ciphers import (
CipherContext,
Cipher,
algorithms,
modes,
)

BLOCK_SIZE = 16
BLOCK_SIZE_BITS = 128

MOD_READ = 'r'
MOD_WRITE = 'w'
MOD_READ = "r"
MOD_WRITE = "w"


class SecureTarFile:
"""Handle encrypted files for tarfile library."""

def __init__(self, name, mode, key=None, gzip=True):
def __init__(
self, name: Path, mode: str, key: Optional[bytes] = None, gzip: bool = True
) -> None:
"""Initialize encryption handler."""
self._file = None
self._mode = mode
self._name = name
self._file: Optional[IO[bytes]] = None
self._mode: str = mode
self._name: Path = name

# Tarfile options
self._tar = None
self._tar_mode = f"{mode}|gz" if gzip else f"{mode}|"
self._tar: Optional[tarfile.TarFile] = None
self._tar_mode: str = f"{mode}|gz" if gzip else f"{mode}|"

# Encryption/Decription
self._aes = None
self._key = key
# Encryption/Description
self._aes: Optional[Cipher] = None
self._key: bytes = key

def __enter__(self):
# Function helper
self._decrypt: Optional[CipherContext] = None
self._encrypt: Optional[CipherContext] = None

def __enter__(self) -> tarfile.TarFile:
"""Start context manager tarfile."""
if not self._key:
self._tar = tarfile.open(name=str(self._name), mode=self._tar_mode)

@@ -42,45 +57,55 @@ class SecureTarFile:
if self._mode == MOD_READ:
cbc_rand = self._file.read(16)
else:
cbc_rand = get_random_bytes(16)
cbc_rand = os.urandom(16)
self._file.write(cbc_rand)
self._aes = AES.new(
self._key, AES.MODE_CBC, iv=_generate_iv(self._key, cbc_rand))

# Create Cipher
self._aes = Cipher(
algorithms.AES(self._key),
modes.CBC(_generate_iv(self._key, cbc_rand)),
backend=default_backend(),
)

self._decrypt = self._aes.decryptor()
self._encrypt = self._aes.encryptor()

self._tar = tarfile.open(fileobj=self, mode=self._tar_mode)
return self._tar

def __exit__(self, exc_type, exc_value, traceback):
def __exit__(self, exc_type, exc_value, traceback) -> None:
"""Close file."""
if self._tar:
self._tar.close()
if self._file:
self._file.close()

def write(self, data):
def write(self, data: bytes) -> None:
"""Write data."""
if len(data) % BLOCK_SIZE != 0:
data = pad(data, BLOCK_SIZE)
self._file.write(self._aes.encrypt(data))
padder = padding.PKCS7(BLOCK_SIZE_BITS).padder()
data = padder.update(data) + padder.finalize()

def read(self, size=0):
self._file.write(self._encrypt.update(data))

def read(self, size: int = 0) -> bytes:
"""Read data."""
return self._aes.decrypt(self._file.read(size))
return self._decrypt.update(self._file.read(size))

@property
def path(self):
def path(self) -> Path:
"""Return path object of tarfile."""
return self._name

@property
def size(self):
def size(self) -> int:
"""Return snapshot size."""
if not self._name.is_file():
return 0
return round(self._name.stat().st_size / 1048576, 2) # calc mbyte
return round(self._name.stat().st_size / 1_048_576, 2) # calc mbyte


def _generate_iv(key, salt):
def _generate_iv(key: bytes, salt: bytes) -> bytes:
"""Generate an iv from data."""
temp_iv = key + salt
for _ in range(100):
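For context, `SecureTarFile` is used as a context manager that yields a regular `tarfile.TarFile` while encrypting or decrypting the underlying stream. A hedged usage sketch: the paths are illustrative, and the import path `hassio.utils.tar` is an assumption inferred from the snapshot module's `from ..utils.tar import SecureTarFile`:

```python
import os
from pathlib import Path

# Assumed import path; taken from the relative import in the snapshot module.
from hassio.utils.tar import SecureTarFile

key = os.urandom(16)  # illustrative AES-128 key; snapshots derive theirs from a password

# Write mode: a random 16-byte prefix is stored, the IV is derived from key + prefix,
# and every block pushed through write() is encrypted before it reaches disk.
with SecureTarFile(Path("/tmp/backup.tar.gz"), "w", key=key) as tar:
    tar.add("/tmp/data", arcname="data")

# Read mode: the prefix is read back, the same IV is regenerated, and read() decrypts.
with SecureTarFile(Path("/tmp/backup.tar.gz"), "r", key=key) as tar:
    tar.extractall("/tmp/restore")
```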
@@ -6,6 +6,7 @@ import voluptuous as vol

def schema_or(schema):
"""Allow schema or empty."""

def _wrapper(value):
"""Wrapper for validator."""
if not value:

@@ -22,7 +23,7 @@ def validate_timezone(timezone):
except pytz.exceptions.UnknownTimeZoneError:
raise vol.Invalid(
"Invalid time zone passed in. Valid options can be found here: "
"http://en.wikipedia.org/wiki/List_of_tz_database_time_zones") \
from None
"http://en.wikipedia.org/wiki/List_of_tz_database_time_zones"
) from None

return timezone
pylintrc (27 changed lines)
@@ -15,26 +15,33 @@ reports=no
# abstract-method - with intro of async there are always methods missing

disable=
locally-disabled,
duplicate-code,
cyclic-import,
abstract-class-little-used,
abstract-class-not-used,
unused-argument,
abstract-method,
cyclic-import,
duplicate-code,
global-statement,
locally-disabled,
not-context-manager,
redefined-variable-type,
too-few-public-methods,
too-many-arguments,
too-many-branches,
too-many-instance-attributes,
too-many-lines,
too-many-locals,
too-many-public-methods,
too-many-return-statements,
too-many-statements,
too-many-lines,
unused-argument,
line-too-long,
bad-continuation,
too-few-public-methods,
abstract-method,
no-else-return,
useless-return,
not-async-context-manager
no-self-use,
not-async-context-manager,
too-many-locals,
too-many-branches,
no-else-return

[EXCEPTIONS]
overgeneral-exceptions=Exception,HomeAssistantError
overgeneral-exceptions=Exception
@@ -1,13 +1,13 @@
attrs==18.2.0
async_timeout==3.0.1
aiohttp==3.5.4
docker==3.7.0
colorlog==3.1.4
voluptuous==0.11.5
gitpython==2.1.10
pytz==2018.5
pyudev==0.21.0
pycryptodome==3.6.6
async_timeout==3.0.1
attrs==18.2.0
cchardet==2.1.4
colorlog==4.0.2
cpe==1.2.1
cryptography==2.5
docker==3.7.0
gitpython==2.1.11
pytz==2018.9
pyudev==0.21.0
uvloop==0.11.3
cchardet==2.1.1
voluptuous==0.11.5
@@ -1,4 +1,4 @@
flake8==3.6.0
flake8==3.7.5
pylint==2.2.2
pytest==4.1.1
pytest-timeout==1.3.3
setup.cfg (12 changed lines)
@@ -1,5 +1,8 @@
[isort]
multi_line_output = 4
multi_line_output = 3
include_trailing_comma=True
force_grid_wrap=0
line_length=88
indent = " "
not_skip = __init__.py
force_sort_within_sections = true

@@ -9,9 +12,6 @@ forced_separate = tests
combine_as_imports = true
use_parentheses = true

[yapf]
based_on_style = chromium
indent_width = 4

[flake8]
max-line-length = 80
max-line-length = 88
ignore = E501