Mirror of https://github.com/home-assistant/supervisor.git
Replace pycryptodome with cryptography (#923)
* Replace pycryptodome with cryptography
* Fix typing
* Fix typing
* Fix lints
* Fix build
* Add musl libc
* Fix lint
* Fix lint
* Fix algo
* Add more typing, fix crypto imports v2
* Fix padding
Parent: b6df37628d
Commit: 7f074142bf
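The heart of this change is swapping pycryptodome's AES/Padding helpers for the cryptography package's Cipher and PKCS7 primitives. Below is a minimal round-trip sketch of that mapping, not part of the diff itself; the sha256-based key and IV only stand in for the repo's password_to_key() and key_to_iv() helpers.

# Sketch of the pycryptodome -> cryptography migration pattern (illustrative only).
import hashlib
from base64 import b64decode, b64encode

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import padding
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

key = hashlib.sha256(b"snapshot password").digest()[:16]  # stand-in for password_to_key()
iv = hashlib.sha256(key).digest()[:16]                    # stand-in for key_to_iv()
aes = Cipher(algorithms.AES(key), modes.CBC(iv), backend=default_backend())

# Encrypt: PKCS7-pad to the 128-bit block size, then run one encryptor context.
padder = padding.PKCS7(128).padder()
padded = padder.update(b"secret value") + padder.finalize()
encryptor = aes.encryptor()
token = b64encode(encryptor.update(padded) + encryptor.finalize()).decode()

# Decrypt: reverse the steps with a decryptor context and an unpadder.
decryptor = aes.decryptor()
unpadder = padding.PKCS7(128).unpadder()
raw = decryptor.update(b64decode(token)) + decryptor.finalize()
plain = unpadder.update(raw) + unpadder.finalize()
assert plain == b"secret value"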
@@ -3,6 +3,9 @@ FROM $BUILD_FROM
 
 # Install base
 RUN apk add --no-cache \
+    openssl \
+    libffi \
+    musl \
     git \
     socat \
     glib \
@@ -14,6 +17,9 @@ COPY requirements.txt /usr/src/
 RUN apk add --no-cache --virtual .build-dependencies \
     make \
     g++ \
+    openssl-dev \
+    libffi-dev \
+    musl-dev \
     && export MAKEFLAGS="-j$(nproc)" \
     && pip3 install --no-cache-dir -r /usr/src/requirements.txt \
     && apk del .build-dependencies \
@@ -1,5 +1,6 @@
 """Util add-ons functions."""
 from __future__ import annotations
+
 import asyncio
 import hashlib
 import logging
@@ -7,10 +8,18 @@ from pathlib import Path
 import re
 from typing import TYPE_CHECKING
 
-from ..const import (PRIVILEGED_DAC_READ_SEARCH, PRIVILEGED_NET_ADMIN,
-                     PRIVILEGED_SYS_ADMIN, PRIVILEGED_SYS_MODULE,
-                     PRIVILEGED_SYS_PTRACE, PRIVILEGED_SYS_RAWIO, ROLE_ADMIN,
-                     ROLE_MANAGER, SECURITY_DISABLE, SECURITY_PROFILE)
+from ..const import (
+    PRIVILEGED_DAC_READ_SEARCH,
+    PRIVILEGED_NET_ADMIN,
+    PRIVILEGED_SYS_ADMIN,
+    PRIVILEGED_SYS_MODULE,
+    PRIVILEGED_SYS_PTRACE,
+    PRIVILEGED_SYS_RAWIO,
+    ROLE_ADMIN,
+    ROLE_MANAGER,
+    SECURITY_DISABLE,
+    SECURITY_PROFILE,
+)
 
 if TYPE_CHECKING:
     from .addon import Addon
@@ -38,16 +47,17 @@ def rating_security(addon: Addon) -> int:
         rating += 1
 
     # Privileged options
-    # pylint: disable=bad-continuation
     if any(
-            privilege in addon.privileged for privilege in (
+        privilege in addon.privileged
+        for privilege in (
             PRIVILEGED_NET_ADMIN,
             PRIVILEGED_SYS_ADMIN,
             PRIVILEGED_SYS_RAWIO,
             PRIVILEGED_SYS_PTRACE,
             PRIVILEGED_SYS_MODULE,
             PRIVILEGED_DAC_READ_SEARCH,
-            )):
+        )
+    ):
         rating += -1
 
     # API Hass.io role
@@ -107,7 +117,8 @@ async def remove_data(folder: Path) -> None:
     """Remove folder and reset privileged."""
    try:
        proc = await asyncio.create_subprocess_exec(
-            "rm", "-rf", str(folder), stdout=asyncio.subprocess.DEVNULL)
+            "rm", "-rf", str(folder), stdout=asyncio.subprocess.DEVNULL
+        )
 
        _, error_msg = await proc.communicate()
    except OSError as err:
@@ -144,8 +144,7 @@ class APIProxy(CoreSysAttributes):
                 return client
 
             # Renew the Token is invalid
-            if (data.get('type') == 'invalid_auth' and
-                    self.sys_homeassistant.refresh_token):
+            if data.get('type') == 'invalid_auth' and self.sys_homeassistant.refresh_token:
                 self.sys_homeassistant.access_token = None
                 return await self._websocket_client()
 
@@ -175,8 +174,7 @@ class APIProxy(CoreSysAttributes):
 
         # Check API access
         response = await server.receive_json()
-        hassio_token = (response.get('api_password') or
-                        response.get('access_token'))
+        hassio_token = response.get('api_password') or response.get('access_token')
         addon = self.sys_addons.from_token(hassio_token)
 
         if not addon or not addon.access_homeassistant_api:
@@ -369,8 +369,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
 
     async def ensure_access_token(self):
         """Ensures there is an access token."""
-        if (self.access_token is not None and
-                self._access_token_expires > datetime.utcnow()):
+        if self.access_token is not None and self._access_token_expires > datetime.utcnow():
             return
 
         with suppress(asyncio.TimeoutError, aiohttp.ClientError):
@@ -6,24 +6,43 @@ import logging
 from pathlib import Path
 import tarfile
 from tempfile import TemporaryDirectory
+from typing import Any, Dict, Optional
 
-from Crypto.Cipher import AES
-from Crypto.Util import Padding
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives import padding
+from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
 import voluptuous as vol
 from voluptuous.humanize import humanize_error
 
-from .validate import SCHEMA_SNAPSHOT, ALL_FOLDERS
-from .utils import (
-    remove_folder, password_to_key, password_for_validating, key_to_iv)
 from ..const import (
-    ATTR_SLUG, ATTR_NAME, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
-    ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_VERSION, ATTR_TYPE, ATTR_IMAGE,
-    ATTR_PORT, ATTR_SSL, ATTR_PASSWORD, ATTR_WATCHDOG, ATTR_BOOT, ATTR_CRYPTO,
-    ATTR_LAST_VERSION, ATTR_PROTECTED, ATTR_WAIT_BOOT, ATTR_SIZE,
-    ATTR_REFRESH_TOKEN, CRYPTO_AES128)
-from ..coresys import CoreSysAttributes
+    ATTR_ADDONS,
+    ATTR_BOOT,
+    ATTR_CRYPTO,
+    ATTR_DATE,
+    ATTR_FOLDERS,
+    ATTR_HOMEASSISTANT,
+    ATTR_IMAGE,
+    ATTR_LAST_VERSION,
+    ATTR_NAME,
+    ATTR_PASSWORD,
+    ATTR_PORT,
+    ATTR_PROTECTED,
+    ATTR_REFRESH_TOKEN,
+    ATTR_REPOSITORIES,
+    ATTR_SIZE,
+    ATTR_SLUG,
+    ATTR_SSL,
+    ATTR_TYPE,
+    ATTR_VERSION,
+    ATTR_WAIT_BOOT,
+    ATTR_WATCHDOG,
+    CRYPTO_AES128,
+)
+from ..coresys import CoreSys, CoreSysAttributes
 from ..utils.json import write_json_file
 from ..utils.tar import SecureTarFile
+from .utils import key_to_iv, password_for_validating, password_to_key, remove_folder
+from .validate import ALL_FOLDERS, SCHEMA_SNAPSHOT
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -31,14 +50,14 @@ _LOGGER = logging.getLogger(__name__)
 class Snapshot(CoreSysAttributes):
     """A single Hass.io snapshot."""
 
-    def __init__(self, coresys, tar_file):
+    def __init__(self, coresys: CoreSys, tar_file: Path):
         """Initialize a snapshot."""
-        self.coresys = coresys
-        self._tarfile = tar_file
-        self._data = {}
+        self.coresys: CoreSys = coresys
+        self._tarfile: Path = tar_file
+        self._data: Dict[str, Any] = {}
         self._tmp = None
-        self._key = None
-        self._aes = None
+        self._key: Optional[bytes] = None
+        self._aes: Optional[Cipher] = None
 
     @property
     def slug(self):
@@ -130,13 +149,11 @@ class Snapshot(CoreSysAttributes):
 
         # Set password
         if password:
-            self._key = password_to_key(password)
-            self._aes = AES.new(
-                self._key, AES.MODE_CBC, iv=key_to_iv(self._key))
+            self._init_password(password)
             self._data[ATTR_PROTECTED] = password_for_validating(password)
             self._data[ATTR_CRYPTO] = CRYPTO_AES128
 
-    def set_password(self, password):
+    def set_password(self, password: str) -> bool:
         """Set the password for an existing snapshot."""
         if not password:
             return False
@@ -145,25 +162,39 @@ class Snapshot(CoreSysAttributes):
         if validating != self._data[ATTR_PROTECTED]:
             return False
 
-        self._key = password_to_key(password)
-        self._aes = AES.new(self._key, AES.MODE_CBC, iv=key_to_iv(self._key))
+        self._init_password(password)
         return True
 
-    def _encrypt_data(self, data):
+    def _init_password(self, password: str) -> None:
+        """Set password + init aes cipher."""
+        self._key = password_to_key(password)
+        self._aes = Cipher(
+            algorithms.AES(self._key),
+            modes.CBC(key_to_iv(self._key)),
+            backend=default_backend(),
+        )
+
+    def _encrypt_data(self, data: str) -> str:
         """Make data secure."""
         if not self._key or data is None:
             return data
 
-        return b64encode(
-            self._aes.encrypt(Padding.pad(data.encode(), 16))).decode()
+        encrypt = self._aes.encryptor()
+        padder = padding.PKCS7(128).padder()
 
-    def _decrypt_data(self, data):
+        data = padder.update(data.encode()) + padder.finalize()
+        return b64encode(encrypt.update(data)).decode()
+
+    def _decrypt_data(self, data: str) -> str:
         """Make data readable."""
         if not self._key or data is None:
             return data
 
-        return Padding.unpad(
-            self._aes.decrypt(b64decode(data)), 16).decode()
+        decrypt = self._aes.decryptor()
+        padder = padding.PKCS7(128).unpadder()
+
+        data = padder.update(decrypt.update(b64decode(data))) + padder.finalize()
+        return data.decode()
 
     async def load(self):
         """Read snapshot.json from tar file."""
@@ -2,14 +2,36 @@
 import voluptuous as vol
 
 from ..const import (
-    ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_NAME, ATTR_SLUG, ATTR_DATE,
-    ATTR_VERSION, ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_TYPE, ATTR_IMAGE,
-    ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG, ATTR_BOOT, ATTR_SIZE,
-    ATTR_LAST_VERSION, ATTR_WAIT_BOOT, ATTR_PROTECTED, ATTR_CRYPTO,
+    ATTR_ADDONS,
+    ATTR_BOOT,
+    ATTR_CRYPTO,
+    ATTR_DATE,
+    ATTR_FOLDERS,
+    ATTR_HOMEASSISTANT,
+    ATTR_IMAGE,
+    ATTR_LAST_VERSION,
+    ATTR_NAME,
+    ATTR_PASSWORD,
+    ATTR_PORT,
+    ATTR_PROTECTED,
     ATTR_REFRESH_TOKEN,
-    FOLDER_SHARE, FOLDER_HOMEASSISTANT, FOLDER_ADDONS, FOLDER_SSL,
-    SNAPSHOT_FULL, SNAPSHOT_PARTIAL, CRYPTO_AES128)
-from ..validate import NETWORK_PORT, REPOSITORIES, DOCKER_IMAGE
+    ATTR_REPOSITORIES,
+    ATTR_SIZE,
+    ATTR_SLUG,
+    ATTR_SSL,
+    ATTR_TYPE,
+    ATTR_VERSION,
+    ATTR_WAIT_BOOT,
+    ATTR_WATCHDOG,
+    CRYPTO_AES128,
+    FOLDER_ADDONS,
+    FOLDER_HOMEASSISTANT,
+    FOLDER_SHARE,
+    FOLDER_SSL,
+    SNAPSHOT_FULL,
+    SNAPSHOT_PARTIAL,
+)
+from ..validate import DOCKER_IMAGE, NETWORK_PORT, REPOSITORIES
 
 ALL_FOLDERS = [FOLDER_HOMEASSISTANT, FOLDER_SHARE, FOLDER_ADDONS, FOLDER_SSL]
 
@@ -24,34 +46,51 @@ def unique_addons(addons_list):
 
 
 # pylint: disable=no-value-for-parameter
-SCHEMA_SNAPSHOT = vol.Schema({
-    vol.Required(ATTR_SLUG): vol.Coerce(str),
-    vol.Required(ATTR_TYPE): vol.In([SNAPSHOT_FULL, SNAPSHOT_PARTIAL]),
-    vol.Required(ATTR_NAME): vol.Coerce(str),
-    vol.Required(ATTR_DATE): vol.Coerce(str),
-    vol.Inclusive(ATTR_PROTECTED, 'encrypted'):
-        vol.All(vol.Coerce(str), vol.Length(min=1, max=1)),
-    vol.Inclusive(ATTR_CRYPTO, 'encrypted'): CRYPTO_AES128,
-    vol.Optional(ATTR_HOMEASSISTANT, default=dict): vol.Schema({
-        vol.Optional(ATTR_VERSION): vol.Coerce(str),
-        vol.Inclusive(ATTR_IMAGE, 'custom_hass'): DOCKER_IMAGE,
-        vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'): vol.Coerce(str),
-        vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
-        vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
-        vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
-        vol.Optional(ATTR_PASSWORD): vol.Maybe(vol.Coerce(str)),
-        vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(vol.Coerce(str)),
-        vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
-        vol.Optional(ATTR_WAIT_BOOT, default=600):
-            vol.All(vol.Coerce(int), vol.Range(min=60)),
-    }, extra=vol.REMOVE_EXTRA),
-    vol.Optional(ATTR_FOLDERS, default=list):
-        vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
-    vol.Optional(ATTR_ADDONS, default=list): vol.All([vol.Schema({
-        vol.Required(ATTR_SLUG): vol.Coerce(str),
-        vol.Required(ATTR_NAME): vol.Coerce(str),
-        vol.Required(ATTR_VERSION): vol.Coerce(str),
-        vol.Optional(ATTR_SIZE, default=0): vol.Coerce(float),
-    }, extra=vol.REMOVE_EXTRA)], unique_addons),
-    vol.Optional(ATTR_REPOSITORIES, default=list): REPOSITORIES,
-}, extra=vol.ALLOW_EXTRA)
+SCHEMA_SNAPSHOT = vol.Schema(
+    {
+        vol.Required(ATTR_SLUG): vol.Coerce(str),
+        vol.Required(ATTR_TYPE): vol.In([SNAPSHOT_FULL, SNAPSHOT_PARTIAL]),
+        vol.Required(ATTR_NAME): vol.Coerce(str),
+        vol.Required(ATTR_DATE): vol.Coerce(str),
+        vol.Inclusive(ATTR_PROTECTED, "encrypted"): vol.All(
+            vol.Coerce(str), vol.Length(min=1, max=1)
+        ),
+        vol.Inclusive(ATTR_CRYPTO, "encrypted"): CRYPTO_AES128,
+        vol.Optional(ATTR_HOMEASSISTANT, default=dict): vol.Schema(
+            {
+                vol.Optional(ATTR_VERSION): vol.Coerce(str),
+                vol.Inclusive(ATTR_IMAGE, "custom_hass"): DOCKER_IMAGE,
+                vol.Inclusive(ATTR_LAST_VERSION, "custom_hass"): vol.Coerce(str),
+                vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
+                vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
+                vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
+                vol.Optional(ATTR_PASSWORD): vol.Maybe(vol.Coerce(str)),
+                vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(vol.Coerce(str)),
+                vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
+                vol.Optional(ATTR_WAIT_BOOT, default=600): vol.All(
+                    vol.Coerce(int), vol.Range(min=60)
+                ),
+            },
+            extra=vol.REMOVE_EXTRA,
+        ),
+        vol.Optional(ATTR_FOLDERS, default=list): vol.All(
+            [vol.In(ALL_FOLDERS)], vol.Unique()
+        ),
+        vol.Optional(ATTR_ADDONS, default=list): vol.All(
+            [
+                vol.Schema(
+                    {
+                        vol.Required(ATTR_SLUG): vol.Coerce(str),
+                        vol.Required(ATTR_NAME): vol.Coerce(str),
+                        vol.Required(ATTR_VERSION): vol.Coerce(str),
+                        vol.Optional(ATTR_SIZE, default=0): vol.Coerce(float),
+                    },
+                    extra=vol.REMOVE_EXTRA,
+                )
+            ],
+            unique_addons,
+        ),
+        vol.Optional(ATTR_REPOSITORIES, default=list): REPOSITORIES,
+    },
+    extra=vol.ALLOW_EXTRA,
+)
@@ -1,7 +1,8 @@
 """Tools file for Hass.io."""
-from datetime import datetime, timedelta, timezone
+from datetime import datetime, timedelta, timezone, tzinfo
 import logging
 import re
+from typing import Any, Dict, Optional
 
 import pytz
 
@@ -14,10 +15,10 @@ _LOGGER = logging.getLogger(__name__)
 # All rights reserved.
 # https://github.com/django/django/blob/master/LICENSE
 DATETIME_RE = re.compile(
-    r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})'
-    r'[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})'
-    r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?'
-    r'(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$'
+    r"(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})"
+    r"[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})"
+    r"(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?"
+    r"(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$"
 )
 
 
@@ -35,25 +36,25 @@ def parse_datetime(dt_str):
     match = DATETIME_RE.match(dt_str)
     if not match:
         return None
-    kws = match.groupdict()  # type: Dict[str, Any]
-    if kws['microsecond']:
-        kws['microsecond'] = kws['microsecond'].ljust(6, '0')
-    tzinfo_str = kws.pop('tzinfo')
+    kws: Dict[str, Any] = match.groupdict()
+    if kws["microsecond"]:
+        kws["microsecond"] = kws["microsecond"].ljust(6, "0")
+    tzinfo_str = kws.pop("tzinfo")
 
-    tzinfo = None  # type: Optional[dt.tzinfo]
-    if tzinfo_str == 'Z':
-        tzinfo = UTC
+    tzinfo_val: Optional[tzinfo] = None
+    if tzinfo_str == "Z":
+        tzinfo_val = UTC
     elif tzinfo_str is not None:
         offset_mins = int(tzinfo_str[-2:]) if len(tzinfo_str) > 3 else 0
         offset_hours = int(tzinfo_str[1:3])
         offset = timedelta(hours=offset_hours, minutes=offset_mins)
-        if tzinfo_str[0] == '-':
+        if tzinfo_str[0] == "-":
             offset = -offset
-        tzinfo = timezone(offset)
+        tzinfo_val = timezone(offset)
     else:
-        tzinfo = None
+        tzinfo_val = None
     kws = {k: int(v) for k, v in kws.items() if v is not None}
-    kws['tzinfo'] = tzinfo
+    kws["tzinfo"] = tzinfo_val
     return datetime(**kws)
 
 
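For reference, a quick usage sketch of the refactored helper above; the import path hassio.utils.dt is assumed from this tree and is not part of the commit:

# Hypothetical usage; module path assumed.
from hassio.utils.dt import parse_datetime

dt = parse_datetime("2019-02-05T12:30:45.123+01:00")
# Microseconds are right-padded to 6 digits and the offset becomes a timezone(),
# so dt.isoformat() == "2019-02-05T12:30:45.123000+01:00"
print(dt.isoformat())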
@@ -1,35 +1,50 @@
 """Tarfile fileobject handler for encrypted files."""
-import tarfile
 import hashlib
+import os
+from pathlib import Path
+import tarfile
+from typing import IO, Optional
 
-from Crypto.Cipher import AES
-from Crypto.Random import get_random_bytes
-from Crypto.Util.Padding import pad
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives import padding
+from cryptography.hazmat.primitives.ciphers import (
+    CipherContext,
+    Cipher,
+    algorithms,
+    modes,
+)
 
 BLOCK_SIZE = 16
+BLOCK_SIZE_BITS = 128
 
-MOD_READ = 'r'
-MOD_WRITE = 'w'
+MOD_READ = "r"
+MOD_WRITE = "w"
 
 
 class SecureTarFile:
     """Handle encrypted files for tarfile library."""
 
-    def __init__(self, name, mode, key=None, gzip=True):
+    def __init__(
+        self, name: Path, mode: str, key: Optional[bytes] = None, gzip: bool = True
+    ) -> None:
         """Initialize encryption handler."""
-        self._file = None
-        self._mode = mode
-        self._name = name
+        self._file: Optional[IO[bytes]] = None
+        self._mode: str = mode
+        self._name: Path = name
 
         # Tarfile options
-        self._tar = None
-        self._tar_mode = f"{mode}|gz" if gzip else f"{mode}|"
+        self._tar: Optional[tarfile.TarFile] = None
+        self._tar_mode: str = f"{mode}|gz" if gzip else f"{mode}|"
 
-        # Encryption/Decription
-        self._aes = None
-        self._key = key
+        # Encryption/Description
+        self._aes: Optional[Cipher] = None
+        self._key: bytes = key
 
-    def __enter__(self):
+        # Function helper
+        self._decrypt: Optional[CipherContext] = None
+        self._encrypt: Optional[CipherContext] = None
+
+    def __enter__(self) -> tarfile.TarFile:
         """Start context manager tarfile."""
         if not self._key:
             self._tar = tarfile.open(name=str(self._name), mode=self._tar_mode)
@@ -42,45 +57,55 @@ class SecureTarFile:
         if self._mode == MOD_READ:
             cbc_rand = self._file.read(16)
         else:
-            cbc_rand = get_random_bytes(16)
+            cbc_rand = os.urandom(16)
             self._file.write(cbc_rand)
-        self._aes = AES.new(
-            self._key, AES.MODE_CBC, iv=_generate_iv(self._key, cbc_rand))
+
+        # Create Cipher
+        self._aes = Cipher(
+            algorithms.AES(self._key),
+            modes.CBC(_generate_iv(self._key, cbc_rand)),
+            backend=default_backend(),
+        )
+
+        self._decrypt = self._aes.decryptor()
+        self._encrypt = self._aes.encryptor()
+
         self._tar = tarfile.open(fileobj=self, mode=self._tar_mode)
         return self._tar
 
-    def __exit__(self, exc_type, exc_value, traceback):
+    def __exit__(self, exc_type, exc_value, traceback) -> None:
         """Close file."""
         if self._tar:
             self._tar.close()
         if self._file:
             self._file.close()
 
-    def write(self, data):
+    def write(self, data: bytes) -> None:
         """Write data."""
         if len(data) % BLOCK_SIZE != 0:
-            data = pad(data, BLOCK_SIZE)
-        self._file.write(self._aes.encrypt(data))
+            padder = padding.PKCS7(BLOCK_SIZE_BITS).padder()
+            data = padder.update(data) + padder.finalize()
+
+        self._file.write(self._encrypt.update(data))
 
-    def read(self, size=0):
+    def read(self, size: int = 0) -> bytes:
         """Read data."""
-        return self._aes.decrypt(self._file.read(size))
+        return self._decrypt.update(self._file.read(size))
 
     @property
-    def path(self):
+    def path(self) -> Path:
         """Return path object of tarfile."""
         return self._name
 
     @property
-    def size(self):
+    def size(self) -> int:
         """Return snapshot size."""
         if not self._name.is_file():
             return 0
-        return round(self._name.stat().st_size / 1048576, 2)  # calc mbyte
+        return round(self._name.stat().st_size / 1_048_576, 2)  # calc mbyte
 
 
-def _generate_iv(key, salt):
+def _generate_iv(key: bytes, salt: bytes) -> bytes:
     """Generate an iv from data."""
     temp_iv = key + salt
     for _ in range(100):
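The SecureTarFile change above is the streaming variant of the same migration: one Cipher, long-lived encryptor()/decryptor() contexts fed chunk by chunk, and os.urandom() replacing Crypto.Random. A self-contained sketch of that pattern follows; the random key and the IV shortcut are only stand-ins for the snapshot key and _generate_iv():

# Illustrative sketch, not part of the commit.
import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import padding
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

key = os.urandom(16)       # stand-in for the snapshot key
cbc_rand = os.urandom(16)  # SecureTarFile writes this salt at the start of the file
iv = cbc_rand              # stand-in for _generate_iv(key, cbc_rand)

cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=default_backend())
encryptor = cipher.encryptor()
decryptor = cipher.decryptor()

chunk = b"tar stream bytes..."        # arbitrary-length chunk from the tar stream
if len(chunk) % 16 != 0:              # pad only a non block-aligned (final) chunk
    padder = padding.PKCS7(128).padder()
    chunk = padder.update(chunk) + padder.finalize()

ciphertext = encryptor.update(chunk)  # the context keeps CBC state between calls
assert decryptor.update(ciphertext) == chunk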
@@ -6,6 +6,7 @@ import voluptuous as vol
 
 def schema_or(schema):
     """Allow schema or empty."""
+
     def _wrapper(value):
         """Wrapper for validator."""
         if not value:
@@ -22,7 +23,7 @@ def validate_timezone(timezone):
     except pytz.exceptions.UnknownTimeZoneError:
         raise vol.Invalid(
             "Invalid time zone passed in. Valid options can be found here: "
-            "http://en.wikipedia.org/wiki/List_of_tz_database_time_zones") \
-            from None
+            "http://en.wikipedia.org/wiki/List_of_tz_database_time_zones"
+        ) from None
 
     return timezone
pylintrc (27 changed lines)
@@ -15,26 +15,33 @@ reports=no
 # abstract-method - with intro of async there are always methods missing
 
 disable=
-    locally-disabled,
-    duplicate-code,
-    cyclic-import,
     abstract-class-little-used,
     abstract-class-not-used,
-    unused-argument,
+    abstract-method,
+    cyclic-import,
+    duplicate-code,
+    global-statement,
+    locally-disabled,
+    not-context-manager,
     redefined-variable-type,
+    too-few-public-methods,
     too-many-arguments,
     too-many-branches,
     too-many-instance-attributes,
+    too-many-lines,
     too-many-locals,
     too-many-public-methods,
     too-many-return-statements,
     too-many-statements,
-    too-many-lines,
+    unused-argument,
+    line-too-long,
+    bad-continuation,
     too-few-public-methods,
-    abstract-method,
-    no-else-return,
-    useless-return,
-    not-async-context-manager
+    no-self-use,
+    not-async-context-manager,
+    too-many-locals,
+    too-many-branches,
+    no-else-return
 
 [EXCEPTIONS]
-overgeneral-exceptions=Exception,HomeAssistantError
+overgeneral-exceptions=Exception
@@ -1,13 +1,13 @@
-attrs==18.2.0
-async_timeout==3.0.1
 aiohttp==3.5.4
-docker==3.7.0
-colorlog==3.1.4
-voluptuous==0.11.5
-gitpython==2.1.10
-pytz==2018.5
-pyudev==0.21.0
-pycryptodome==3.6.6
+async_timeout==3.0.1
+attrs==18.2.0
+cchardet==2.1.4
+colorlog==4.0.2
 cpe==1.2.1
+cryptography==2.5
+docker==3.7.0
+gitpython==2.1.11
+pytz==2018.9
+pyudev==0.21.0
 uvloop==0.11.3
-cchardet==2.1.1
+voluptuous==0.11.5
@@ -1,4 +1,4 @@
-flake8==3.6.0
+flake8==3.7.5
 pylint==2.2.2
 pytest==4.1.1
 pytest-timeout==1.3.3
setup.cfg (12 changed lines)
@@ -1,5 +1,8 @@
 [isort]
-multi_line_output = 4
+multi_line_output = 3
+include_trailing_comma=True
+force_grid_wrap=0
+line_length=88
 indent = "    "
 not_skip = __init__.py
 force_sort_within_sections = true
@@ -9,9 +12,6 @@ forced_separate = tests
 combine_as_imports = true
 use_parentheses = true
 
-[yapf]
-based_on_style = chromium
-indent_width = 4
-
 [flake8]
-max-line-length = 80
+max-line-length = 88
+ignore = E501