Init ingress session border / lookup (#995)

* Init ingress session border / lookup
* Add session to API
* Add cookie validate
* Do it without event bus
* Add logger
* Fix validation
* Add tests
* Update tests
* Mock json storage

parent 1edec61133
commit 67f562a846
API.md (11 lines added)

@@ -588,9 +588,20 @@ Write data to add-on stdin

### ingress

- POST `/ingress/session`

Create a new session for access to the ingress service.

```json
{
    "session": "token"
}
```

- VIEW `/ingress/{token}`

Ingress WebUI for this add-on. The add-on needs to support HASS Auth.
The ingress session must be provided as a cookie.

### discovery
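For orientation, here is a minimal sketch of how the two endpoints documented above fit together, written with aiohttp since that is what the Supervisor itself uses. The base URL `http://hassio`, the `X-Hassio-Key` header value, and the `result`/`data` response envelope are assumptions for illustration; note also that, per the handler change below, the Supervisor only accepts these ingress calls when they come from Home Assistant itself.

```python
import asyncio

import aiohttp


async def open_ingress(ingress_token: str) -> None:
    """Sketch: create an ingress session, then call the add-on ingress URL with the cookie."""
    # Assumption: the usual Supervisor auth header; only Home Assistant may call these routes.
    headers = {"X-Hassio-Key": "<supervisor token>"}

    async with aiohttp.ClientSession(headers=headers) as http:
        # 1. POST /ingress/session -> a new session token
        async with http.post("http://hassio/ingress/session") as resp:
            body = await resp.json()
            session = body["data"]["session"]  # assumption: standard Hass.io response envelope

        # 2. Call the add-on ingress endpoint with the ingress_session cookie (COOKIE_INGRESS).
        async with http.get(
            f"http://hassio/ingress/{ingress_token}/",
            cookies={"ingress_session": session},
        ) as resp:
            print(resp.status)


if __name__ == "__main__":
    asyncio.run(open_ingress("<add-on ingress token>"))
```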
@@ -6,6 +6,7 @@ from ipaddress import IPv4Address, ip_address
import logging
from pathlib import Path, PurePath
import re
import secrets
import shutil
import tarfile
from tempfile import TemporaryDirectory
@@ -89,7 +90,6 @@ from ..exceptions import (
    HostAppArmorError,
    JsonFileError,
)
from ..utils import create_token
from ..utils.apparmor import adjust_profile
from ..utils.json import read_json_file, write_json_file
from .utils import check_installed, remove_data
@@ -896,7 +896,7 @@ class Addon(CoreSysAttributes):
            return

        # Access Token
        self._data.user[self._id][ATTR_ACCESS_TOKEN] = create_token()
        self._data.user[self._id][ATTR_ACCESS_TOKEN] = secrets.token_hex(56)
        self.save_data()

        # Options
@@ -194,6 +194,7 @@ class RestAPI(CoreSysAttributes):
        api_ingress.coresys = self.coresys

        self.webapp.add_routes([
            web.post('/ingress/session', api_ingress.create_session),
            web.view('/ingress/{token}/{path:.*}', api_ingress.handler),
        ])
@@ -2,7 +2,7 @@
import asyncio
from ipaddress import ip_address
import logging
from typing import Dict, Union
from typing import Any, Dict, Union

import aiohttp
from aiohttp import hdrs, web
@@ -14,8 +14,9 @@ from aiohttp.web_exceptions import (
from multidict import CIMultiDict, istr

from ..addons.addon import Addon
from ..const import HEADER_TOKEN, REQUEST_FROM
from ..const import ATTR_SESSION, HEADER_TOKEN, REQUEST_FROM, COOKIE_INGRESS
from ..coresys import CoreSysAttributes
from .utils import api_process

_LOGGER = logging.getLogger(__name__)

@@ -28,34 +29,45 @@ class APIIngress(CoreSysAttributes):
        token = request.match_info.get("token")

        # Find correct add-on
        for addon in self.sys_addons.list_installed:
            if addon.ingress_token != token:
                continue
            return addon
        addon = self.sys_ingress.get(token)
        if not addon:
            _LOGGER.warning("Ingress for %s not available", token)
            raise HTTPServiceUnavailable()

        _LOGGER.warning("Ingress for %s not available", token)
        raise HTTPServiceUnavailable()
        return addon

    def _check_ha_access(self, request: web.Request) -> None:
        if request[REQUEST_FROM] != self.sys_homeassistant:
            _LOGGER.warning("Ingress is only available behind Home Assistant")
            raise HTTPUnauthorized()

    def _create_url(self, addon: Addon, path: str) -> str:
        """Create URL to container."""
        return f"{addon.ingress_internal}/{path}"

    @api_process
    async def create_session(self, request: web.Request) -> Dict[str, Any]:
        """Create a new session."""
        self._check_ha_access(request)

        session = self.sys_ingress.create_session()
        return {ATTR_SESSION: session}

    async def handler(
        self, request: web.Request
    ) -> Union[web.Response, web.StreamResponse, web.WebSocketResponse]:
        """Route data to Hass.io ingress service."""
        addon = self._extract_addon(request)
        path = request.match_info.get("path")
        self._check_ha_access(request)

        # Only Home Assistant call this
        if request[REQUEST_FROM] != self.sys_homeassistant:
            _LOGGER.warning("Ingress is only available behind Home Assistant")
        # Check Ingress Session
        session = request.cookies.get(COOKIE_INGRESS)
        if not self.sys_ingress.validate_session(session):
            _LOGGER.warning("No valid ingress session %s", session)
            raise HTTPUnauthorized()
        if not addon.with_ingress:
            _LOGGER.warning("Add-on %s don't support ingress feature", addon.slug)
            raise HTTPBadGateway()

        # Process requests
        addon = self._extract_addon(request)
        path = request.match_info.get("path")
        try:
            # Websocket
            if _is_websocket(request):
@@ -19,6 +19,7 @@ from .discovery import Discovery
from .hassos import HassOS
from .homeassistant import HomeAssistant
from .host import HostManager
from .ingress import Ingress
from .services import ServiceManager
from .snapshots import SnapshotManager
from .supervisor import Supervisor
@@ -49,6 +50,7 @@ async def initialize_coresys():
    coresys.addons = AddonManager(coresys)
    coresys.snapshots = SnapshotManager(coresys)
    coresys.host = HostManager(coresys)
    coresys.ingress = Ingress(coresys)
    coresys.tasks = Tasks(coresys)
    coresys.services = ServiceManager(coresys)
    coresys.discovery = Discovery(coresys)
@@ -71,8 +73,9 @@ def initialize_system_data(coresys):

    # Home Assistant configuration folder
    if not config.path_homeassistant.is_dir():
        _LOGGER.info("Create Home Assistant configuration folder %s",
                     config.path_homeassistant)
        _LOGGER.info(
            "Create Home Assistant configuration folder %s", config.path_homeassistant
        )
        config.path_homeassistant.mkdir()

    # hassio ssl folder
@@ -82,18 +85,19 @@ def initialize_system_data(coresys):

    # hassio addon data folder
    if not config.path_addons_data.is_dir():
        _LOGGER.info("Create Hass.io Add-on data folder %s",
                     config.path_addons_data)
        _LOGGER.info("Create Hass.io Add-on data folder %s", config.path_addons_data)
        config.path_addons_data.mkdir(parents=True)

    if not config.path_addons_local.is_dir():
        _LOGGER.info("Create Hass.io Add-on local repository folder %s",
                     config.path_addons_local)
        _LOGGER.info(
            "Create Hass.io Add-on local repository folder %s", config.path_addons_local
        )
        config.path_addons_local.mkdir(parents=True)

    if not config.path_addons_git.is_dir():
        _LOGGER.info("Create Hass.io Add-on git repositories folder %s",
                     config.path_addons_git)
        _LOGGER.info(
            "Create Hass.io Add-on git repositories folder %s", config.path_addons_git
        )
        config.path_addons_git.mkdir(parents=True)

    # hassio tmp folder
@@ -154,7 +158,8 @@ def initialize_logging():
                "ERROR": "red",
                "CRITICAL": "red",
            },
    ))
        )
    )


def check_environment():
@@ -188,19 +193,16 @@ def check_environment():
def reg_signal(loop):
    """Register SIGTERM and SIGKILL to stop system."""
    try:
        loop.add_signal_handler(signal.SIGTERM,
                                lambda: loop.call_soon(loop.stop))
        loop.add_signal_handler(signal.SIGTERM, lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGTERM")

    try:
        loop.add_signal_handler(signal.SIGHUP,
                                lambda: loop.call_soon(loop.stop))
        loop.add_signal_handler(signal.SIGHUP, lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGHUP")

    try:
        loop.add_signal_handler(signal.SIGINT,
                                lambda: loop.call_soon(loop.stop))
        loop.add_signal_handler(signal.SIGINT, lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGINT")
@@ -23,6 +23,7 @@ FILE_HASSIO_HOMEASSISTANT = Path(HASSIO_DATA, "homeassistant.json")
FILE_HASSIO_UPDATER = Path(HASSIO_DATA, "updater.json")
FILE_HASSIO_SERVICES = Path(HASSIO_DATA, "services.json")
FILE_HASSIO_DISCOVERY = Path(HASSIO_DATA, "discovery.json")
FILE_HASSIO_INGRESS = Path(HASSIO_DATA, "ingress.json")

SOCKET_DOCKER = Path("/var/run/docker.sock")

@@ -54,6 +55,7 @@ CONTENT_TYPE_TAR = "application/tar"
CONTENT_TYPE_URL = "application/x-www-form-urlencoded"
HEADER_HA_ACCESS = "X-Ha-Access"
HEADER_TOKEN = "X-Hassio-Key"
COOKIE_INGRESS = "ingress_session"

ENV_TOKEN = "HASSIO_TOKEN"
ENV_TIME = "TZ"
@@ -194,6 +196,7 @@ ATTR_INGRESS_ENTRY = "ingress_entry"
ATTR_INGRESS_TOKEN = "ingress_token"
ATTR_INGRESS_URL = "ingress_url"
ATTR_IP_ADDRESS = "ip_address"
ATTR_SESSION = "session"

PROVIDE_SERVICE = "provide"
NEED_SERVICE = "need"
@@ -62,6 +62,9 @@ class HassIO(CoreSysAttributes):
        # Load discovery
        await self.sys_discovery.load()

        # Load ingress
        await self.sys_ingress.load()

        # start dns forwarding
        self.sys_create_task(self.sys_dns.start())

@@ -131,6 +134,7 @@ class HassIO(CoreSysAttributes):
                    self.sys_dns.stop(),
                    self.sys_websession.close(),
                    self.sys_websession_ssl.close(),
                    self.sys_ingress.unload(),
                ]
            )
        except asyncio.TimeoutError:
@@ -23,6 +23,7 @@ if TYPE_CHECKING:
    from .hassos import HassOS
    from .homeassistant import HomeAssistant
    from .host import HostManager
    from .ingress import Ingress
    from .services import ServiceManager
    from .snapshots import SnapshotManager
    from .supervisor import Supervisor
@@ -63,6 +64,7 @@ class CoreSys:
        self._snapshots: SnapshotManager = None
        self._tasks: Tasks = None
        self._host: HostManager = None
        self._ingress: Ingress = None
        self._dbus: DBusManager = None
        self._hassos: HassOS = None
        self._services: ServiceManager = None
@@ -293,6 +295,18 @@ class CoreSys:
            raise RuntimeError("HostManager already set!")
        self._host = value

    @property
    def ingress(self) -> Ingress:
        """Return Ingress object."""
        return self._ingress

    @ingress.setter
    def ingress(self, value: Ingress):
        """Set a Ingress object."""
        if self._ingress:
            raise RuntimeError("Ingress already set!")
        self._ingress = value

    @property
    def hassos(self) -> HassOS:
        """Return HassOS object."""
@@ -441,6 +455,11 @@ class CoreSysAttributes:
        """Return HostManager object."""
        return self.coresys.host

    @property
    def sys_ingress(self) -> Ingress:
        """Return Ingress object."""
        return self.coresys.ingress

    @property
    def sys_hassos(self) -> HassOS:
        """Return HassOS object."""
@@ -7,6 +7,7 @@ import logging
import os
from pathlib import Path
import re
import secrets
import socket
import time
from typing import Any, AsyncContextManager, Awaitable, Dict, Optional
@@ -35,13 +36,13 @@ from .coresys import CoreSys, CoreSysAttributes
from .docker.homeassistant import DockerHomeAssistant
from .docker.stats import DockerStats
from .exceptions import (
    DockerAPIError,
    HomeAssistantAPIError,
    HomeAssistantAuthError,
    HomeAssistantError,
    HomeAssistantUpdateError,
    DockerAPIError
)
from .utils import convert_to_ascii, create_token, process_lock
from .utils import convert_to_ascii, process_lock
from .utils.json import JsonConfig
from .validate import SCHEMA_HASS_CONFIG

@@ -314,7 +315,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
            return

        # Create new API token
        self._data[ATTR_ACCESS_TOKEN] = create_token()
        self._data[ATTR_ACCESS_TOKEN] = secrets.token_hex(56)
        self.save_data()

        try:
hassio/ingress.py (new file, 103 lines)
@@ -0,0 +1,103 @@
"""Fetch last versions from webserver."""
from datetime import timedelta
import logging
from typing import Dict, Optional
import secrets

from .addons.addon import Addon
from .const import ATTR_SESSION, FILE_HASSIO_INGRESS
from .coresys import CoreSys, CoreSysAttributes
from .utils.json import JsonConfig
from .utils.dt import utcnow, utc_from_timestamp
from .validate import SCHEMA_INGRESS_CONFIG

_LOGGER = logging.getLogger(__name__)


class Ingress(JsonConfig, CoreSysAttributes):
    """Fetch last versions from version.json."""

    def __init__(self, coresys: CoreSys):
        """Initialize updater."""
        super().__init__(FILE_HASSIO_INGRESS, SCHEMA_INGRESS_CONFIG)
        self.coresys: CoreSys = coresys
        self.tokens: Dict[str, str] = {}

    def get(self, token: str) -> Optional[Addon]:
        """Return addon they have this ingress token."""
        if token not in self.tokens:
            self._update_token_list()
        return self.sys_addons.get(self.tokens.get(token))

    @property
    def sessions(self) -> Dict[str, float]:
        """Return sessions."""
        return self._data[ATTR_SESSION]

    async def load(self) -> None:
        """Update internal data."""
        self._update_token_list()
        self._cleanup_sessions()

        _LOGGER.info("Load %d ingress session", len(self.sessions))

    async def reload(self) -> None:
        """Reload/Validate sessions."""
        self._cleanup_sessions()

    async def unload(self) -> None:
        """Shutdown sessions."""
        self.save_data()

    def _cleanup_sessions(self) -> None:
        """Remove not used sessions."""
        now = utcnow()

        sessions = {}
        for session, valid in self.sessions.items():
            valid_dt = utc_from_timestamp(valid)
            if valid_dt < now:
                continue

            # Is valid
            sessions[session] = valid

        # Write back
        self.sessions.clear()
        self.sessions.update(sessions)

    def _update_token_list(self) -> None:
        """Regenerate token <-> Add-on map."""
        self.tokens.clear()

        # Read all ingress token and build a map
        for addon in self.sys_addons.list_installed:
            if not addon.with_ingress:
                continue
            self.tokens[addon.ingress_token] = addon.slug

    def create_session(self) -> str:
        """Create new session."""
        session = secrets.token_hex(64)
        valid = utcnow() + timedelta(minutes=15)

        self.sessions[session] = valid.timestamp()
        self.save_data()

        return session

    def validate_session(self, session: str) -> bool:
        """Return True if session valid and make it longer valid."""
        if session not in self.sessions:
            return False
        valid_until = utc_from_timestamp(self.sessions[session])

        # Is still valid?
        if valid_until < utcnow():
            return False

        # Update time
        valid_until = valid_until + timedelta(minutes=15)
        self.sessions[session] = valid_until.timestamp()

        return True
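Stepping back from the diff, the session store introduced here is just a token → expiry-timestamp map with a 15-minute sliding window: every successful validation pushes the expiry another 15 minutes out, and expired entries are dropped on load/reload. A minimal standalone sketch of that idea, with illustrative names that are not part of the codebase:

```python
import secrets
from datetime import datetime, timedelta, timezone
from typing import Dict

SESSION_TTL = timedelta(minutes=15)   # same window as Ingress.create_session()
sessions: Dict[str, float] = {}       # token -> expiry as a UTC timestamp


def create_session() -> str:
    """Issue a token that is valid for the next 15 minutes."""
    token = secrets.token_hex(64)
    sessions[token] = (datetime.now(timezone.utc) + SESSION_TTL).timestamp()
    return token


def validate_session(token: str) -> bool:
    """Return True if the token is known and unexpired, extending its lifetime."""
    expiry = sessions.get(token)
    if expiry is None or expiry < datetime.now(timezone.utc).timestamp():
        return False
    # Sliding window: extend from the previous expiry, as Ingress.validate_session() does.
    sessions[token] = (datetime.fromtimestamp(expiry, timezone.utc) + SESSION_TTL).timestamp()
    return True
```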
hassio/tasks.py (101 changed lines)
@@ -7,7 +7,7 @@ from .exceptions import HomeAssistantError

_LOGGER = logging.getLogger(__name__)

HASS_WATCHDOG_API = 'HASS_WATCHDOG_API'
HASS_WATCHDOG_API = "HASS_WATCHDOG_API"

RUN_UPDATE_SUPERVISOR = 29100
RUN_UPDATE_ADDONS = 57600
@@ -17,6 +17,7 @@ RUN_RELOAD_ADDONS = 21600
RUN_RELOAD_SNAPSHOTS = 72000
RUN_RELOAD_HOST = 72000
RUN_RELOAD_UPDATER = 21600
RUN_RELOAD_INGRESS = 930

RUN_WATCHDOG_HOMEASSISTANT_DOCKER = 15
RUN_WATCHDOG_HOMEASSISTANT_API = 300
@@ -33,28 +34,55 @@ class Tasks(CoreSysAttributes):

    async def load(self):
        """Add Tasks to scheduler."""
        self.jobs.add(self.sys_scheduler.register_task(
            self._update_addons, RUN_UPDATE_ADDONS))
        self.jobs.add(self.sys_scheduler.register_task(
            self._update_supervisor, RUN_UPDATE_SUPERVISOR))
        self.jobs.add(self.sys_scheduler.register_task(
            self._update_hassos_cli, RUN_UPDATE_HASSOSCLI))
        # Update
        self.jobs.add(
            self.sys_scheduler.register_task(self._update_addons, RUN_UPDATE_ADDONS)
        )
        self.jobs.add(
            self.sys_scheduler.register_task(
                self._update_supervisor, RUN_UPDATE_SUPERVISOR
            )
        )
        self.jobs.add(
            self.sys_scheduler.register_task(
                self._update_hassos_cli, RUN_UPDATE_HASSOSCLI
            )
        )

        self.jobs.add(self.sys_scheduler.register_task(
            self.sys_addons.reload, RUN_RELOAD_ADDONS))
        self.jobs.add(self.sys_scheduler.register_task(
            self.sys_updater.reload, RUN_RELOAD_UPDATER))
        self.jobs.add(self.sys_scheduler.register_task(
            self.sys_snapshots.reload, RUN_RELOAD_SNAPSHOTS))
        self.jobs.add(self.sys_scheduler.register_task(
            self.sys_host.reload, RUN_RELOAD_HOST))
        # Reload
        self.jobs.add(
            self.sys_scheduler.register_task(self.sys_addons.reload, RUN_RELOAD_ADDONS)
        )
        self.jobs.add(
            self.sys_scheduler.register_task(
                self.sys_updater.reload, RUN_RELOAD_UPDATER
            )
        )
        self.jobs.add(
            self.sys_scheduler.register_task(
                self.sys_snapshots.reload, RUN_RELOAD_SNAPSHOTS
            )
        )
        self.jobs.add(
            self.sys_scheduler.register_task(self.sys_host.reload, RUN_RELOAD_HOST)
        )
        self.jobs.add(
            self.sys_scheduler.register_task(
                self.sys_ingress.reload, RUN_RELOAD_INGRESS
            )
        )

        self.jobs.add(self.sys_scheduler.register_task(
            self._watchdog_homeassistant_docker,
            RUN_WATCHDOG_HOMEASSISTANT_DOCKER))
        self.jobs.add(self.sys_scheduler.register_task(
            self._watchdog_homeassistant_api,
            RUN_WATCHDOG_HOMEASSISTANT_API))
        # Watchdog
        self.jobs.add(
            self.sys_scheduler.register_task(
                self._watchdog_homeassistant_docker, RUN_WATCHDOG_HOMEASSISTANT_DOCKER
            )
        )
        self.jobs.add(
            self.sys_scheduler.register_task(
                self._watchdog_homeassistant_api, RUN_WATCHDOG_HOMEASSISTANT_API
            )
        )

        _LOGGER.info("All core tasks are scheduled")

@@ -72,7 +100,8 @@ class Tasks(CoreSysAttributes):
                tasks.append(addon.update())
            else:
                _LOGGER.warning(
                    "Add-on %s will be ignored, schema tests fails", addon.slug)
                    "Add-on %s will be ignored, schema tests fails", addon.slug
                )

        if tasks:
            _LOGGER.info("Add-on auto update process %d tasks", len(tasks))
@@ -94,14 +123,18 @@ class Tasks(CoreSysAttributes):
    async def _watchdog_homeassistant_docker(self):
        """Check running state of Docker and start if they is close."""
        # if Home Assistant is active
        if not await self.sys_homeassistant.is_fails() or \
                not self.sys_homeassistant.watchdog or \
                self.sys_homeassistant.error_state:
        if (
            not await self.sys_homeassistant.is_fails()
            or not self.sys_homeassistant.watchdog
            or self.sys_homeassistant.error_state
        ):
            return

        # if Home Assistant is running
        if self.sys_homeassistant.in_progress or \
                await self.sys_homeassistant.is_running():
        if (
            self.sys_homeassistant.in_progress
            or await self.sys_homeassistant.is_running()
        ):
            return

        _LOGGER.warning("Watchdog found a problem with Home Assistant Docker!")
@@ -117,17 +150,21 @@ class Tasks(CoreSysAttributes):
        a delay in our system.
        """
        # If Home-Assistant is active
        if not await self.sys_homeassistant.is_fails() or \
                not self.sys_homeassistant.watchdog or \
                self.sys_homeassistant.error_state:
        if (
            not await self.sys_homeassistant.is_fails()
            or not self.sys_homeassistant.watchdog
            or self.sys_homeassistant.error_state
        ):
            return

        # Init cache data
        retry_scan = self._cache.get(HASS_WATCHDOG_API, 0)

        # If Home-Assistant API is up
        if self.sys_homeassistant.in_progress or \
                await self.sys_homeassistant.check_api_state():
        if (
            self.sys_homeassistant.in_progress
            or await self.sys_homeassistant.check_api_state()
        ):
            return

        # Look like we run into a problem
@@ -1,8 +1,6 @@
"""Tools file for Hass.io."""
import hashlib
import logging
import re
import uuid
from datetime import datetime

_LOGGER = logging.getLogger(__name__)
@@ -14,11 +12,6 @@ def convert_to_ascii(raw) -> str:
    return RE_STRING.sub("", raw.decode())


def create_token() -> str:
    """Create token for API access."""
    return hashlib.sha256(uuid.uuid4().bytes).hexdigest()


def process_lock(method):
    """Wrap function with only run once."""
@@ -58,6 +58,11 @@ def parse_datetime(dt_str):
    return datetime(**kws)


def utcnow():
def utcnow() -> datetime:
    """Return the current timestamp including timezone."""
    return datetime.now(UTC)


def utc_from_timestamp(timestamp: float) -> datetime:
    """Return a UTC time from a timestamp."""
    return UTC.localize(datetime.utcfromtimestamp(timestamp))
@@ -1,36 +1,36 @@
"""Validate functions."""
import uuid
import re
import uuid

import voluptuous as vol

from .const import (
    ATTR_IMAGE,
    ATTR_LAST_VERSION,
    ATTR_CHANNEL,
    ATTR_TIMEZONE,
    ATTR_HASSOS,
    ATTR_ADDONS_CUSTOM_LIST,
    ATTR_PASSWORD,
    ATTR_HOMEASSISTANT,
    ATTR_HASSIO,
    ATTR_BOOT,
    ATTR_LAST_BOOT,
    ATTR_SSL,
    ATTR_PORT,
    ATTR_WATCHDOG,
    ATTR_WAIT_BOOT,
    ATTR_UUID,
    ATTR_REFRESH_TOKEN,
    ATTR_HASSOS_CLI,
    ATTR_ACCESS_TOKEN,
    CHANNEL_STABLE,
    ATTR_ADDONS_CUSTOM_LIST,
    ATTR_BOOT,
    ATTR_CHANNEL,
    ATTR_HASSIO,
    ATTR_HASSOS,
    ATTR_HASSOS_CLI,
    ATTR_HOMEASSISTANT,
    ATTR_IMAGE,
    ATTR_LAST_BOOT,
    ATTR_LAST_VERSION,
    ATTR_PASSWORD,
    ATTR_PORT,
    ATTR_REFRESH_TOKEN,
    ATTR_SESSION,
    ATTR_SSL,
    ATTR_TIMEZONE,
    ATTR_UUID,
    ATTR_WAIT_BOOT,
    ATTR_WATCHDOG,
    CHANNEL_BETA,
    CHANNEL_DEV,
    CHANNEL_STABLE,
)
from .utils.validate import validate_timezone


RE_REPOSITORY = re.compile(r"^(?P<url>[^#]+)(?:#(?P<branch>[\w\-]+))?$")

NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))
@@ -139,3 +139,12 @@ SCHEMA_HASSIO_CONFIG = vol.Schema(


SCHEMA_AUTH_CONFIG = vol.Schema({SHA256: SHA256})


SCHEMA_INGRESS_CONFIG = vol.Schema(
    {
        vol.Required(ATTR_SESSION, default=dict): vol.Schema(
            {vol.Coerce(str): vol.Coerce(float)}
        )
    }
)
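To make the shape of the new ingress configuration concrete, here is a small illustrative check of the schema added above; the schema is re-declared inline rather than imported from `hassio.validate`, and the token and timestamp values are made up.

```python
import voluptuous as vol

# Inline copy of the SCHEMA_INGRESS_CONFIG shape, for illustration only.
schema = vol.Schema(
    {
        vol.Required("session", default=dict): vol.Schema(
            {vol.Coerce(str): vol.Coerce(float)}
        )
    }
)

# An empty config gets a default, empty session map.
assert schema({}) == {"session": {}}

# Each stored session maps its token to a UTC expiry timestamp (a float).
data = schema({"session": {"abc123": 1554989779}})
assert data["session"]["abc123"] == 1554989779.0
```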
@@ -11,32 +11,30 @@ from hassio.bootstrap import initialize_coresys
@pytest.fixture
def docker():
    """Mock Docker API."""
    with patch('hassio.coresys.DockerAPI') as mock:
    with patch("hassio.coresys.DockerAPI") as mock:
        yield mock


@pytest.fixture
async def coresys(loop, docker):
    """Create a CoreSys Mock."""
    with patch('hassio.bootstrap.initialize_system_data'):
    with patch("hassio.bootstrap.initialize_system_data"):
        coresys_obj = await initialize_coresys()

    coresys_obj.ingress.save_data = MagicMock()

    yield coresys_obj


@pytest.fixture
def sys_machine():
    """Mock sys_machine."""
    with patch(
            'hassio.coresys.CoreSys.machine',
            new_callable=PropertyMock) as mock:
    with patch("hassio.coresys.CoreSys.machine", new_callable=PropertyMock) as mock:
        yield mock


@pytest.fixture
def sys_supervisor():
    with patch(
            'hassio.coresys.CoreSys.supervisor',
            new_callable=PropertyMock) as mock:
    with patch("hassio.coresys.CoreSys.supervisor", new_callable=PropertyMock) as mock:
        mock.return_value = MagicMock()
        yield MagicMock
tests/test_ingress.py (new file, 22 lines)
@@ -0,0 +1,22 @@
"""Test ingress."""
from datetime import timedelta

from hassio.utils.dt import utc_from_timestamp


def test_session_handling(coresys):
    """Create and test session."""
    session = coresys.ingress.create_session()
    validate = coresys.ingress.sessions[session]

    assert coresys.ingress.save_data.called
    assert session
    assert validate

    assert coresys.ingress.validate_session(session)
    assert coresys.ingress.sessions[session] != validate

    not_valid = utc_from_timestamp(validate) - timedelta(minutes=20)
    coresys.ingress.sessions[session] = not_valid.timestamp()
    assert not coresys.ingress.validate_session(session)
    assert not coresys.ingress.validate_session("invalid session")