Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-07-28 11:36:32 +00:00)

commit 13243cd02c
Merge remote-tracking branch 'origin/dev'
Release 110
Dockerfile
@@ -17,7 +17,7 @@ RUN apk add --no-cache \
     python3-dev \
     g++ \
  && pip3 install --no-cache-dir \
-    uvloop==0.10.1 \
+    uvloop==0.10.2 \
     cchardet==2.1.1 \
     pycryptodome==3.4.11 \
  && apk del .build-dependencies
hassio/addons/data.py
@@ -1,5 +1,4 @@
 """Init file for HassIO addons."""
-import copy
 import logging
 import json
 from pathlib import Path
@@ -11,7 +10,7 @@ from .utils import extract_hash_from_path
 from .validate import (
     SCHEMA_ADDON_CONFIG, SCHEMA_ADDONS_FILE, SCHEMA_REPOSITORY_CONFIG)
 from ..const import (
-    FILE_HASSIO_ADDONS, ATTR_VERSION, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON,
+    FILE_HASSIO_ADDONS, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON,
     REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_USER, ATTR_SYSTEM)
 from ..coresys import CoreSysAttributes
 from ..utils.json import JsonConfig, read_json_file
@@ -70,9 +69,6 @@ class AddonsData(JsonConfig, CoreSysAttributes):
             if repository_element.is_dir():
                 self._read_git_repository(repository_element)

-        # update local data
-        self._merge_config()
-
     def _read_git_repository(self, path):
         """Process a custom repository folder."""
         slug = extract_hash_from_path(path)
@@ -138,25 +134,3 @@ class AddonsData(JsonConfig, CoreSysAttributes):
         # local repository
         self._repositories[REPOSITORY_LOCAL] = \
             builtin_data[REPOSITORY_LOCAL]
-
-    def _merge_config(self):
-        """Update local config if they have update.
-
-        It need to be the same version as the local version is for merge.
-        """
-        have_change = False
-
-        for addon in set(self.system):
-            # detached
-            if addon not in self._cache:
-                continue
-
-            cache = self._cache[addon]
-            data = self.system[addon]
-            if data[ATTR_VERSION] == cache[ATTR_VERSION]:
-                if data != cache:
-                    self.system[addon] = copy.deepcopy(cache)
-                    have_change = True
-
-        if have_change:
-            self.save_data()
hassio/api/__init__.py
@@ -74,6 +74,7 @@ class RestAPI(CoreSysAttributes):

         self.webapp.add_routes([
             web.get('/hassos/info', api_hassos.info),
+            web.post('/hassos/update', api_hassos.update),
             web.post('/hassos/config/sync', api_hassos.config_sync),
         ])
hassio/api/hassos.py
@@ -4,7 +4,7 @@ import logging

 import voluptuous as vol

-from .utils import api_process
+from .utils import api_process, api_validate
 from ..const import ATTR_VERSION, ATTR_BOARD, ATTR_VERSION_LATEST
 from ..coresys import CoreSysAttributes
@@ -23,10 +23,18 @@ class APIHassOS(CoreSysAttributes):
         """Return hassos information."""
         return {
             ATTR_VERSION: self.sys_hassos.version,
-            ATTR_VERSION_LATEST: self.sys_hassos.version,
+            ATTR_VERSION_LATEST: self.sys_hassos.version_latest,
             ATTR_BOARD: self.sys_hassos.board,
         }

+    @api_process
+    async def update(self, request):
+        """Update HassOS."""
+        body = await api_validate(SCHEMA_VERSION, request)
+        version = body.get(ATTR_VERSION, self.sys_hassos.version_latest)
+
+        await asyncio.shield(self.sys_hassos.update(version))
+
     @api_process
     def config_sync(self, request):
         """Trigger config reload on HassOS."""
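The new update handler validates the request body with api_validate and shields the long-running OS update from request cancellation. SCHEMA_VERSION itself is not part of this hunk; assuming it follows the pattern of the other API schemas in this module, it would look roughly like the sketch below (not the committed definition):

    # Hypothetical schema in the style of the other API modules:
    SCHEMA_VERSION = vol.Schema({
        vol.Optional(ATTR_VERSION): vol.Coerce(str),
    })

A client would then trigger an OS update with POST /hassos/update and an optional {"version": "..."} body; omitting the version falls back to version_latest.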
(Several panel asset diffs are suppressed: minified files too long or binary files not shown.)
hassio/api/panel/chunk.fdf0834c750e40935b6f.js (new file, 1 line; diff suppressed because the line is too long)
hassio/api/panel/chunk.fdf0834c750e40935b6f.js.gz (new binary file, not shown)
(Webpack entrypoint, filename not shown; the only change is the hash of chunk 2: 2cdff35c6685a5344cd2 -> fdf0834c750e40935b6f.)
@@ -1 +1 @@
-!function(e){function n(n){for(var t,o,i=n[0],u=n[1],a=0,c=[];a<i.length;a++)o=i[a],r[o]&&c.push(r[o][0]),r[o]=0;for(t in u)Object.prototype.hasOwnProperty.call(u,t)&&(e[t]=u[t]);for(f&&f(n);c.length;)c.shift()()}var t={},r={6:0};function o(n){if(t[n])return t[n].exports;var r=t[n]={i:n,l:!1,exports:{}};return e[n].call(r.exports,r,r.exports,o),r.l=!0,r.exports}o.e=function(e){var n=[],t=r[e];if(0!==t)if(t)n.push(t[2]);else{var i=new Promise(function(n,o){t=r[e]=[n,o]});n.push(t[2]=i);var u,a=document.getElementsByTagName("head")[0],f=document.createElement("script");f.charset="utf-8",f.timeout=120,o.nc&&f.setAttribute("nonce",o.nc),f.src=function(e){return o.p+"chunk."+{0:"f3880aa331d3ef2ddf32",1:"a8e86d80be46b3b6e16d",2:"2cdff35c6685a5344cd2",3:"ff92199b0d422767d108",4:"c77b56beea1d4547ff5f",5:"c93f37c558ff32991708"}[e]+".js"}(e),u=function(n){f.onerror=f.onload=null,clearTimeout(c);var t=r[e];if(0!==t){if(t){var o=n&&("load"===n.type?"missing":n.type),i=n&&n.target&&n.target.src,u=new Error("Loading chunk "+e+" failed.\n("+o+": "+i+")");u.type=o,u.request=i,t[1](u)}r[e]=void 0}};var c=setTimeout(function(){u({type:"timeout",target:f})},12e4);f.onerror=f.onload=u,a.appendChild(f)}return Promise.all(n)},o.m=e,o.c=t,o.d=function(e,n,t){o.o(e,n)||Object.defineProperty(e,n,{enumerable:!0,get:t})},o.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.t=function(e,n){if(1&n&&(e=o(e)),8&n)return e;if(4&n&&"object"==typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(o.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&n&&"string"!=typeof e)for(var r in e)o.d(t,r,function(n){return e[n]}.bind(null,r));return t},o.n=function(e){var n=e&&e.__esModule?function(){return e.default}:function(){return e};return o.d(n,"a",n),n},o.o=function(e,n){return Object.prototype.hasOwnProperty.call(e,n)},o.p="/api/hassio/app/",o.oe=function(e){throw console.error(e),e};var i=window.webpackJsonp=window.webpackJsonp||[],u=i.push.bind(i);i.push=n,i=i.slice();for(var a=0;a<i.length;a++)n(i[a]);var f=u;o(o.s=0)}([function(e,n,t){window.loadES5Adapter().then(function(){Promise.all([t.e(0),t.e(3)]).then(t.bind(null,1)),Promise.all([t.e(0),t.e(1),t.e(2)]).then(t.bind(null,2))})}]);
+!function(e){function n(n){for(var t,o,i=n[0],u=n[1],a=0,c=[];a<i.length;a++)o=i[a],r[o]&&c.push(r[o][0]),r[o]=0;for(t in u)Object.prototype.hasOwnProperty.call(u,t)&&(e[t]=u[t]);for(f&&f(n);c.length;)c.shift()()}var t={},r={6:0};function o(n){if(t[n])return t[n].exports;var r=t[n]={i:n,l:!1,exports:{}};return e[n].call(r.exports,r,r.exports,o),r.l=!0,r.exports}o.e=function(e){var n=[],t=r[e];if(0!==t)if(t)n.push(t[2]);else{var i=new Promise(function(n,o){t=r[e]=[n,o]});n.push(t[2]=i);var u,a=document.getElementsByTagName("head")[0],f=document.createElement("script");f.charset="utf-8",f.timeout=120,o.nc&&f.setAttribute("nonce",o.nc),f.src=function(e){return o.p+"chunk."+{0:"f3880aa331d3ef2ddf32",1:"a8e86d80be46b3b6e16d",2:"fdf0834c750e40935b6f",3:"ff92199b0d422767d108",4:"c77b56beea1d4547ff5f",5:"c93f37c558ff32991708"}[e]+".js"}(e),u=function(n){f.onerror=f.onload=null,clearTimeout(c);var t=r[e];if(0!==t){if(t){var o=n&&("load"===n.type?"missing":n.type),i=n&&n.target&&n.target.src,u=new Error("Loading chunk "+e+" failed.\n("+o+": "+i+")");u.type=o,u.request=i,t[1](u)}r[e]=void 0}};var c=setTimeout(function(){u({type:"timeout",target:f})},12e4);f.onerror=f.onload=u,a.appendChild(f)}return Promise.all(n)},o.m=e,o.c=t,o.d=function(e,n,t){o.o(e,n)||Object.defineProperty(e,n,{enumerable:!0,get:t})},o.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.t=function(e,n){if(1&n&&(e=o(e)),8&n)return e;if(4&n&&"object"==typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(o.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&n&&"string"!=typeof e)for(var r in e)o.d(t,r,function(n){return e[n]}.bind(null,r));return t},o.n=function(e){var n=e&&e.__esModule?function(){return e.default}:function(){return e};return o.d(n,"a",n),n},o.o=function(e,n){return Object.prototype.hasOwnProperty.call(e,n)},o.p="/api/hassio/app/",o.oe=function(e){throw console.error(e),e};var i=window.webpackJsonp=window.webpackJsonp||[],u=i.push.bind(i);i.push=n,i=i.slice();for(var a=0;a<i.length;a++)n(i[a]);var f=u;o(o.s=0)}([function(e,n,t){window.loadES5Adapter().then(function(){Promise.all([t.e(0),t.e(3)]).then(t.bind(null,1)),Promise.all([t.e(0),t.e(1),t.e(2)]).then(t.bind(null,2))})}]);
hassio/api/utils.py
@@ -34,7 +34,6 @@ def api_process(method):
         except RuntimeError as err:
             return api_return_error(message=str(err))
         except HassioError:
-            _LOGGER.exception("Hassio error")
             return api_return_error()

         if isinstance(answer, dict):
hassio/const.py
@@ -2,7 +2,7 @@
 from pathlib import Path
 from ipaddress import ip_network

-HASSIO_VERSION = '109'
+HASSIO_VERSION = '110'

 URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"
 URL_HASSIO_VERSION = \
@@ -10,6 +10,10 @@ URL_HASSIO_VERSION = \
 URL_HASSIO_APPARMOR = \
     "https://s3.amazonaws.com/hassio-version/apparmor.txt"

+URL_HASSOS_OTA = (
+    "https://github.com/home-assistant/hassos/releases/download/"
+    "{version}/hassos_{board}-{version}.raucb")
+
 HASSIO_DATA = Path("/data")

 FILE_HASSIO_ADDONS = Path(HASSIO_DATA, "addons.json")
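For illustration, formatting the new OTA template with hypothetical values shows the release asset it points at:

    >>> URL_HASSOS_OTA.format(version='1.1', board='rpi3')
    'https://github.com/home-assistant/hassos/releases/download/1.1/hassos_rpi3-1.1.raucb'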
@@ -169,6 +173,7 @@ ATTR_APPARMOR = 'apparmor'
 ATTR_DEVICETREE = 'devicetree'
 ATTR_CPE = 'cpe'
 ATTR_BOARD = 'board'
+ATTR_HASSOS = 'hassos'

 SERVICE_MQTT = 'mqtt'
hassio/dbus/__init__.py
@@ -2,6 +2,7 @@

 from .systemd import Systemd
 from .hostname import Hostname
+from .rauc import Rauc
 from ..coresys import CoreSysAttributes

@@ -11,8 +12,10 @@ class DBusManager(CoreSysAttributes):
     def __init__(self, coresys):
         """Initialize DBus Interface."""
         self.coresys = coresys

         self._systemd = Systemd()
         self._hostname = Hostname()
+        self._rauc = Rauc()

     @property
     def systemd(self):
@@ -24,7 +27,13 @@ class DBusManager(CoreSysAttributes):
         """Return hostname Interface."""
         return self._hostname

+    @property
+    def rauc(self):
+        """Return rauc Interface."""
+        return self._rauc
+
     async def load(self):
         """Connect interfaces to dbus."""
         await self.systemd.connect()
         await self.hostname.connect()
+        await self.rauc.connect()
hassio/dbus/rauc.py (new file, 55 lines)
@@ -0,0 +1,55 @@
+"""DBus interface for rauc."""
+import logging
+
+from .interface import DBusInterface
+from .utils import dbus_connected
+from ..exceptions import DBusError
+from ..utils.gdbus import DBus
+
+_LOGGER = logging.getLogger(__name__)
+
+DBUS_NAME = 'de.pengutronix.rauc'
+DBUS_OBJECT = '/'
+
+
+class Rauc(DBusInterface):
+    """Handle DBus interface for rauc."""
+
+    async def connect(self):
+        """Connect to bus."""
+        try:
+            self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
+        except DBusError:
+            _LOGGER.warning("Can't connect to rauc")
+
+    @dbus_connected
+    def install(self, raucb_file):
+        """Install rauc bundle file.
+
+        Return a coroutine.
+        """
+        return self.dbus.Installer.Install(raucb_file)
+
+    @dbus_connected
+    def get_slot_status(self):
+        """Get slot status.
+
+        Return a coroutine.
+        """
+        return self.dbus.Installer.GetSlotStatus()
+
+    @dbus_connected
+    def get_properties(self):
+        """Return rauc information.
+
+        Return a coroutine.
+        """
+        return self.dbus.get_properties(f"{DBUS_NAME}.Installer")
+
+    @dbus_connected
+    def signal_completed(self):
+        """Return a signal wrapper for completed signal.
+
+        Return a coroutine.
+        """
+        return self.dbus.wait_signal(f"{DBUS_NAME}.Installer.Completed")
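Taken together, the new class wraps the rauc installer's D-Bus API. A minimal usage sketch from a CoreSysAttributes object, assuming the manager above is connected (file path and values are illustrative):

    rauc = self.sys_dbus.rauc
    await rauc.install('/data/tmp/hassos-1.1.raucb')   # calls Installer.Install
    result = await rauc.signal_completed()             # waits for Installer.Completed
    props = await rauc.get_properties()                # e.g. props.get('LastError')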
hassio/exceptions.py
@@ -11,6 +11,30 @@ class HassioNotSupportedError(HassioError):
     pass


+# HassOS
+
+class HassOSError(HassioError):
+    """HassOS exception."""
+    pass
+
+
+class HassOSUpdateError(HassOSError):
+    """Error on update of a HassOS."""
+    pass
+
+
+class HassOSNotSupportedError(HassioNotSupportedError):
+    """Function not supported by HassOS."""
+    pass
+
+
+# Updater
+
+class HassioUpdaterError(HassioError):
+    """Error on Updater."""
+    pass
+
+
 # Host

 class HostError(HassioError):
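Because the new exceptions extend the existing hierarchy, callers can handle them as narrowly or as broadly as they like. A sketch (the call and version are illustrative, mirroring the API added in this commit):

    try:
        await self.sys_hassos.update('1.1')
    except HassOSUpdateError:
        ...  # update-specific handling
    except HassioError:
        ...  # still catches every supervisor error, including the new ones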
hassio/hassos.py
@@ -1,10 +1,13 @@
 """HassOS support on supervisor."""
 import logging
+from pathlib import Path

+import aiohttp
 from cpe import CPE

 from .coresys import CoreSysAttributes
-from .exceptions import HassioNotSupportedError
+from .const import URL_HASSOS_OTA
+from .exceptions import HassOSNotSupportedError, HassOSUpdateError, DBusError

 _LOGGER = logging.getLogger(__name__)
@@ -29,6 +32,11 @@ class HassOS(CoreSysAttributes):
         """Return version of HassOS."""
         return self._version

+    @property
+    def version_latest(self):
+        """Return latest version of HassOS."""
+        return self.sys_updater.version_hassos
+
     @property
     def board(self):
         """Return board name."""
@@ -38,16 +46,47 @@ class HassOS(CoreSysAttributes):
         """Check if HassOS is availabe."""
         if not self.available:
             _LOGGER.error("No HassOS availabe")
-            raise HassioNotSupportedError()
+            raise HassOSNotSupportedError()
+
+    async def _download_raucb(self, version):
+        """Download rauc bundle (OTA) from github."""
+        url = URL_HASSOS_OTA.format(version=version, board=self.board)
+        raucb = Path(self.sys_config.path_tmp, f"hassos-{version}.raucb")
+
+        try:
+            _LOGGER.info("Fetch OTA update from %s", url)
+            async with self.sys_websession.get(url) as request:
+                with raucb.open('wb') as ota_file:
+                    while True:
+                        chunk = await request.content.read(1048576)
+                        if not chunk:
+                            break
+                        ota_file.write(chunk)
+
+            _LOGGER.info("OTA update is downloaded on %s", raucb)
+            return raucb
+
+        except aiohttp.ClientError as err:
+            _LOGGER.warning("Can't fetch versions from %s: %s", url, err)
+
+        except OSError as err:
+            _LOGGER.error("Can't write ota file: %s", err)
+
+        raise HassOSUpdateError()

     async def load(self):
         """Load HassOS data."""
         try:
+            # Check needed host functions
+            assert self.sys_dbus.rauc.is_connected
+            assert self.sys_dbus.systemd.is_connected
+            assert self.sys_dbus.hostname.is_connected
+
             assert self.sys_host.info.cpe is not None
             cpe = CPE(self.sys_host.info.cpe)
             assert cpe.get_product()[0] == 'hassos'
-        except (NotImplementedError, IndexError, AssertionError):
-            _LOGGER.info("Can't detect HassOS")
+        except (AssertionError, NotImplementedError):
+            _LOGGER.debug("Ignore HassOS")
             return

         # Store meta data
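_download_raucb streams the OTA bundle to disk in 1 MiB (1048576-byte) chunks rather than buffering the whole image in memory. The same pattern as a standalone sketch, with illustrative names:

    import aiohttp

    async def stream_to_file(session: aiohttp.ClientSession, url: str, path: str):
        # Read the response body in 1 MiB chunks to bound memory use.
        async with session.get(url) as resp:
            with open(path, 'wb') as fh:
                while True:
                    chunk = await resp.content.read(1024 * 1024)
                    if not chunk:
                        break
                    fh.write(chunk)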
@@ -58,8 +97,48 @@ class HassOS(CoreSysAttributes):
         _LOGGER.info("Detect HassOS %s on host system", self.version)

     def config_sync(self):
-        """Trigger a host config reload from usb."""
+        """Trigger a host config reload from usb.
+
+        Return a coroutine.
+        """
         self._check_host()

         _LOGGER.info("Sync config from USB on HassOS.")
         return self.sys_host.services.restart('hassos-config.service')
+
+    async def update(self, version=None):
+        """Update HassOS system."""
+        version = version or self.version_latest
+
+        # Check installed version
+        self._check_host()
+        if version == self.version:
+            _LOGGER.warning("Version %s is already installed", version)
+            raise HassOSUpdateError()
+
+        # Fetch files from internet
+        int_ota = await self._download_raucb(version)
+        ext_ota = Path(self.sys_config.path_extern_tmp, int_ota.name)
+
+        try:
+            await self.sys_dbus.rauc.install(ext_ota)
+            completed = await self.sys_dbus.rauc.signal_completed()
+
+        except DBusError:
+            _LOGGER.error("Rauc communication error")
+            raise HassOSUpdateError() from None
+
+        finally:
+            int_ota.unlink()
+
+        # Update success
+        if 0 in completed:
+            _LOGGER.info("Install HassOS %s success", version)
+            self.sys_create_task(self.sys_host.control.reboot())
+            return
+
+        # Update fails
+        rauc_status = await self.sys_dbus.get_properties()
+        _LOGGER.error(
+            "HassOS update fails with: %s", rauc_status.get('LastError'))
+        raise HassOSUpdateError()
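Note the two paths for the same bundle: int_ota is where the supervisor writes the file inside its own container, while ext_ota is the path under which the host, and therefore the rauc daemon, sees it. completed holds the parsed payload of the Installer.Completed signal, which carries rauc's integer result code (0 on success), hence the `0 in completed` check. A sketch of the mapping, with a hypothetical host path:

    int_ota = Path('/data/tmp/hassos-1.1.raucb')            # supervisor's view
    ext_ota = Path('/usr/share/hassio/tmp', int_ota.name)   # host/rauc view (path illustrative)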
hassio/homeassistant.py
@@ -211,7 +211,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
         exists = await self.instance.exists()

         if exists and version == self.instance.version:
-            _LOGGER.info("Version %s is already installed", version)
+            _LOGGER.warning("Version %s is already installed", version)
             return False

         try:
hassio/updater.py
@@ -1,5 +1,4 @@
 """Fetch last versions from webserver."""
-import asyncio
 from contextlib import suppress
 from datetime import timedelta
 import json
@@ -9,11 +8,12 @@ import aiohttp

 from .const import (
     URL_HASSIO_VERSION, FILE_HASSIO_UPDATER, ATTR_HOMEASSISTANT, ATTR_HASSIO,
-    ATTR_CHANNEL)
+    ATTR_CHANNEL, ATTR_HASSOS)
 from .coresys import CoreSysAttributes
 from .utils import AsyncThrottle
 from .utils.json import JsonConfig
 from .validate import SCHEMA_UPDATER_CONFIG
+from .exceptions import HassioUpdaterError

 _LOGGER = logging.getLogger(__name__)

@@ -26,12 +26,15 @@ class Updater(JsonConfig, CoreSysAttributes):
         super().__init__(FILE_HASSIO_UPDATER, SCHEMA_UPDATER_CONFIG)
         self.coresys = coresys

-    def load(self):
-        """Update internal data.
-
-        Return a coroutine.
-        """
-        return self.reload()
+    async def load(self):
+        """Update internal data."""
+        with suppress(HassioUpdaterError):
+            await self.fetch_data()
+
+    async def reload(self):
+        """Update internal data."""
+        with suppress(HassioUpdaterError):
+            await self.fetch_data()

     @property
     def version_homeassistant(self):
@@ -43,6 +46,11 @@ class Updater(JsonConfig, CoreSysAttributes):
         """Return last version of hassio."""
         return self._data.get(ATTR_HASSIO)

+    @property
+    def version_hassos(self):
+        """Return last version of hassos."""
+        return self._data.get(ATTR_HASSOS)
+
     @property
     def channel(self):
         """Return upstream channel of hassio instance."""
@@ -54,38 +62,47 @@ class Updater(JsonConfig, CoreSysAttributes):
         self._data[ATTR_CHANNEL] = value

     @AsyncThrottle(timedelta(seconds=60))
-    async def reload(self):
+    async def fetch_data(self):
         """Fetch current versions from github.

         Is a coroutine.
         """
         url = URL_HASSIO_VERSION.format(channel=self.channel)
+        machine = self.sys_machine or 'default'
+        board = self.sys_hassos.board
+
         try:
             _LOGGER.info("Fetch update data from %s", url)
             async with self.sys_websession.get(url, timeout=10) as request:
                 data = await request.json(content_type=None)

-        except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err:
+        except aiohttp.ClientError as err:
             _LOGGER.warning("Can't fetch versions from %s: %s", url, err)
-            return
+            raise HassioUpdaterError() from None

         except json.JSONDecodeError as err:
             _LOGGER.warning("Can't parse versions from %s: %s", url, err)
-            return
+            raise HassioUpdaterError() from None

         # data valid?
         if not data or data.get(ATTR_CHANNEL) != self.channel:
             _LOGGER.warning("Invalid data from %s", url)
-            return
+            raise HassioUpdaterError() from None

-        # update supervisor versions
-        with suppress(KeyError):
+        try:
+            # update supervisor version
             self._data[ATTR_HASSIO] = data['supervisor']

             # update Home Assistant version
-            machine = self.sys_machine or 'default'
-            with suppress(KeyError):
-                self._data[ATTR_HOMEASSISTANT] = \
-                    data['homeassistant'][machine]
+            self._data[ATTR_HOMEASSISTANT] = data['homeassistant'][machine]
+
+            # update hassos version
+            if self.sys_hassos.available and board:
+                self._data[ATTR_HASSOS] = data['hassos'][board]
+
+        except KeyError as err:
+            _LOGGER.warning("Can't process version data: %s", err)
+            raise HassioUpdaterError() from None
+
+        else:
             self.save_data()
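fetch_data thus expects the channel's version document to provide a supervisor version, a Home Assistant version per machine type, and now a HassOS version per board; a missing key aborts with HassioUpdaterError instead of being silently skipped. An illustrative payload (all values hypothetical):

    {
      "channel": "stable",
      "supervisor": "110",
      "homeassistant": {"default": "0.70.1", "raspberrypi3": "0.70.1"},
      "hassos": {"rpi3": "1.1", "ova": "1.1"}
    }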
hassio/utils/__init__.py
@@ -1,11 +1,9 @@
 """Tools file for HassIO."""
-import asyncio
 from datetime import datetime, timedelta, timezone
 import logging
 import re

 import aiohttp
-import async_timeout
 import pytz

 UTC = pytz.utc
@@ -29,11 +27,10 @@ async def fetch_timezone(websession):
     """Read timezone from freegeoip."""
     data = {}
     try:
-        with async_timeout.timeout(10):
-            async with websession.get(FREEGEOIP_URL) as request:
-                data = await request.json()
+        async with websession.get(FREEGEOIP_URL, timeout=10) as request:
+            data = await request.json()

-    except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err:
+    except aiohttp.ClientError as err:
         _LOGGER.warning("Can't fetch freegeoip data: %s", err)

     except ValueError as err:
hassio/utils/gdbus.py
@@ -4,6 +4,7 @@ import logging
 import json
 import shlex
 import re
+from signal import SIGINT
 import xml.etree.ElementTree as ET

 from ..exceptions import DBusFatalError, DBusParseError
@@ -20,11 +21,14 @@ RE_GVARIANT_STRING = re.compile(r"(?<=(?: |{|\[|\())'(.*?)'(?=(?:|]|}|,|\)))")
 RE_GVARIANT_TUPLE_O = re.compile(r"\"[^\"]*?\"|(\()")
 RE_GVARIANT_TUPLE_C = re.compile(r"\"[^\"]*?\"|(,?\))")

+RE_MONITOR_OUTPUT = re.compile(r".+?: (?P<signal>[^ ].+) (?P<data>.*)")
+
 # Commands for dbus
 INTROSPECT = ("gdbus introspect --system --dest {bus} "
               "--object-path {object} --xml")
 CALL = ("gdbus call --system --dest {bus} --object-path {object} "
         "--method {method} {args}")
+MONITOR = ("gdbus monitor --system --dest {bus}")

 DBUS_METHOD_GETALL = 'org.freedesktop.DBus.Properties.GetAll'

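RE_MONITOR_OUTPUT splits each line that `gdbus monitor` prints into the fully qualified signal name and its GVariant payload. A quick check with a made-up monitor line:

    import re

    RE_MONITOR_OUTPUT = re.compile(r".+?: (?P<signal>[^ ].+) (?P<data>.*)")

    line = "/: de.pengutronix.rauc.Installer.Completed (0,)"
    match = RE_MONITOR_OUTPUT.match(line)
    assert match.group('signal') == 'de.pengutronix.rauc.Installer.Completed'
    assert match.group('data') == '(0,)'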
@@ -37,6 +41,7 @@ class DBus:
         self.bus_name = bus_name
         self.object_path = object_path
         self.methods = set()
+        self.signals = set()

     @staticmethod
     async def connect(bus_name, object_path):
@@ -69,12 +74,19 @@ class DBus:
         _LOGGER.debug("data: %s", data)
         for interface in xml.findall("./interface"):
             interface_name = interface.get('name')
+
+            # Methods
             for method in interface.findall("./method"):
                 method_name = method.get('name')
                 self.methods.add(f"{interface_name}.{method_name}")

+            # Signals
+            for signal in interface.findall("./signal"):
+                signal_name = signal.get('name')
+                self.signals.add(f"{interface_name}.{signal_name}")
+
     @staticmethod
-    def _gvariant(raw):
+    def parse_gvariant(raw):
         """Parse GVariant input to python."""
         raw = RE_GVARIANT_TYPE.sub("", raw)
         raw = RE_GVARIANT_VARIANT.sub(r"\1", raw)
@@ -108,7 +120,7 @@ class DBus:
         data = await self._send(command)

         # Parse and return data
-        return self._gvariant(data)
+        return self.parse_gvariant(data)

     async def get_properties(self, interface):
         """Read all properties from interface."""
@@ -143,6 +155,17 @@ class DBus:
         # End
         return data.decode()

+    def attach_signals(self, filters=None):
+        """Generate a signals wrapper."""
+        return DBusSignalWrapper(self, filters)
+
+    async def wait_signal(self, signal):
+        """Wait for single event."""
+        monitor = DBusSignalWrapper(self, [signal])
+        async with monitor as signals:
+            async for signal in signals:
+                return signal
+
     def __getattr__(self, name):
         """Mapping to dbus method."""
         return getattr(DBusCallWrapper(self, self.bus_name), name)
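wait_signal builds a one-shot monitor: it starts a DBusSignalWrapper filtered to a single signal and returns the first parsed payload it yields. A usage sketch matching how rauc.py uses it above:

    # Blocks until Installer.Completed fires, then returns its parsed payload.
    data = await dbus.wait_signal('de.pengutronix.rauc.Installer.Completed')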
@@ -176,3 +199,71 @@ class DBusCallWrapper:
             return self.dbus.call_dbus(interface, *args)

         return _method_wrapper
+
+
+class DBusSignalWrapper:
+    """Process Signals."""
+
+    def __init__(self, dbus, signals=None):
+        """Initialize dbus signal wrapper."""
+        self.dbus = dbus
+        self._signals = signals
+        self._proc = None
+
+    async def __aenter__(self):
+        """Start monitor events."""
+        _LOGGER.info("Start dbus monitor on %s", self.dbus.bus_name)
+        command = shlex.split(MONITOR.format(
+            bus=self.dbus.bus_name
+        ))
+        self._proc = await asyncio.create_subprocess_exec(
+            *command,
+            stdin=asyncio.subprocess.DEVNULL,
+            stdout=asyncio.subprocess.PIPE,
+            stderr=asyncio.subprocess.PIPE
+        )
+
+        return self
+
+    async def __aexit__(self, exception_type, exception_value, traceback):
+        """Stop monitor events."""
+        _LOGGER.info("Stop dbus monitor on %s", self.dbus.bus_name)
+        self._proc.send_signal(SIGINT)
+        await self._proc.communicate()
+
+    async def __aiter__(self):
+        """Start iteration."""
+        return self
+
+    async def __anext__(self):
+        """Get next data."""
+        if not self._proc:
+            raise StopAsyncIteration()
+
+        # Read signals
+        while True:
+            try:
+                data = await self._proc.stdout.readline()
+            except asyncio.TimeoutError:
+                raise StopAsyncIteration() from None
+
+            # Program close
+            if not data:
+                raise StopAsyncIteration()
+
+            # Extract metadata
+            match = RE_MONITOR_OUTPUT.match(data.decode())
+            if not match:
+                continue
+            signal = match.group('signal')
+            data = match.group('data')
+
+            # Filter signals?
+            if self._signals and signal not in self._signals:
+                _LOGGER.debug("Skip event %s - %s", signal, data)
+                continue
+
+            try:
+                return self.dbus.parse_gvariant(data)
+            except DBusParseError:
+                raise StopAsyncIteration() from None
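attach_signals is the long-lived counterpart to wait_signal: an async context manager that spawns `gdbus monitor`, stops it with SIGINT on exit, and yields parsed signal payloads as they arrive. A consumption sketch (the signal name is illustrative):

    async with dbus.attach_signals(['org.freedesktop.systemd1.Manager.JobRemoved']) as signals:
        async for data in signals:
            ...  # each item is the parsed GVariant payload of a matching signal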
hassio/validate.py
@@ -6,7 +6,7 @@ import voluptuous as vol
 import pytz

 from .const import (
-    ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_CHANNEL, ATTR_TIMEZONE,
+    ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_CHANNEL, ATTR_TIMEZONE, ATTR_HASSOS,
     ATTR_ADDONS_CUSTOM_LIST, ATTR_PASSWORD, ATTR_HOMEASSISTANT, ATTR_HASSIO,
     ATTR_BOOT, ATTR_LAST_BOOT, ATTR_SSL, ATTR_PORT, ATTR_WATCHDOG,
     ATTR_WAIT_BOOT, ATTR_UUID, CHANNEL_STABLE, CHANNEL_BETA, CHANNEL_DEV)
@@ -99,6 +99,7 @@ SCHEMA_UPDATER_CONFIG = vol.Schema({
    vol.Optional(ATTR_CHANNEL, default=CHANNEL_STABLE): CHANNELS,
    vol.Optional(ATTR_HOMEASSISTANT): vol.Coerce(str),
    vol.Optional(ATTR_HASSIO): vol.Coerce(str),
+   vol.Optional(ATTR_HASSOS): vol.Coerce(str),
 }, extra=vol.REMOVE_EXTRA)
(Frontend submodule)
@@ -1 +1 @@
-Subproject commit 313a3dd2c93a85b47b78bf9ee76d81ac43d64239
+Subproject commit 42026f096fdb382ae751bcbab60b947aea8b3d25