Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-08-11 10:09:21 +00:00)
Compare commits
173 Commits
Commit SHA1s (author, avatar, and date columns were not preserved):

803eb0f8c9, 58c5ed7ba1, c4d7d671d1, 9d88255225, bfbc366f55, 0f30a23f3e, 7e1bb42bb7, 251a43216e,
4801b9903c, cd5a09938f, 14bf834224, 8aec943a5c, d817e75d98, fbd8abdcd5, ca02977505, 6533b57c6d,
0a818282d3, ce2f5f9f7a, 01f767e66c, 106ab924e3, d031594bf9, f2f146063b, 5abe7a3fb9, f592971b6e,
ed2caa0d81, 0b04c90b1f, 2eac4b8d9b, 143a358b0c, fa049066fc, 3877dcf355, bfa7443ae2, 253962df87,
f8fbee68f4, 3c5d4037f7, 772709dd75, bcfd76d33c, 2bbe7e7dc1, dbcd090244, a0a1fd4875, d978ec00aa,
e40963a686, 55ec1a84fa, cf154b57f3, ebf4daf4cc, 40e8f411ff, 421b380043, 5ebf2068b2, e5fc6846e0,
906c4e03fb, 02c8baef68, a14917e017, 7e5b2673dc, d31895123e, 6c1456902e, 03bed162f4, f798e75e30,
710f8570d2, 4dfd11ffb4, 4e4368debb, 30c7ddf4ef, 7186f5a8c0, f52d1c4509, 4dbece8e8e, f731c630a6,
0ac96c207e, e2a29b7290, f107a73e28, 2c68e5801f, 91502a0727, 872f1d0ae3, 3c4240a8a8, 7a470bb3ac,
766a9af54e, ca303a62f2, 90030d3a28, 0ed48a7741, a33d765776, 6bb4f0e369, 56a9f64730, d5eb66bc0d,
40343089b5, 1b887e38d6, ba96f99cde, b7f5cc868b, c8343fdfb0, 91e4bf1676, 6dba8d4ef9, 65eaed4f90,
8233083392, 106378d1d0, 01d18d5ff3, 6d23f3bd1c, ef96579a29, 44f0a9f21a, d854307acb, 334b41de71,
1da50eab7a, b119a42f4d, 99aa438817, 99fa91f480, 93969d264d, 711e199977, 4e645332c3, df8afb3337,
255a33fc08, d15b6f0294, 1aa24e40ae, c0bde4a488, 2a09b70294, e35b0a54c1, 8287330c67, 6b16da93cd,
c1cd9bba45, e33420f26e, abd9683e11, 8cbeabbe21, df7d988d2f, 544c009b9c, b2e0babc60, f7c79cbd3a,
587e9618da, cb2dd3b81c, 8d4dd7de3f, 6927c989d0, 97853d1691, 0cdef0d118, 0b17ffc243, c516d46f16,
cb8ec22b6d, 4a5fbd79c1, b636a03567, c96faf7c0a, 2e1cd4076a, 9984a638ba, a492bccc03, e7a0e0f565,
030e081d45, 8537536368, f03f323aac, 58c0c67796, f5e196a663, 808df68e57, fa51c2e6e9, ba3760e770,
ad1a8557b8, fe91f812d9, 4cc11305c7, 898c0330c8, 33e5f94f1f, da4ee63890, d34203b133, 23addfb9a6,
81e1227a7b, 75be8666a6, 6031a60084, 39d5785118, bddcdcadb2, 3eac6a3366, 3c7b962cf9, bd756e2a9c,
e7920bee2a, ebcc21370e, 34c4acf199, 47e45dfc9f, 2ecea7c1b4, 5c0eccd12f, f34ab9402b, 2569a82caf,
4bdd256000, 6f4f6338c5, 7cb72b55a8, 7a8ee2c46a, 6e9ef17a28
.travis.yml
@@ -2,7 +2,7 @@ sudo: false
 matrix:
   fast_finish: true
   include:
-    - python: "3.5"
+    - python: "3.6"

 cache:
   directories:
API.md (193 lines changed)
@@ -34,18 +34,17 @@ The addons from `addons` are only the installed ones.
    "last_version": "LAST_VERSION",
    "arch": "armhf|aarch64|i386|amd64",
    "beta_channel": "true|false",
    "timezone": "TIMEZONE",
    "addons": [
        {
            "name": "xy bla",
            "slug": "xy",
            "description": "description",
            "arch": ["armhf", "aarch64", "i386", "amd64"],
            "repository": "12345678|null",
            "version": "LAST_VERSION",
            "installed": "INSTALL_VERSION",
            "detached": "bool",
            "build": "bool",
            "url": "null|url"
            "logo": "bool",
            "state": "started|stopped",
        }
    ],
    "addons_repositories": [
@@ -54,38 +53,6 @@ The addons from `addons` are only the installed ones.
}
```

- GET `/supervisor/addons`

Get all available addons

```json
{
    "addons": [
        {
            "name": "xy bla",
            "slug": "xy",
            "description": "description",
            "arch": ["armhf", "aarch64", "i386", "amd64"],
            "repository": "core|local|REP_ID",
            "version": "LAST_VERSION",
            "installed": "none|INSTALL_VERSION",
            "detached": "bool",
            "build": "bool",
            "url": "null|url"
        }
    ],
    "repositories": [
        {
            "slug": "12345678",
            "name": "Repository Name",
            "source": "URL_OF_REPOSITORY",
            "url": "null|WEBSITE",
            "maintainer": "null|BLA BLU <fla@dld.ch>"
        }
    ]
}
```

- POST `/supervisor/update`
Optional:
```json
@@ -98,6 +65,7 @@ Optional:
```json
{
    "beta_channel": "true|false",
    "timezone": "TIMEZONE",
    "addons_repositories": [
        "REPO_URL"
    ]
@@ -146,7 +114,80 @@ Return QR-Code
}
```

### Backup/Snapshot

- GET `/snapshots`
```json
{
    "snapshots": [
        {
            "slug": "SLUG",
            "date": "ISO",
            "name": "Custom name"
        }
    ]
}
```

- POST `/snapshots/reload`

- POST `/snapshots/new/full`
```json
{
    "name": "Optional"
}
```

- POST `/snapshots/new/partial`
```json
{
    "name": "Optional",
    "addons": ["ADDON_SLUG"],
    "folders": ["FOLDER_NAME"]
}
```

- POST `/snapshots/reload`

- GET `/snapshots/{slug}/info`
```json
{
    "slug": "SNAPSHOT ID",
    "type": "full|partial",
    "name": "custom snapshot name / description",
    "date": "ISO",
    "size": "SIZE_IN_MB",
    "homeassistant": {
        "version": "INSTALLED_HASS_VERSION",
        "devices": []
    },
    "addons": [
        {
            "slug": "ADDON_SLUG",
            "name": "NAME",
            "version": "INSTALLED_VERSION"
        }
    ],
    "repositories": ["URL"],
    "folders": ["NAME"]
}
```

- POST `/snapshots/{slug}/remove`

- POST `/snapshots/{slug}/restore/full`

- POST `/snapshots/{slug}/restore/partial`
```json
{
    "homeassistant": "bool",
    "addons": ["ADDON_SLUG"],
    "folders": ["FOLDER_NAME"]
}
```

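A quick sketch of driving the snapshot endpoints above from Python. The base URL is an assumption (add-ons of this era typically reached the Supervisor at `http://hassio`), and aiohttp stands in for whatever HTTP client you use:

```python
"""Minimal sketch: create a full snapshot via the REST API above.

BASE_URL is an assumption, not part of this diff; adjust it for
your setup.
"""
import asyncio

import aiohttp

BASE_URL = "http://hassio"  # assumed Supervisor endpoint


async def create_full_snapshot(name=None):
    """POST /snapshots/new/full with an optional custom name."""
    payload = {"name": name} if name else {}
    async with aiohttp.ClientSession() as session:
        async with session.post(
                "{}/snapshots/new/full".format(BASE_URL),
                json=payload) as resp:
            return await resp.json()


loop = asyncio.get_event_loop()
print(loop.run_until_complete(create_full_snapshot("nightly")))
```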
### Host
- POST `/host/reload`

- POST `/host/shutdown`

@@ -176,6 +217,11 @@ Optional:
### Network

- GET `/network/info`
```json
{
    "hostname": ""
}
```

- POST `/network/options`
```json
@@ -196,7 +242,10 @@ Optional:
```json
{
    "version": "INSTALL_VERSION",
    "last_version": "LAST_VERSION"
    "last_version": "LAST_VERSION",
    "devices": [""],
    "image": "str",
    "custom": "bool -> if custom image"
}
```

@@ -214,33 +263,92 @@ Output the raw docker log

- POST `/homeassistant/restart`

- POST `/homeassistant/options`
```json
{
    "devices": [],
    "image": "Optional|null",
    "last_version": "Optional for custom image|null"
}
```

Setting `image` and `last_version` to `null` resets these options.

### REST API addons

- GET `/addons`

Get all available addons

```json
{
    "addons": [
        {
            "name": "xy bla",
            "slug": "xy",
            "description": "description",
            "arch": ["armhf", "aarch64", "i386", "amd64"],
            "repository": "core|local|REP_ID",
            "version": "LAST_VERSION",
            "installed": "none|INSTALL_VERSION",
            "detached": "bool",
            "build": "bool",
            "url": "null|url",
            "logo": "bool"
        }
    ],
    "repositories": [
        {
            "slug": "12345678",
            "name": "Repository Name|unknown",
            "source": "URL_OF_REPOSITORY",
            "url": "WEBSITE|REPOSITORY",
            "maintainer": "BLA BLU <fla@dld.ch>|unknown"
        }
    ]
}
```

- POST `/addons/reload`

- GET `/addons/{addon}/info`
```json
{
    "name": "xy bla",
    "description": "description",
    "auto_update": "bool",
    "url": "null|url of addon",
    "detached": "bool",
    "repository": "12345678|null",
    "version": "VERSION",
    "version": "null|VERSION_INSTALLED",
    "last_version": "LAST_VERSION",
    "state": "started|stopped",
    "state": "none|started|stopped",
    "boot": "auto|manual",
    "build": "bool",
    "options": {},
    "options": "{}",
    "network": "{}|null",
    "host_network": "bool",
    "logo": "bool",
    "webui": "null|http(s)://[HOST]:port/xy/zx"
}
```

- GET `/addons/{addon}/logo`

- POST `/addons/{addon}/options`
```json
{
    "boot": "auto|manual",
    "auto_update": "bool",
    "network": {
        "CONTAINER": "port|[ip, port]"
    },
    "options": {},
}
```

To reset custom network settings, set them to `null`.

- POST `/addons/{addon}/start`

- POST `/addons/{addon}/stop`

@@ -281,8 +389,10 @@ Communicate over unix socket with a host daemon.
# shutdown
# host-update [v]

# hostname xy

# network info
# network hostname xy
-> {}
# network wlan ssd xy
# network wlan password xy
# network int ip xy

@@ -294,6 +404,7 @@ features:
- shutdown
- reboot
- update
- hostname
- network_info
- network_control

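The host commands above travel over a unix socket as plain text; a hypothetical client sketch (the socket path, line framing, and reply format are all assumptions; the diff only lists the command names):

```python
"""Hypothetical client for the host-daemon protocol above.

SOCKET_PATH and the newline-terminated framing are assumptions.
"""
import asyncio

SOCKET_PATH = "/var/run/hassio-hc.sock"  # assumed path


async def host_command(command):
    """Send one command line and read a single-line reply."""
    reader, writer = await asyncio.open_unix_connection(SOCKET_PATH)
    writer.write("{}\n".format(command).encode())
    reply = await reader.readline()
    writer.close()
    return reply.decode().strip()


loop = asyncio.get_event_loop()
print(loop.run_until_complete(host_command("network info")))
```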
README.md (16 lines changed)
@@ -9,18 +9,6 @@ Hass.io is a Docker based system for managing your Home Assistant installation a

**HassIO is under active development and is not ready yet for production use.**

## Installing Hass.io
## Installation

Look at our [website](https://home-assistant.io/hassio).

# HomeAssistant

## SSL

All addons that create SSL certs follow the same file structure. If you use one, add the following lines to your `configuration.yaml`.

```yaml
http:
  ssl_certificate: /ssl/fullchain.pem
  ssl_key: /ssl/privkey.pem
```
Installation instructions can be found at [https://home-assistant.io/hassio](https://home-assistant.io/hassio).

hassio/__main__.py
@@ -1,5 +1,6 @@
"""Main file for HassIO."""
import asyncio
from concurrent.futures import ThreadPoolExecutor
import logging
import sys

@@ -17,16 +18,27 @@ if __name__ == "__main__":
        exit(1)

    loop = asyncio.get_event_loop()
    hassio = core.HassIO(loop)
    executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
    loop.set_default_executor(executor)

    _LOGGER.info("Initialize Hassio setup")
    config = bootstrap.initialize_system_data()
    hassio = core.HassIO(loop, config)

    bootstrap.migrate_system_env(config)

    _LOGGER.info("Run Hassio setup")
    loop.run_until_complete(hassio.setup())

    _LOGGER.info("Start Hassio task")
    _LOGGER.info("Start Hassio")
    loop.call_soon_threadsafe(loop.create_task, hassio.start())
    loop.call_soon_threadsafe(bootstrap.reg_signal, loop, hassio)

    _LOGGER.info("Run Hassio loop")
    loop.run_forever()

    _LOGGER.info("Cleanup system")
    executor.shutdown(wait=False)
    loop.close()

    _LOGGER.info("Close Hassio")
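A note on the executor hunk: naming the default executor makes blocking helpers show up with readable thread names in logs. The pattern in isolation (the blocking function here is a stand-in):

```python
"""Sketch: run blocking work on a named default executor.

Standalone illustration of the pattern used in __main__.py above.
"""
import asyncio
import time
from concurrent.futures import ThreadPoolExecutor


def blocking_io():
    """Stand-in for blocking work (e.g. docker or file access)."""
    time.sleep(0.1)
    return "done"


loop = asyncio.get_event_loop()
# Threads will be named SyncWorker_0, SyncWorker_1, ... in logs.
loop.set_default_executor(ThreadPoolExecutor(thread_name_prefix="SyncWorker"))

# run_in_executor(None, ...) now uses the named pool.
result = loop.run_until_complete(loop.run_in_executor(None, blocking_io))
print(result)
```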
hassio/addons/__init__.py
@@ -1,218 +1,133 @@
"""Init file for HassIO addons."""
import asyncio
import logging
import shutil

from .data import AddonsData
from .git import AddonsRepoHassIO, AddonsRepoCustom
from ..const import STATE_STOPPED, STATE_STARTED
from ..dock.addon import DockerAddon
from .addon import Addon
from .repository import Repository
from .data import Data
from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO

_LOGGER = logging.getLogger(__name__)

BUILTIN_REPOSITORIES = set((REPOSITORY_CORE, REPOSITORY_LOCAL))


class AddonManager(AddonsData):

class AddonManager(object):
    """Manage addons inside HassIO."""

    def __init__(self, config, loop, dock):
        """Initialize docker base wrapper."""
        super().__init__(config)

        self.loop = loop
        self.config = config
        self.dock = dock
        self.repositories = []
        self.dockers = {}
        self.data = Data(config)
        self.addons = {}
        self.repositories = {}

    async def prepare(self, arch):
    @property
    def list_addons(self):
        """Return a list of all addons."""
        return list(self.addons.values())

    @property
    def list_repositories(self):
        """Return list of addon repositories."""
        return list(self.repositories.values())

    def get(self, addon_slug):
        """Return a addon from slug."""
        return self.addons.get(addon_slug)

    async def prepare(self):
        """Startup addon management."""
        self.arch = arch
        self.data.reload()

        # init hassio repository
        self.repositories.append(AddonsRepoHassIO(self.config, self.loop))
        # init hassio built-in repositories
        repositories = \
            set(self.config.addons_repositories) | BUILTIN_REPOSITORIES

        # init custom repositories
        for url in self.config.addons_repositories:
            self.repositories.append(
                AddonsRepoCustom(self.config, self.loop, url))

        # load addon repository
        tasks = [addon.load() for addon in self.repositories]
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)

        # read data from repositories
        self.read_data_from_repositories()
        self.merge_update_config()

        # load installed addons
        for addon in self.list_installed:
            self.dockers[addon] = DockerAddon(
                self.config, self.loop, self.dock, self, addon)
            await self.dockers[addon].attach()

    async def add_git_repository(self, url):
        """Add a new custom repository."""
        if url in self.config.addons_repositories:
            _LOGGER.warning("Repository already exists %s", url)
            return False

        repo = AddonsRepoCustom(self.config, self.loop, url)

        if not await repo.load():
            _LOGGER.error("Can't load from repository %s", url)
            return False

        self.config.addons_repositories = url
        self.repositories.append(repo)
        return True

    def drop_git_repository(self, url):
        """Remove a custom repository."""
        for repo in self.repositories:
            if repo.url == url:
                self.repositories.remove(repo)
                self.config.drop_addon_repository(url)
                repo.remove()
                return True

        return False
        # init custom repositories & load addons
        await self.load_repositories(repositories)

    async def reload(self):
        """Update addons from repo and reload list."""
        tasks = [addon.pull() for addon in self.repositories]
        if not tasks:
            return

        await asyncio.wait(tasks, loop=self.loop)

        # read data from repositories
        self.read_data_from_repositories()
        self.merge_update_config()

        # remove stalled addons
        for addon in self.list_detached:
            _LOGGER.warning("Dedicated addon '%s' found!", addon)

    async def auto_boot(self, start_type):
        """Boot addons with mode auto."""
        boot_list = self.list_startup(start_type)
        tasks = [self.start(addon) for addon in boot_list]

        _LOGGER.info("Startup %s run %d addons", start_type, len(tasks))
        tasks = [repository.update() for repository in
                 self.repositories.values()]
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)

    async def install(self, addon, version=None):
        """Install a addon."""
        if not self.exists_addon(addon):
            _LOGGER.error("Addon %s not exists for install", addon)
            return False
        # read data from repositories
        self.data.reload()

        if self.arch not in self.get_arch(addon):
            _LOGGER.error("Addon %s not supported on %s", addon, self.arch)
            return False
        # update addons
        await self.load_addons()

        if self.is_installed(addon):
            _LOGGER.error("Addon %s is already installed", addon)
            return False
    async def load_repositories(self, list_repositories):
        """Add a new custom repository."""
        new_rep = set(list_repositories)
        old_rep = set(self.repositories)

        if not self.path_data(addon).is_dir():
            _LOGGER.info("Create Home-Assistant addon data folder %s",
                         self.path_data(addon))
            self.path_data(addon).mkdir()
        # add new repository
        async def _add_repository(url):
            """Helper function to async add repository."""
            repository = Repository(self.config, self.loop, self.data, url)
            if not await repository.load():
                _LOGGER.error("Can't load from repository %s", url)
                return
            self.repositories[url] = repository

        addon_docker = DockerAddon(
            self.config, self.loop, self.dock, self, addon)
            # don't add built-in repository to config
            if url not in BUILTIN_REPOSITORIES:
                self.config.addons_repositories = url

        version = version or self.get_last_version(addon)
        if not await addon_docker.install(version):
            return False
        tasks = [_add_repository(url) for url in new_rep - old_rep]
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)

        self.dockers[addon] = addon_docker
        self.set_addon_install(addon, version)
        return True
        # del new repository
        for url in old_rep - new_rep - BUILTIN_REPOSITORIES:
            self.repositories.pop(url).remove()
            self.config.drop_addon_repository(url)

    async def uninstall(self, addon):
        """Remove a addon."""
        if not self.is_installed(addon):
            _LOGGER.error("Addon %s is already uninstalled", addon)
            return False
        # update data
        self.data.reload()
        await self.load_addons()

        if addon not in self.dockers:
            _LOGGER.error("No docker found for addon %s", addon)
            return False
    async def load_addons(self):
        """Update/add internal addon store."""
        all_addons = set(self.data.system) | set(self.data.cache)

        if not await self.dockers[addon].remove():
            return False
        # calc diff
        add_addons = all_addons - set(self.addons)
        del_addons = set(self.addons) - all_addons

        if self.path_data(addon).is_dir():
            _LOGGER.info("Remove Home-Assistant addon data folder %s",
                         self.path_data(addon))
            shutil.rmtree(str(self.path_data(addon)))
        _LOGGER.info("Load addons: %d all - %d new - %d remove",
                     len(all_addons), len(add_addons), len(del_addons))

        self.dockers.pop(addon)
        self.set_addon_uninstall(addon)
        return True
        # new addons
        tasks = []
        for addon_slug in add_addons:
            addon = Addon(
                self.config, self.loop, self.dock, self.data, addon_slug)

    async def state(self, addon):
        """Return running state of addon."""
        if addon not in self.dockers:
            _LOGGER.error("No docker found for addon %s", addon)
            return
            tasks.append(addon.load())
            self.addons[addon_slug] = addon

        if await self.dockers[addon].is_running():
            return STATE_STARTED
        return STATE_STOPPED
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)

    async def start(self, addon):
        """Set options and start addon."""
        if addon not in self.dockers:
            _LOGGER.error("No docker found for addon %s", addon)
            return False
        # remove
        for addon_slug in del_addons:
            self.addons.pop(addon_slug)

        if not self.write_addon_options(addon):
            _LOGGER.error("Can't write options for addon %s", addon)
            return False
    async def auto_boot(self, stage):
        """Boot addons with mode auto."""
        tasks = []
        for addon in self.addons.values():
            if addon.is_installed and addon.boot == BOOT_AUTO and \
                    addon.startup == stage:
                tasks.append(addon.start())

        return await self.dockers[addon].run()

    async def stop(self, addon):
        """Stop addon."""
        if addon not in self.dockers:
            _LOGGER.error("No docker found for addon %s", addon)
            return False

        return await self.dockers[addon].stop()

    async def update(self, addon, version=None):
        """Update addon."""
        if addon not in self.dockers:
            _LOGGER.error("No docker found for addon %s", addon)
            return False

        version = version or self.get_last_version(addon)
        is_running = await self.dockers[addon].is_running()

        # update
        if await self.dockers[addon].update(version):
            self.set_addon_update(addon, version)
            if is_running:
                await self.start(addon)
            return True
        return False

    async def restart(self, addon):
        """Restart addon."""
        if addon not in self.dockers:
            _LOGGER.error("No docker found for addon %s", addon)
            return False

        return await self.dockers[addon].restart()

    async def logs(self, addon):
        """Return addons log output."""
        if addon not in self.dockers:
            _LOGGER.error("No docker found for addon %s", addon)
            return False

        return await self.dockers[addon].logs()
        _LOGGER.info("Startup %s run %d addons", stage, len(tasks))
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)
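The new `load_repositories` and `load_addons` above both follow the same set-difference reconcile pattern; distilled (names here are illustrative, not from the diff):

```python
"""Distilled sketch of the reconcile pattern used in AddonManager.

`desired` and `current` stand in for repository URLs or addon slugs.
"""

def reconcile(desired, current, protected=frozenset()):
    """Return (to_add, to_remove) so `current` converges on `desired`."""
    desired, current = set(desired), set(current)
    to_add = desired - current
    # never drop protected entries (e.g. built-in repositories)
    to_remove = current - desired - protected
    return to_add, to_remove


built_in = {"core", "local"}
to_add, to_remove = reconcile(
    desired={"core", "local", "https://example.com/repo"},
    current={"core", "local", "https://old.example.com/repo"},
    protected=built_in,
)
print(to_add)     # {'https://example.com/repo'}
print(to_remove)  # {'https://old.example.com/repo'}
```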
hassio/addons/addon.py (new file, 519 lines)
@@ -0,0 +1,519 @@
"""Init file for HassIO addons."""
from copy import deepcopy
import logging
import json
from pathlib import Path, PurePath
import re
import shutil
import tarfile
from tempfile import TemporaryDirectory

import voluptuous as vol
from voluptuous.humanize import humanize_error

from .validate import (
    validate_options, SCHEMA_ADDON_SNAPSHOT, MAP_VOLUME)
from ..const import (
    ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_BOOT, ATTR_MAP,
    ATTR_OPTIONS, ATTR_PORTS, ATTR_SCHEMA, ATTR_IMAGE, ATTR_REPOSITORY,
    ATTR_URL, ATTR_ARCH, ATTR_LOCATON, ATTR_DEVICES, ATTR_ENVIRONMENT,
    ATTR_HOST_NETWORK, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_STARTUP,
    STATE_STARTED, STATE_STOPPED, STATE_NONE, ATTR_USER, ATTR_SYSTEM,
    ATTR_STATE, ATTR_TIMEOUT, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_WEBUI)
from .util import check_installed
from ..dock.addon import DockerAddon
from ..tools import write_json_file, read_json_file

_LOGGER = logging.getLogger(__name__)

RE_VOLUME = re.compile(MAP_VOLUME)
RE_WEBUI = re.compile(r"^(.*\[HOST\]:)\[PORT:(\d+)\](.*)$")


class Addon(object):
    """Hold data for addon inside HassIO."""

    def __init__(self, config, loop, dock, data, slug):
        """Initialize data holder."""
        self.loop = loop
        self.config = config
        self.data = data
        self._id = slug

        self.addon_docker = DockerAddon(config, loop, dock, self)

    async def load(self):
        """Async initialize of object."""
        if self.is_installed:
            await self.addon_docker.attach()

    @property
    def slug(self):
        """Return slug/id of addon."""
        return self._id

    @property
    def _mesh(self):
        """Return addon data from system or cache."""
        return self.data.system.get(self._id, self.data.cache.get(self._id))

    @property
    def is_installed(self):
        """Return True if a addon is installed."""
        return self._id in self.data.system

    @property
    def is_detached(self):
        """Return True if addon is detached."""
        return self._id not in self.data.cache

    @property
    def version_installed(self):
        """Return installed version."""
        return self.data.user.get(self._id, {}).get(ATTR_VERSION)

    def _set_install(self, version):
        """Set addon as installed."""
        self.data.system[self._id] = deepcopy(self.data.cache[self._id])
        self.data.user[self._id] = {
            ATTR_OPTIONS: {},
            ATTR_VERSION: version,
        }
        self.data.save()

    def _set_uninstall(self):
        """Set addon as uninstalled."""
        self.data.system.pop(self._id, None)
        self.data.user.pop(self._id, None)
        self.data.save()

    def _set_update(self, version):
        """Update version of addon."""
        self.data.system[self._id] = deepcopy(self.data.cache[self._id])
        self.data.user[self._id][ATTR_VERSION] = version
        self.data.save()

    def _restore_data(self, user, system):
        """Restore data to addon."""
        self.data.user[self._id] = deepcopy(user)
        self.data.system[self._id] = deepcopy(system)
        self.data.save()

    @property
    def options(self):
        """Return options with local changes."""
        if self.is_installed:
            return {
                **self.data.system[self._id][ATTR_OPTIONS],
                **self.data.user[self._id][ATTR_OPTIONS],
            }
        return self.data.cache[self._id][ATTR_OPTIONS]

    @options.setter
    def options(self, value):
        """Store user addon options."""
        self.data.user[self._id][ATTR_OPTIONS] = deepcopy(value)
        self.data.save()

    @property
    def boot(self):
        """Return boot config with prio local settings."""
        if ATTR_BOOT in self.data.user.get(self._id, {}):
            return self.data.user[self._id][ATTR_BOOT]
        return self._mesh[ATTR_BOOT]

    @boot.setter
    def boot(self, value):
        """Store user boot options."""
        self.data.user[self._id][ATTR_BOOT] = value
        self.data.save()

    @property
    def auto_update(self):
        """Return if auto update is enable."""
        if ATTR_AUTO_UPDATE in self.data.user.get(self._id, {}):
            return self.data.user[self._id][ATTR_AUTO_UPDATE]

    @auto_update.setter
    def auto_update(self, value):
        """Set auto update."""
        self.data.user[self._id][ATTR_AUTO_UPDATE] = value
        self.data.save()

    @property
    def name(self):
        """Return name of addon."""
        return self._mesh[ATTR_NAME]

    @property
    def timeout(self):
        """Return timeout of addon for docker stop."""
        return self._mesh[ATTR_TIMEOUT]

    @property
    def description(self):
        """Return description of addon."""
        return self._mesh[ATTR_DESCRIPTON]

    @property
    def repository(self):
        """Return repository of addon."""
        return self._mesh[ATTR_REPOSITORY]

    @property
    def last_version(self):
        """Return version of addon."""
        if self._id in self.data.cache:
            return self.data.cache[self._id][ATTR_VERSION]
        return self.version_installed

    @property
    def startup(self):
        """Return startup type of addon."""
        return self._mesh.get(ATTR_STARTUP)

    @property
    def ports(self):
        """Return ports of addon."""
        if self.network_mode != 'bridge' or ATTR_PORTS not in self._mesh:
            return

        if not self.is_installed or \
                ATTR_NETWORK not in self.data.user[self._id]:
            return self._mesh[ATTR_PORTS]
        return self.data.user[self._id][ATTR_NETWORK]

    @ports.setter
    def ports(self, value):
        """Set custom ports of addon."""
        if value is None:
            self.data.user[self._id].pop(ATTR_NETWORK, None)
        else:
            new_ports = {}
            for container_port, host_port in value.items():
                if container_port in self._mesh.get(ATTR_PORTS, {}):
                    new_ports[container_port] = host_port

            self.data.user[self._id][ATTR_NETWORK] = new_ports

        self.data.save()

    @property
    def webui(self):
        """Return URL to webui or None."""
        if ATTR_WEBUI not in self._mesh:
            return

        webui = self._mesh[ATTR_WEBUI]
        dock_port = RE_WEBUI.sub(r"\2", webui)
        if self.ports is None:
            real_port = dock_port
        else:
            real_port = self.ports.get("{}/tcp".format(dock_port), dock_port)

        # for interface config or port lists
        if isinstance(real_port, (tuple, list)):
            real_port = real_port[-1]

        return RE_WEBUI.sub(r"\g<1>{}\g<3>".format(real_port), webui)

    @property
    def network_mode(self):
        """Return network mode of addon."""
        if self._mesh[ATTR_HOST_NETWORK]:
            return 'host'
        return 'bridge'

    @property
    def devices(self):
        """Return devices of addon."""
        return self._mesh.get(ATTR_DEVICES)

    @property
    def tmpfs(self):
        """Return tmpfs of addon."""
        return self._mesh.get(ATTR_TMPFS)

    @property
    def environment(self):
        """Return environment of addon."""
        return self._mesh.get(ATTR_ENVIRONMENT)

    @property
    def privileged(self):
        """Return list of privilege."""
        return self._mesh.get(ATTR_PRIVILEGED)

    @property
    def url(self):
        """Return url of addon."""
        return self._mesh.get(ATTR_URL)

    @property
    def with_logo(self):
        """Return True if a logo exists."""
        return self.path_logo.exists()

    @property
    def supported_arch(self):
        """Return list of supported arch."""
        return self._mesh[ATTR_ARCH]

    @property
    def image(self):
        """Return image name of addon."""
        addon_data = self._mesh

        # Repository with dockerhub images
        if ATTR_IMAGE in addon_data:
            return addon_data[ATTR_IMAGE].format(arch=self.config.arch)

        # local build
        return "{}/{}-addon-{}".format(
            addon_data[ATTR_REPOSITORY], self.config.arch,
            addon_data[ATTR_SLUG])

    @property
    def need_build(self):
        """Return True if this addon need a local build."""
        return ATTR_IMAGE not in self._mesh

    @property
    def map_volumes(self):
        """Return a dict of {volume: policy} from addon."""
        volumes = {}
        for volume in self._mesh[ATTR_MAP]:
            result = RE_VOLUME.match(volume)
            volumes[result.group(1)] = result.group(2) or 'ro'

        return volumes

    @property
    def path_data(self):
        """Return addon data path inside supervisor."""
        return Path(self.config.path_addons_data, self._id)

    @property
    def path_extern_data(self):
        """Return addon data path external for docker."""
        return PurePath(self.config.path_extern_addons_data, self._id)

    @property
    def path_options(self):
        """Return path to addons options."""
        return Path(self.path_data, "options.json")

    @property
    def path_location(self):
        """Return path to this addon."""
        return Path(self._mesh[ATTR_LOCATON])

    @property
    def path_logo(self):
        """Return path to addon logo."""
        return Path(self.path_location, 'logo.png')

    def write_options(self):
        """Return True if addon options is written to data."""
        schema = self.schema
        options = self.options

        try:
            schema(options)
            return write_json_file(self.path_options, options)
        except vol.Invalid as ex:
            _LOGGER.error("Addon %s have wrong options -> %s", self._id,
                          humanize_error(options, ex))

        return False

    @property
    def schema(self):
        """Create a schema for addon options."""
        raw_schema = self._mesh[ATTR_SCHEMA]

        if isinstance(raw_schema, bool):
            return vol.Schema(dict)
        return vol.Schema(vol.All(dict, validate_options(raw_schema)))

    async def install(self, version=None):
        """Install a addon."""
        if self.config.arch not in self.supported_arch:
            _LOGGER.error(
                "Addon %s not supported on %s", self._id, self.config.arch)
            return False

        if self.is_installed:
            _LOGGER.error("Addon %s is already installed", self._id)
            return False

        if not self.path_data.is_dir():
            _LOGGER.info(
                "Create Home-Assistant addon data folder %s", self.path_data)
            self.path_data.mkdir()

        version = version or self.last_version
        if not await self.addon_docker.install(version):
            return False

        self._set_install(version)
        return True

    @check_installed
    async def uninstall(self):
        """Remove a addon."""
        if not await self.addon_docker.remove():
            return False

        if self.path_data.is_dir():
            _LOGGER.info(
                "Remove Home-Assistant addon data folder %s", self.path_data)
            shutil.rmtree(str(self.path_data))

        self._set_uninstall()
        return True

    async def state(self):
        """Return running state of addon."""
        if not self.is_installed:
            return STATE_NONE

        if await self.addon_docker.is_running():
            return STATE_STARTED
        return STATE_STOPPED

    @check_installed
    async def start(self):
        """Set options and start addon."""
        return await self.addon_docker.run()

    @check_installed
    async def stop(self):
        """Stop addon."""
        return await self.addon_docker.stop()

    @check_installed
    async def update(self, version=None):
        """Update addon."""
        version = version or self.last_version

        if version == self.version_installed:
            _LOGGER.warning(
                "Addon %s is already installed in %s", self._id, version)
            return True

        if not await self.addon_docker.update(version):
            return False

        self._set_update(version)
        return True

    @check_installed
    async def restart(self):
        """Restart addon."""
        return await self.addon_docker.restart()

    @check_installed
    async def logs(self):
        """Return addons log output."""
        return await self.addon_docker.logs()

    @check_installed
    async def snapshot(self, tar_file):
        """Snapshot a state of a addon."""
        with TemporaryDirectory(dir=str(self.config.path_tmp)) as temp:
            # store local image
            if self.need_build and not await \
                    self.addon_docker.export_image(Path(temp, "image.tar")):
                return False

            data = {
                ATTR_USER: self.data.user.get(self._id, {}),
                ATTR_SYSTEM: self.data.system.get(self._id, {}),
                ATTR_VERSION: self.version_installed,
                ATTR_STATE: await self.state(),
            }

            # store local configs/state
            if not write_json_file(Path(temp, "addon.json"), data):
                _LOGGER.error("Can't write addon.json for %s", self._id)
                return False

            # write into tarfile
            def _create_tar():
                """Write tar inside loop."""
                with tarfile.open(tar_file, "w:gz",
                                  compresslevel=1) as snapshot:
                    snapshot.add(temp, arcname=".")
                    snapshot.add(self.path_data, arcname="data")

            try:
                await self.loop.run_in_executor(None, _create_tar)
            except tarfile.TarError as err:
                _LOGGER.error("Can't write tarfile %s -> %s", tar_file, err)
                return False

        return True

    async def restore(self, tar_file):
        """Restore a state of a addon."""
        with TemporaryDirectory(dir=str(self.config.path_tmp)) as temp:
            # extract snapshot
            def _extract_tar():
                """Extract tar snapshot."""
                with tarfile.open(tar_file, "r:gz") as snapshot:
                    snapshot.extractall(path=Path(temp))

            try:
                await self.loop.run_in_executor(None, _extract_tar)
            except tarfile.TarError as err:
                _LOGGER.error("Can't read tarfile %s -> %s", tar_file, err)
                return False

            # read snapshot data
            try:
                data = read_json_file(Path(temp, "addon.json"))
            except (OSError, json.JSONDecodeError) as err:
                _LOGGER.error("Can't read addon.json -> %s", err)

            # validate
            try:
                data = SCHEMA_ADDON_SNAPSHOT(data)
            except vol.Invalid as err:
                _LOGGER.error("Can't validate %s, snapshot data -> %s",
                              self._id, humanize_error(data, err))
                return False

            # restore data / reload addon
            self._restore_data(data[ATTR_USER], data[ATTR_SYSTEM])

            # check version / restore image
            version = data[ATTR_VERSION]
            if version != self.addon_docker.version:
                image_file = Path(temp, "image.tar")
                if image_file.is_file():
                    await self.addon_docker.import_image(image_file, version)
                else:
                    if await self.addon_docker.install(version):
                        await self.addon_docker.cleanup()
            else:
                await self.addon_docker.stop()

            # restore data
            def _restore_data():
                """Restore data."""
                if self.path_data.is_dir():
                    shutil.rmtree(str(self.path_data), ignore_errors=True)
                shutil.copytree(str(Path(temp, "data")), str(self.path_data))

            try:
                await self.loop.run_in_executor(None, _restore_data)
            except shutil.Error as err:
                _LOGGER.error("Can't restore origin data -> %s", err)
                return False

            # run addon
            if data[ATTR_STATE] == STATE_STARTED:
                return await self.start()

        return True
hassio/addons/built-in.json
@@ -1,12 +1,10 @@
{
    "local": {
        "slug": "local",
        "name": "Local Add-Ons",
        "url": "https://home-assistant.io/hassio",
        "maintainer": "By our self"
    },
    "core": {
        "slug": "core",
        "name": "Built-in Add-Ons",
        "url": "https://home-assistant.io/addons",
        "maintainer": "Home Assistant authors"
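Looking back at `addon.py` above: the `webui` property rewrites a `[HOST]:[PORT:...]` template against the live port mapping. The regex logic in isolation, with example values:

```python
"""Standalone check of the RE_WEBUI rewrite used by Addon.webui.

The template string and port mapping here are example values.
"""
import re

RE_WEBUI = re.compile(r"^(.*\[HOST\]:)\[PORT:(\d+)\](.*)$")

webui = "http://[HOST]:[PORT:8080]/admin"
ports = {"8080/tcp": 32768}  # example docker port mapping

dock_port = RE_WEBUI.sub(r"\2", webui)  # -> "8080"
real_port = ports.get("{}/tcp".format(dock_port), dock_port)

print(RE_WEBUI.sub(r"\g<1>{}\g<3>".format(real_port), webui))
# http://[HOST]:32768/admin
```

Likewise, the `options` property merges repository defaults with user overrides before `write_options` validates them; in miniature (schema and values are examples, not from a real add-on):

```python
"""Miniature of the options merge + validation in Addon above."""
import voluptuous as vol

system = {"ssl": True, "port": 8080}  # defaults shipped with the addon
user = {"port": 9000}                 # local override

# same precedence as Addon.options: user keys win
options = {**system, **user}

schema = vol.Schema({"ssl": bool, "port": int})
print(schema(options))  # {'ssl': True, 'port': 9000}
```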
hassio/addons/data.py
@@ -2,56 +2,60 @@
import copy
import logging
import json
from pathlib import Path, PurePath
from pathlib import Path
import re

import voluptuous as vol
from voluptuous.humanize import humanize_error

from .util import extract_hash_from_path
from .validate import (
    validate_options, SCHEMA_ADDON_CONFIG, SCHEMA_REPOSITORY_CONFIG)
    SCHEMA_ADDON_CONFIG, SCHEMA_ADDON_FILE, SCHEMA_REPOSITORY_CONFIG,
    MAP_VOLUME)
from ..const import (
    FILE_HASSIO_ADDONS, ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON,
    ATTR_STARTUP, ATTR_BOOT, ATTR_MAP, ATTR_OPTIONS, ATTR_PORTS, BOOT_AUTO,
    ATTR_SCHEMA, ATTR_IMAGE, MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP,
    ATTR_REPOSITORY, ATTR_URL, ATTR_ARCH, ATTR_LOCATON)
from ..config import Config
from ..tools import read_json_file, write_json_file
    FILE_HASSIO_ADDONS, ATTR_VERSION, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON,
    REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_USER, ATTR_SYSTEM)
from ..tools import JsonConfig, read_json_file

_LOGGER = logging.getLogger(__name__)

SYSTEM = 'system'
USER = 'user'

REPOSITORY_CORE = 'core'
REPOSITORY_LOCAL = 'local'
RE_VOLUME = re.compile(MAP_VOLUME)


class AddonsData(Config):
class Data(JsonConfig):
    """Hold data for addons inside HassIO."""

    def __init__(self, config):
        """Initialize data holder."""
        super().__init__(FILE_HASSIO_ADDONS)
        super().__init__(FILE_HASSIO_ADDONS, SCHEMA_ADDON_FILE)
        self.config = config
        self._system_data = self._data.get(SYSTEM, {})
        self._user_data = self._data.get(USER, {})
        self._addons_cache = {}
        self._repositories_data = {}
        self.arch = None
        self._repositories = {}
        self._cache = {}

    def save(self):
        """Store data to config file."""
        self._data = {
            USER: self._user_data,
            SYSTEM: self._system_data,
        }
        super().save()
    @property
    def user(self):
        """Return local addon user data."""
        return self._data[ATTR_USER]

    def read_data_from_repositories(self):
    @property
    def system(self):
        """Return local addon data."""
        return self._data[ATTR_SYSTEM]

    @property
    def cache(self):
        """Return addon data from cache/repositories."""
        return self._cache

    @property
    def repositories(self):
        """Return addon data from repositories."""
        return self._repositories

    def reload(self):
        """Read data from addons repository."""
        self._addons_cache = {}
        self._repositories_data = {}
        self._cache = {}
        self._repositories = {}

        # read core repository
        self._read_addons_folder(
@@ -69,19 +73,21 @@ class AddonsData(Config):
        if repository_element.is_dir():
            self._read_git_repository(repository_element)

        # update local data
        self._merge_config()

    def _read_git_repository(self, path):
        """Process a custom repository folder."""
        slug = extract_hash_from_path(path)
        repository_info = {ATTR_SLUG: slug}

        # exists repository json
        repository_file = Path(path, "repository.json")
        try:
            repository_info.update(SCHEMA_REPOSITORY_CONFIG(
            repository_info = SCHEMA_REPOSITORY_CONFIG(
                read_json_file(repository_file)
            ))
            )

        except OSError:
        except (OSError, json.JSONDecodeError):
            _LOGGER.warning("Can't read repository information from %s",
                            repository_file)
            return
@@ -91,7 +97,7 @@ class AddonsData(Config):
            return

        # process data
        self._repositories_data[slug] = repository_info
        self._repositories[slug] = repository_info
        self._read_addons_folder(path, slug)

    def _read_addons_folder(self, path, repository):
@@ -110,7 +116,7 @@ class AddonsData(Config):
                # store
                addon_config[ATTR_REPOSITORY] = repository
                addon_config[ATTR_LOCATON] = str(addon.parent)
                self._addons_cache[addon_slug] = addon_config
                self._cache[addon_slug] = addon_config

            except OSError:
                _LOGGER.warning("Can't read %s", addon)
@@ -128,250 +134,32 @@ class AddonsData(Config):
            _LOGGER.warning("Can't read built-in.json -> %s", err)
            return

        # if core addons are available
        for data in self._addons_cache.values():
            if data[ATTR_REPOSITORY] == REPOSITORY_CORE:
                self._repositories_data[REPOSITORY_CORE] = \
                    builtin_data[REPOSITORY_CORE]
                break
        # core repository
        self._repositories[REPOSITORY_CORE] = \
            builtin_data[REPOSITORY_CORE]

        # if local addons are available
        for data in self._addons_cache.values():
            if data[ATTR_REPOSITORY] == REPOSITORY_LOCAL:
                self._repositories_data[REPOSITORY_LOCAL] = \
                    builtin_data[REPOSITORY_LOCAL]
                break
        # local repository
        self._repositories[REPOSITORY_LOCAL] = \
            builtin_data[REPOSITORY_LOCAL]

    def merge_update_config(self):
    def _merge_config(self):
        """Update local config if they have update.

        It need to be the same version as the local version is.
        It need to be the same version as the local version is for merge.
        """
        have_change = False

        for addon in self.list_installed:
        for addon in set(self.system):
            # detached
            if addon not in self._addons_cache:
            if addon not in self._cache:
                continue

            cache = self._addons_cache[addon]
            data = self._system_data[addon]
            cache = self._cache[addon]
            data = self.system[addon]
            if data[ATTR_VERSION] == cache[ATTR_VERSION]:
                if data != cache:
                    self._system_data[addon] = copy.deepcopy(cache)
                    self.system[addon] = copy.deepcopy(cache)
                    have_change = True

        if have_change:
            self.save()

    @property
    def list_installed(self):
        """Return a list of installed addons."""
        return set(self._system_data)

    @property
    def list_all(self):
        """Return a dict of all addons."""
        return set(self._system_data) | set(self._addons_cache)

    def list_startup(self, start_type):
        """Get list of installed addon with need start by type."""
        addon_list = set()
        for addon in self._system_data.keys():
            if self.get_boot(addon) != BOOT_AUTO:
                continue

            try:
                if self._system_data[addon][ATTR_STARTUP] == start_type:
                    addon_list.add(addon)
            except KeyError:
                _LOGGER.warning("Orphaned addon detect %s", addon)
                continue

        return addon_list

    @property
    def list_detached(self):
        """Return local addons they not support from repo."""
        addon_list = set()
        for addon in self._system_data.keys():
            if addon not in self._addons_cache:
                addon_list.add(addon)

        return addon_list

    @property
    def list_repositories(self):
        """Return list of addon repositories."""
        return list(self._repositories_data.values())

    def exists_addon(self, addon):
        """Return True if a addon exists."""
        return addon in self._addons_cache or addon in self._system_data

    def is_installed(self, addon):
        """Return True if a addon is installed."""
        return addon in self._system_data

    def version_installed(self, addon):
        """Return installed version."""
        return self._user_data.get(addon, {}).get(ATTR_VERSION)

    def set_addon_install(self, addon, version):
        """Set addon as installed."""
        self._system_data[addon] = copy.deepcopy(self._addons_cache[addon])
        self._user_data[addon] = {
            ATTR_OPTIONS: {},
            ATTR_VERSION: version,
        }
        self.save()

    def set_addon_uninstall(self, addon):
        """Set addon as uninstalled."""
        self._system_data.pop(addon, None)
        self._user_data.pop(addon, None)
        self.save()

    def set_addon_update(self, addon, version):
        """Update version of addon."""
        self._system_data[addon] = copy.deepcopy(self._addons_cache[addon])
        self._user_data[addon][ATTR_VERSION] = version
        self.save()

    def set_options(self, addon, options):
        """Store user addon options."""
        self._user_data[addon][ATTR_OPTIONS] = copy.deepcopy(options)
        self.save()

    def set_boot(self, addon, boot):
        """Store user boot options."""
        self._user_data[addon][ATTR_BOOT] = boot
        self.save()

    def get_options(self, addon):
        """Return options with local changes."""
        return {
            **self._system_data[addon][ATTR_OPTIONS],
            **self._user_data[addon][ATTR_OPTIONS],
        }

    def get_boot(self, addon):
        """Return boot config with prio local settings."""
        if ATTR_BOOT in self._user_data[addon]:
            return self._user_data[addon][ATTR_BOOT]

        return self._system_data[addon][ATTR_BOOT]

    def get_name(self, addon):
        """Return name of addon."""
        if addon in self._addons_cache:
            return self._addons_cache[addon][ATTR_NAME]
        return self._system_data[addon][ATTR_NAME]

    def get_description(self, addon):
        """Return description of addon."""
        if addon in self._addons_cache:
            return self._addons_cache[addon][ATTR_DESCRIPTON]
        return self._system_data[addon][ATTR_DESCRIPTON]

    def get_repository(self, addon):
        """Return repository of addon."""
        if addon in self._addons_cache:
            return self._addons_cache[addon][ATTR_REPOSITORY]
        return self._system_data[addon][ATTR_REPOSITORY]

    def get_last_version(self, addon):
        """Return version of addon."""
        if addon in self._addons_cache:
            return self._addons_cache[addon][ATTR_VERSION]
        return self.version_installed(addon)

    def get_ports(self, addon):
        """Return ports of addon."""
        return self._system_data[addon].get(ATTR_PORTS)

    def get_url(self, addon):
        """Return url of addon."""
        if addon in self._addons_cache:
            return self._addons_cache[addon].get(ATTR_URL)
        return self._system_data[addon].get(ATTR_URL)

    def get_arch(self, addon):
        """Return list of supported arch."""
        if addon in self._addons_cache:
            return self._addons_cache[addon][ATTR_ARCH]
        return self._system_data[addon][ATTR_ARCH]

    def get_image(self, addon):
        """Return image name of addon."""
        addon_data = self._system_data.get(
            addon, self._addons_cache.get(addon)
        )

        # Repository with dockerhub images
        if ATTR_IMAGE in addon_data:
            return addon_data[ATTR_IMAGE].format(arch=self.arch)

        # local build
        return "{}/{}-addon-{}".format(
            addon_data[ATTR_REPOSITORY], self.arch, addon_data[ATTR_SLUG])

    def need_build(self, addon):
        """Return True if this addon need a local build."""
        addon_data = self._system_data.get(
            addon, self._addons_cache.get(addon)
        )
        return ATTR_IMAGE not in addon_data

    def map_config(self, addon):
        """Return True if config map is needed."""
        return MAP_CONFIG in self._system_data[addon][ATTR_MAP]

    def map_ssl(self, addon):
        """Return True if ssl map is needed."""
        return MAP_SSL in self._system_data[addon][ATTR_MAP]

    def map_addons(self, addon):
        """Return True if addons map is needed."""
        return MAP_ADDONS in self._system_data[addon][ATTR_MAP]

    def map_backup(self, addon):
        """Return True if backup map is needed."""
        return MAP_BACKUP in self._system_data[addon][ATTR_MAP]

    def path_data(self, addon):
        """Return addon data path inside supervisor."""
        return Path(self.config.path_addons_data, addon)

    def path_extern_data(self, addon):
        """Return addon data path external for docker."""
        return PurePath(self.config.path_extern_addons_data, addon)

    def path_addon_options(self, addon):
        """Return path to addons options."""
        return Path(self.path_data(addon), "options.json")

    def path_addon_location(self, addon):
        """Return path to this addon."""
        return Path(self._addons_cache[addon][ATTR_LOCATON])

    def write_addon_options(self, addon):
        """Return True if addon options is written to data."""
        schema = self.get_schema(addon)
        options = self.get_options(addon)

        try:
            schema(options)
            return write_json_file(self.path_addon_options(addon), options)
        except vol.Invalid as ex:
            _LOGGER.error("Addon %s have wrong options -> %s", addon,
                          humanize_error(options, ex))

        return False

    def get_schema(self, addon):
        """Create a schema for addon options."""
        raw_schema = self._system_data[addon][ATTR_SCHEMA]

        schema = vol.Schema(vol.All(dict, validate_options(raw_schema)))
        return schema
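The refactor moves `Data` onto a `JsonConfig` base from `..tools` that pairs a JSON file with a voluptuous schema. That base is not shown in this compare; a guess at its shape, clearly labeled as such:

```python
"""Hypothetical sketch of the JsonConfig base used by Data above.

hassio.tools.JsonConfig is not part of this diff; this guesses at
its shape (load file, validate against schema, write back on save).
"""
import json
from pathlib import Path

import voluptuous as vol


class JsonConfig(object):
    """Pair a JSON file with a voluptuous schema."""

    def __init__(self, json_file, schema):
        self._file = Path(json_file)
        self._schema = schema
        self._data = {}

        if self._file.is_file():
            self._data = json.loads(self._file.read_text())
        # validate (and, where the schema defines defaults, fill) the data
        self._data = self._schema(self._data)

    def save(self):
        """Validate and persist current data."""
        self._data = self._schema(self._data)
        self._file.write_text(json.dumps(self._data))


# a schema of the kind SCHEMA_ADDON_FILE might be (illustrative)
SCHEMA = vol.Schema({"user": dict, "system": dict}, extra=vol.REMOVE_EXTRA)
```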
hassio/addons/git.py
@@ -12,7 +12,7 @@ from ..const import URL_HASSIO_ADDONS
_LOGGER = logging.getLogger(__name__)


class AddonsRepo(object):
class GitRepo(object):
    """Manage addons git repo."""

    def __init__(self, config, loop, path, url):
@@ -77,7 +77,7 @@ class AddonsRepo(object):
        return True


class AddonsRepoHassIO(AddonsRepo):
class GitRepoHassIO(GitRepo):
    """HassIO addons repository."""

    def __init__(self, config, loop):
@@ -86,7 +86,7 @@ class AddonsRepoHassIO(AddonsRepo):
            config, loop, config.path_addons_core, URL_HASSIO_ADDONS)


class AddonsRepoCustom(AddonsRepo):
class GitRepoCustom(GitRepo):
    """Custom addons repository."""

    def __init__(self, config, loop, url):
hassio/addons/repository.py (new file, 71 lines)
@@ -0,0 +1,71 @@
"""Represent a HassIO repository."""
from .git import GitRepoHassIO, GitRepoCustom
from .util import get_hash_from_repository
from ..const import (
    REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_NAME, ATTR_URL, ATTR_MAINTAINER)

UNKNOWN = 'unknown'


class Repository(object):
    """Repository in HassIO."""

    def __init__(self, config, loop, data, repository):
        """Initialize repository object."""
        self.data = data
        self.source = None
        self.git = None

        if repository == REPOSITORY_LOCAL:
            self._id = repository
        elif repository == REPOSITORY_CORE:
            self._id = repository
            self.git = GitRepoHassIO(config, loop)
        else:
            self._id = get_hash_from_repository(repository)
            self.git = GitRepoCustom(config, loop, repository)
            self.source = repository

    @property
    def _mesh(self):
        """Return data struct of this repository."""
        return self.data.repositories.get(self._id, {})

    @property
    def slug(self):
        """Return slug of repository."""
        return self._id

    @property
    def name(self):
        """Return name of repository."""
        return self._mesh.get(ATTR_NAME, UNKNOWN)

    @property
    def url(self):
        """Return url of repository."""
        return self._mesh.get(ATTR_URL, self.source)

    @property
    def maintainer(self):
        """Return maintainer of repository."""
        return self._mesh.get(ATTR_MAINTAINER, UNKNOWN)

    async def load(self):
        """Load addon repository."""
        if self.git:
            return await self.git.load()
        return True

    async def update(self):
        """Update addon repository."""
        if self.git:
            return await self.git.pull()
        return True

    def remove(self):
        """Remove addon repository."""
        if self._id in (REPOSITORY_CORE, REPOSITORY_LOCAL):
            raise RuntimeError("Can't remove built-in repositories!")

        self.git.remove()
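Usage sketch for the class above; config, loop, and data stand in for the real supervisor objects, and the URL is made up:

core = Repository(config, loop, data, REPOSITORY_CORE)
custom = Repository(config, loop, data, "https://github.com/example/addons")

core.slug    # 'core' -> built-in, backed by GitRepoHassIO
custom.slug  # e.g. 'a1b2c3d4', the 8-char hash of the URL
custom.url   # falls back to the source URL until repository data is read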
@@ -1,10 +1,12 @@
"""Util addons functions."""
import hashlib
import logging
import re

RE_SLUGIFY = re.compile(r'[^a-z0-9_]+')
RE_SHA1 = re.compile(r"[a-f0-9]{8}")

_LOGGER = logging.getLogger(__name__)


def get_hash_from_repository(name):
    """Generate a hash from repository."""
@@ -21,6 +23,13 @@ def extract_hash_from_path(path):
    return repo_dir


def create_hash_index_list(name_list):
    """Create a dict with hash from repositories list."""
    return {get_hash_from_repository(repo): repo for repo in name_list}

def check_installed(method):
    """Wrap function with check if addon is installed."""
    async def wrap_check(addon, *args, **kwargs):
        """Return False if the addon is not installed, else run the method."""
        if not addon.is_installed:
            _LOGGER.error("Addon %s is not installed", addon.slug)
            return False
        return await method(addon, *args, **kwargs)

    return wrap_check
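The body of get_hash_from_repository is elided by the hunk above; judging from the RE_SHA1 pattern of 8 hex characters, it is plausibly a truncated SHA-1 of the lowercased URL. A hedged sketch, plus a usage note for check_installed (the decorated method receives the addon instance as first argument, so it fits Addon methods directly):

def get_hash_from_repository(name):
    """Generate an 8-character hash from a repository URL (sketch)."""
    key = name.lower().encode()
    return hashlib.sha1(key).hexdigest()[:8]


class Addon(object):
    """Illustrative consumer of the check_installed decorator."""

    is_installed = True
    slug = 'example'

    @check_installed
    async def start(self):
        return True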
@@ -3,10 +3,18 @@ import voluptuous as vol

from ..const import (
    ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_STARTUP,
    ATTR_BOOT, ATTR_MAP, ATTR_OPTIONS, ATTR_PORTS, STARTUP_ONCE, STARTUP_AFTER,
    STARTUP_BEFORE, BOOT_AUTO, BOOT_MANUAL, ATTR_SCHEMA, ATTR_IMAGE, MAP_SSL,
    MAP_CONFIG, MAP_ADDONS, MAP_BACKUP, ATTR_URL, ATTR_MAINTAINER, ATTR_ARCH,
    ARCH_ARMHF, ARCH_AARCH64, ARCH_AMD64, ARCH_I386)
    ATTR_BOOT, ATTR_MAP, ATTR_OPTIONS, ATTR_PORTS, STARTUP_ONCE,
    STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION, STARTUP_INITIALIZE,
    BOOT_AUTO, BOOT_MANUAL, ATTR_SCHEMA, ATTR_IMAGE, ATTR_URL, ATTR_MAINTAINER,
    ATTR_ARCH, ATTR_DEVICES, ATTR_ENVIRONMENT, ATTR_HOST_NETWORK, ARCH_ARMHF,
    ARCH_AARCH64, ARCH_AMD64, ARCH_I386, ATTR_TMPFS, ATTR_PRIVILEGED,
    ATTR_USER, ATTR_STATE, ATTR_SYSTEM, STATE_STARTED, STATE_STOPPED,
    ATTR_LOCATON, ATTR_REPOSITORY, ATTR_TIMEOUT, ATTR_NETWORK,
    ATTR_AUTO_UPDATE, ATTR_WEBUI)
from ..validate import NETWORK_PORT, DOCKER_PORTS


MAP_VOLUME = r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$"

V_STR = 'str'
V_INT = 'int'
@@ -14,13 +22,36 @@ V_FLOAT = 'float'
V_BOOL = 'bool'
V_EMAIL = 'email'
V_URL = 'url'
V_PORT = 'port'

ADDON_ELEMENT = vol.In([V_STR, V_INT, V_FLOAT, V_BOOL, V_EMAIL, V_URL])
ADDON_ELEMENT = vol.In([V_STR, V_INT, V_FLOAT, V_BOOL, V_EMAIL, V_URL, V_PORT])

ARCH_ALL = [
    ARCH_ARMHF, ARCH_AARCH64, ARCH_AMD64, ARCH_I386
]

STARTUP_ALL = [
    STARTUP_ONCE, STARTUP_INITIALIZE, STARTUP_SYSTEM, STARTUP_SERVICES,
    STARTUP_APPLICATION
]

PRIVILEGE_ALL = [
    "NET_ADMIN"
]


def _migrate_startup(value):
    """Migrate startup schema.

    REMOVE after 0.50
    """
    if value == "before":
        return STARTUP_SERVICES
    if value == "after":
        return STARTUP_APPLICATION
    return value
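A quick sketch of how the migration shim composes with the new validator (values illustrative):

STARTUP_VALIDATOR = vol.All(_migrate_startup, vol.In(STARTUP_ALL))

STARTUP_VALIDATOR("before")  # -> 'services'    (legacy value migrated)
STARTUP_VALIDATOR("after")   # -> 'application' (legacy value migrated)
STARTUP_VALIDATOR("system")  # -> 'system'      (already valid, passes through)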
# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_CONFIG = vol.Schema({
    vol.Required(ATTR_NAME): vol.Coerce(str),
@@ -30,20 +61,28 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
    vol.Optional(ATTR_URL): vol.Url(),
    vol.Optional(ATTR_ARCH, default=ARCH_ALL): [vol.In(ARCH_ALL)],
    vol.Required(ATTR_STARTUP):
        vol.In([STARTUP_BEFORE, STARTUP_AFTER, STARTUP_ONCE]),
        vol.All(_migrate_startup, vol.In(STARTUP_ALL)),
    vol.Required(ATTR_BOOT):
        vol.In([BOOT_AUTO, BOOT_MANUAL]),
    vol.Optional(ATTR_PORTS): dict,
    vol.Optional(ATTR_MAP, default=[]): [
        vol.In([MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP])
    ],
    vol.Optional(ATTR_PORTS): DOCKER_PORTS,
    vol.Optional(ATTR_WEBUI):
        vol.Match(r"^(?:https?):\/\/\[HOST\]:\[PORT:\d+\].*$"),
    vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
    vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
    vol.Optional(ATTR_TMPFS):
        vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
    vol.Optional(ATTR_MAP, default=[]): [vol.Match(MAP_VOLUME)],
    vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
    vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGE_ALL)],
    vol.Required(ATTR_OPTIONS): dict,
    vol.Required(ATTR_SCHEMA): {
    vol.Required(ATTR_SCHEMA): vol.Any(vol.Schema({
        vol.Coerce(str): vol.Any(ADDON_ELEMENT, [
            vol.Any(ADDON_ELEMENT, {vol.Coerce(str): ADDON_ELEMENT})
        ])
    },
        ], vol.Schema({vol.Coerce(str): ADDON_ELEMENT}))
    }), False),
    vol.Optional(ATTR_IMAGE): vol.Match(r"\w*/\w*"),
    vol.Optional(ATTR_TIMEOUT, default=10):
        vol.All(vol.Coerce(int), vol.Range(min=10, max=120))
}, extra=vol.ALLOW_EXTRA)


@@ -55,6 +94,41 @@ SCHEMA_REPOSITORY_CONFIG = vol.Schema({
}, extra=vol.ALLOW_EXTRA)


# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_USER = vol.Schema({
    vol.Required(ATTR_VERSION): vol.Coerce(str),
    vol.Required(ATTR_OPTIONS): dict,
    vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
    vol.Optional(ATTR_BOOT):
        vol.In([BOOT_AUTO, BOOT_MANUAL]),
    vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
})


SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend({
    vol.Required(ATTR_LOCATON): vol.Coerce(str),
    vol.Required(ATTR_REPOSITORY): vol.Coerce(str),
})


SCHEMA_ADDON_FILE = vol.Schema({
    vol.Optional(ATTR_USER, default={}): {
        vol.Coerce(str): SCHEMA_ADDON_USER,
    },
    vol.Optional(ATTR_SYSTEM, default={}): {
        vol.Coerce(str): SCHEMA_ADDON_SYSTEM,
    }
})


SCHEMA_ADDON_SNAPSHOT = vol.Schema({
    vol.Required(ATTR_USER): SCHEMA_ADDON_USER,
    vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
    vol.Required(ATTR_STATE): vol.In([STATE_STARTED, STATE_STOPPED]),
    vol.Required(ATTR_VERSION): vol.Coerce(str),
})


def validate_options(raw_schema):
    """Validate schema."""
    def validate(struct):
@@ -69,8 +143,11 @@ def validate_options(raw_schema):
            typ = raw_schema[key]
            try:
                if isinstance(typ, list):
                    # nested value
                    options[key] = _nested_validate(typ[0], value, key)
                    # nested value list
                    options[key] = _nested_validate_list(typ[0], value, key)
                elif isinstance(typ, dict):
                    # nested value dict
                    options[key] = _nested_validate_dict(typ, value, key)
                else:
                    # normal value
                    options[key] = _single_validate(typ, value, key)
@@ -103,14 +180,16 @@ def _single_validate(typ, value, key):
            return vol.Email()(value)
        elif typ == V_URL:
            return vol.Url()(value)
        elif typ == V_PORT:
            return NETWORK_PORT(value)

        raise vol.Invalid("Fatal error for {} type {}.".format(key, typ))
        raise vol.Invalid("Fatal error for {} type {}".format(key, typ))
    except ValueError:
        raise vol.Invalid(
            "Type {} error for '{}' on {}.".format(typ, value, key)) from None


def _nested_validate(typ, data_list, key):
def _nested_validate_list(typ, data_list, key):
    """Validate nested items."""
    options = []

@@ -121,7 +200,7 @@ def _nested_validate(typ, data_list, key):
            for c_key, c_value in element.items():
                if c_key not in typ:
                    raise vol.Invalid(
                        "Unknown nested options {}.".format(c_key))
                        "Unknown nested options {}".format(c_key))

                c_options[c_key] = _single_validate(typ[c_key], c_value, c_key)
            options.append(c_options)
@@ -130,3 +209,16 @@ def _nested_validate(typ, data_list, key):
            options.append(_single_validate(typ, element, key))

    return options


def _nested_validate_dict(typ, data_dict, key):
    """Validate nested items."""
    options = {}

    for c_key, c_value in data_dict.items():
        if c_key not in typ:
            raise vol.Invalid("Unknown nested dict options {}".format(c_key))

        options[c_key] = _single_validate(typ[c_key], c_value, c_key)

    return options
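End-to-end sketch of nested option validation with the helpers above; the raw schema and data are illustrative:

raw_schema = {
    "users": [{"name": "str", "uid": "int"}],   # nested value list
    "proxy": {"host": "str", "port": "port"},   # nested value dict
}

schema = vol.Schema(vol.All(dict, validate_options(raw_schema)))
schema({
    "users": [{"name": "anna", "uid": 1000}],
    "proxy": {"host": "127.0.0.1", "port": 8080},
})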
@@ -10,6 +10,7 @@ from .host import APIHost
from .network import APINetwork
from .supervisor import APISupervisor
from .security import APISecurity
from .snapshots import APISnapshots

_LOGGER = logging.getLogger(__name__)

@@ -43,15 +44,15 @@ class RestAPI(object):
        self.webapp.router.add_get('/network/info', api_net.info)
        self.webapp.router.add_post('/network/options', api_net.options)

    def register_supervisor(self, supervisor, addons, host_control):
    def register_supervisor(self, supervisor, snapshots, addons, host_control,
                            websession):
        """Register supervisor function."""
        api_supervisor = APISupervisor(
            self.config, self.loop, supervisor, addons, host_control)
            self.config, self.loop, supervisor, snapshots, addons,
            host_control, websession)

        self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping)
        self.webapp.router.add_get('/supervisor/info', api_supervisor.info)
        self.webapp.router.add_get(
            '/supervisor/addons', api_supervisor.available_addons)
        self.webapp.router.add_post(
            '/supervisor/update', api_supervisor.update)
        self.webapp.router.add_post(
@@ -65,6 +66,7 @@
        api_hass = APIHomeAssistant(self.config, self.loop, dock_homeassistant)

        self.webapp.router.add_get('/homeassistant/info', api_hass.info)
        self.webapp.router.add_post('/homeassistant/options', api_hass.options)
        self.webapp.router.add_post('/homeassistant/update', api_hass.update)
        self.webapp.router.add_post('/homeassistant/restart', api_hass.restart)
        self.webapp.router.add_get('/homeassistant/logs', api_hass.logs)
@@ -73,6 +75,9 @@
        """Register addons function."""
        api_addons = APIAddons(self.config, self.loop, addons)

        self.webapp.router.add_get('/addons', api_addons.list)
        self.webapp.router.add_post('/addons/reload', api_addons.reload)

        self.webapp.router.add_get('/addons/{addon}/info', api_addons.info)
        self.webapp.router.add_post(
            '/addons/{addon}/install', api_addons.install)
@@ -87,6 +92,7 @@
        self.webapp.router.add_post(
            '/addons/{addon}/options', api_addons.options)
        self.webapp.router.add_get('/addons/{addon}/logs', api_addons.logs)
        self.webapp.router.add_get('/addons/{addon}/logo', api_addons.logo)

    def register_security(self):
        """Register security function."""
@@ -97,12 +103,37 @@
        self.webapp.router.add_post('/security/totp', api_security.totp)
        self.webapp.router.add_post('/security/session', api_security.session)

    def register_snapshots(self, snapshots):
        """Register snapshots function."""
        api_snapshots = APISnapshots(self.config, self.loop, snapshots)

        self.webapp.router.add_get('/snapshots', api_snapshots.list)
        self.webapp.router.add_post('/snapshots/reload', api_snapshots.reload)

        self.webapp.router.add_post(
            '/snapshots/new/full', api_snapshots.snapshot_full)
        self.webapp.router.add_post(
            '/snapshots/new/partial', api_snapshots.snapshot_partial)

        self.webapp.router.add_get(
            '/snapshots/{snapshot}/info', api_snapshots.info)
        self.webapp.router.add_post(
            '/snapshots/{snapshot}/remove', api_snapshots.remove)
        self.webapp.router.add_post(
            '/snapshots/{snapshot}/restore/full', api_snapshots.restore_full)
        self.webapp.router.add_post(
            '/snapshots/{snapshot}/restore/partial',
            api_snapshots.restore_partial)

    def register_panel(self):
        """Register panel for homeassistant."""
        panel_dir = Path(__file__).parents[1].joinpath('panel')
        panel = Path(__file__).parents[1].joinpath('panel/hassio-main.html')

        self.webapp.router.register_resource(
            web.StaticResource('/panel', str(panel_dir)))
        def get_panel(request):
            """Return file response with panel."""
            return web.FileResponse(panel)

        self.webapp.router.add_get('/panel', get_panel)

    async def start(self):
        """Run rest api webserver."""
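Minimal standalone sketch of the new panel route, which swaps the StaticResource for a single FileResponse handler; the file path and app wiring are illustrative:

from pathlib import Path

from aiohttp import web

panel = Path(__file__).parent.joinpath('panel/hassio-main.html')


async def get_panel(request):
    """Serve the prebuilt panel file."""
    return web.FileResponse(panel)

app = web.Application()
app.router.add_get('/panel', get_panel)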
@@ -9,7 +9,11 @@ from .util import api_process, api_process_raw, api_validate
from ..const import (
    ATTR_VERSION, ATTR_LAST_VERSION, ATTR_STATE, ATTR_BOOT, ATTR_OPTIONS,
    ATTR_URL, ATTR_DESCRIPTON, ATTR_DETACHED, ATTR_NAME, ATTR_REPOSITORY,
    ATTR_BUILD, STATE_STOPPED, STATE_STARTED, BOOT_AUTO, BOOT_MANUAL)
    ATTR_BUILD, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_HOST_NETWORK, ATTR_SLUG,
    ATTR_SOURCE, ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_ARCH, ATTR_MAINTAINER,
    ATTR_INSTALLED, ATTR_LOGO, ATTR_WEBUI, BOOT_AUTO, BOOT_MANUAL,
    CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY)
from ..validate import DOCKER_PORTS

_LOGGER = logging.getLogger(__name__)

@@ -17,8 +21,11 @@ SCHEMA_VERSION = vol.Schema({
    vol.Optional(ATTR_VERSION): vol.Coerce(str),
})

# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema({
    vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL])
    vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
    vol.Optional(ATTR_NETWORK): vol.Any(None, DOCKER_PORTS),
    vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
})
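An illustrative body for POST /addons/{addon}/options under the extended schema; the values are made up, and the "<port>/tcp" key convention for DOCKER_PORTS is an assumption based on hassio/validate.py, which is not part of this diff:

payload = {
    "boot": "auto",
    "auto_update": True,
    "network": {"8080/tcp": 8080},  # null is also accepted (vol.Any(None, ...))
}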
@@ -33,51 +40,98 @@ class APIAddons(object):

    def _extract_addon(self, request, check_installed=True):
        """Return addon, and throw an exception if it does not exist."""
        addon = request.match_info.get('addon')

        # check data
        if not self.addons.exists_addon(addon):
        addon = self.addons.get(request.match_info.get('addon'))
        if not addon:
            raise RuntimeError("Addon does not exist")
        if check_installed and not self.addons.is_installed(addon):

        if check_installed and not addon.is_installed:
            raise RuntimeError("Addon is not installed")

        return addon

    @api_process
    async def info(self, request):
        """Return addon information."""
        addon = self._extract_addon(request)
    async def list(self, request):
        """Return all addons and repositories."""
        data_addons = []
        for addon in self.addons.list_addons:
            data_addons.append({
                ATTR_NAME: addon.name,
                ATTR_SLUG: addon.slug,
                ATTR_DESCRIPTON: addon.description,
                ATTR_VERSION: addon.last_version,
                ATTR_INSTALLED: addon.version_installed,
                ATTR_ARCH: addon.supported_arch,
                ATTR_DETACHED: addon.is_detached,
                ATTR_REPOSITORY: addon.repository,
                ATTR_BUILD: addon.need_build,
                ATTR_URL: addon.url,
                ATTR_LOGO: addon.with_logo,
            })

        data_repositories = []
        for repository in self.addons.list_repositories:
            data_repositories.append({
                ATTR_SLUG: repository.slug,
                ATTR_NAME: repository.name,
                ATTR_SOURCE: repository.source,
                ATTR_URL: repository.url,
                ATTR_MAINTAINER: repository.maintainer,
            })

        return {
            ATTR_NAME: self.addons.get_name(addon),
            ATTR_DESCRIPTON: self.addons.get_description(addon),
            ATTR_VERSION: self.addons.version_installed(addon),
            ATTR_REPOSITORY: self.addons.get_repository(addon),
            ATTR_LAST_VERSION: self.addons.get_last_version(addon),
            ATTR_STATE: await self.addons.state(addon),
            ATTR_BOOT: self.addons.get_boot(addon),
            ATTR_OPTIONS: self.addons.get_options(addon),
            ATTR_URL: self.addons.get_url(addon),
            ATTR_DETACHED: addon in self.addons.list_detached,
            ATTR_BUILD: self.addons.need_build(addon),
            ATTR_ADDONS: data_addons,
            ATTR_REPOSITORIES: data_repositories,
        }

    @api_process
    async def reload(self, request):
        """Reload all addons data."""
        await asyncio.shield(self.addons.reload(), loop=self.loop)
        return True

    @api_process
    async def info(self, request):
        """Return addon information."""
        addon = self._extract_addon(request, check_installed=False)

        return {
            ATTR_NAME: addon.name,
            ATTR_DESCRIPTON: addon.description,
            ATTR_VERSION: addon.version_installed,
            ATTR_AUTO_UPDATE: addon.auto_update,
            ATTR_REPOSITORY: addon.repository,
            ATTR_LAST_VERSION: addon.last_version,
            ATTR_STATE: await addon.state(),
            ATTR_BOOT: addon.boot,
            ATTR_OPTIONS: addon.options,
            ATTR_URL: addon.url,
            ATTR_DETACHED: addon.is_detached,
            ATTR_BUILD: addon.need_build,
            ATTR_NETWORK: addon.ports,
            ATTR_HOST_NETWORK: addon.network_mode == 'host',
            ATTR_LOGO: addon.with_logo,
            ATTR_WEBUI: addon.webui,
        }

    @api_process
    async def options(self, request):
        """Store user options for addon."""
        addon = self._extract_addon(request)
        options_schema = self.addons.get_schema(addon)

        addon_schema = SCHEMA_OPTIONS.extend({
            vol.Optional(ATTR_OPTIONS): options_schema,
            vol.Optional(ATTR_OPTIONS): addon.schema,
        })

        body = await api_validate(addon_schema, request)

        if ATTR_OPTIONS in body:
            self.addons.set_options(addon, body[ATTR_OPTIONS])
            addon.options = body[ATTR_OPTIONS]
        if ATTR_BOOT in body:
            self.addons.set_boot(addon, body[ATTR_BOOT])
            addon.boot = body[ATTR_BOOT]
        if ATTR_AUTO_UPDATE in body:
            addon.auto_update = body[ATTR_AUTO_UPDATE]
        if ATTR_NETWORK in body:
            addon.ports = body[ATTR_NETWORK]

        return True

@@ -86,77 +140,65 @@
        """Install addon."""
        body = await api_validate(SCHEMA_VERSION, request)
        addon = self._extract_addon(request, check_installed=False)
        version = body.get(
            ATTR_VERSION, self.addons.get_last_version(addon))

        # check if arch supported
        if self.addons.arch not in self.addons.get_arch(addon):
            raise RuntimeError(
                "Addon is not supported on {}".format(self.addons.arch))
        version = body.get(ATTR_VERSION)

        return await asyncio.shield(
            self.addons.install(addon, version), loop=self.loop)
            addon.install(version=version), loop=self.loop)

    @api_process
    async def uninstall(self, request):
        """Uninstall addon."""
        addon = self._extract_addon(request)

        return await asyncio.shield(
            self.addons.uninstall(addon), loop=self.loop)
        return await asyncio.shield(addon.uninstall(), loop=self.loop)

    @api_process
    async def start(self, request):
        """Start addon."""
        addon = self._extract_addon(request)

        if await self.addons.state(addon) == STATE_STARTED:
            raise RuntimeError("Addon is already running")

        # validate options
        # check options
        options = addon.options
        try:
            schema = self.addons.get_schema(addon)
            options = self.addons.get_options(addon)
            schema(options)
            addon.schema(options)
        except vol.Invalid as ex:
            raise RuntimeError(humanize_error(options, ex)) from None

        return await asyncio.shield(
            self.addons.start(addon), loop=self.loop)
        return await asyncio.shield(addon.start(), loop=self.loop)

    @api_process
    async def stop(self, request):
        """Stop addon."""
        addon = self._extract_addon(request)

        if await self.addons.state(addon) == STATE_STOPPED:
            raise RuntimeError("Addon is already stopped")

        return await asyncio.shield(
            self.addons.stop(addon), loop=self.loop)
        return await asyncio.shield(addon.stop(), loop=self.loop)

    @api_process
    async def update(self, request):
        """Update addon."""
        body = await api_validate(SCHEMA_VERSION, request)
        addon = self._extract_addon(request)
        version = body.get(
            ATTR_VERSION, self.addons.get_last_version(addon))

        if version == self.addons.version_installed(addon):
            raise RuntimeError("Version is already in use")
        version = body.get(ATTR_VERSION)

        return await asyncio.shield(
            self.addons.update(addon, version), loop=self.loop)
            addon.update(version=version), loop=self.loop)

    @api_process
    async def restart(self, request):
        """Restart addon."""
        addon = self._extract_addon(request)
        return await asyncio.shield(self.addons.restart(addon), loop=self.loop)
        return await asyncio.shield(addon.restart(), loop=self.loop)

    @api_process_raw
    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request):
        """Return logs from addon."""
        addon = self._extract_addon(request)
        return self.addons.logs(addon)
        return addon.logs()

    @api_process_raw(CONTENT_TYPE_PNG)
    async def logo(self, request):
        """Return logo from addon."""
        addon = self._extract_addon(request, check_installed=False)
        if not addon.with_logo:
            raise RuntimeError("No image found!")

        with addon.path_logo.open('rb') as png:
            return png.read()
@@ -5,10 +5,21 @@ import logging
import voluptuous as vol

from .util import api_process, api_process_raw, api_validate
from ..const import ATTR_VERSION, ATTR_LAST_VERSION
from ..const import (
    ATTR_VERSION, ATTR_LAST_VERSION, ATTR_DEVICES, ATTR_IMAGE, ATTR_CUSTOM,
    CONTENT_TYPE_BINARY)
from ..validate import HASS_DEVICES

_LOGGER = logging.getLogger(__name__)


SCHEMA_OPTIONS = vol.Schema({
    vol.Optional(ATTR_DEVICES): HASS_DEVICES,
    vol.Inclusive(ATTR_IMAGE, 'custom_hass'): vol.Any(None, vol.Coerce(str)),
    vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'):
        vol.Any(None, vol.Coerce(str)),
})

SCHEMA_VERSION = vol.Schema({
    vol.Optional(ATTR_VERSION): vol.Coerce(str),
})
@@ -28,9 +39,26 @@ class APIHomeAssistant(object):
        """Return homeassistant information."""
        return {
            ATTR_VERSION: self.homeassistant.version,
            ATTR_LAST_VERSION: self.config.last_homeassistant,
            ATTR_LAST_VERSION: self.homeassistant.last_version,
            ATTR_IMAGE: self.homeassistant.image,
            ATTR_DEVICES: self.homeassistant.devices,
            ATTR_CUSTOM: self.homeassistant.is_custom_image,
        }

    @api_process
    async def options(self, request):
        """Set homeassistant options."""
        body = await api_validate(SCHEMA_OPTIONS, request)

        if ATTR_DEVICES in body:
            self.homeassistant.devices = body[ATTR_DEVICES]

        if ATTR_IMAGE in body:
            self.homeassistant.set_custom(
                body[ATTR_IMAGE], body[ATTR_LAST_VERSION])

        return True

    @api_process
    async def update(self, request):
        """Update homeassistant."""
@@ -40,9 +68,6 @@ class APIHomeAssistant(object):
        if self.homeassistant.in_progress:
            raise RuntimeError("Other task is in progress")

        if version == self.homeassistant.version:
            raise RuntimeError("Version is already in use")

        return await asyncio.shield(
            self.homeassistant.update(version), loop=self.loop)

@@ -55,7 +80,7 @@ class APIHomeAssistant(object):
        return await asyncio.shield(
            self.homeassistant.restart(), loop=self.loop)

    @api_process_raw
    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request):
        """Return homeassistant docker logs.
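Note the vol.Inclusive group in SCHEMA_OPTIONS above: image and last_version must be supplied together, or both omitted. A quick sketch with illustrative values:

SCHEMA_OPTIONS({'image': 'myrepo/home-assistant', 'last_version': '0.46'})  # ok
SCHEMA_OPTIONS({})                                        # ok, both omitted
SCHEMA_OPTIONS({'image': 'myrepo/home-assistant'})        # raises vol.Invalid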
@@ -1,11 +1,19 @@
"""Init file for HassIO network rest api."""
import logging

from .util import api_process_hostcontrol
import voluptuous as vol

from .util import api_process, api_process_hostcontrol, api_validate
from ..const import ATTR_HOSTNAME

_LOGGER = logging.getLogger(__name__)


SCHEMA_OPTIONS = vol.Schema({
    vol.Optional(ATTR_HOSTNAME): vol.Coerce(str),
})


class APINetwork(object):
    """Handle rest api for network functions."""

@@ -15,12 +23,21 @@
        self.loop = loop
        self.host_control = host_control

    @api_process_hostcontrol
    def info(self, request):
    @api_process
    async def info(self, request):
        """Show network settings."""
        pass
        return {
            ATTR_HOSTNAME: self.host_control.hostname,
        }

    @api_process_hostcontrol
    def options(self, request):
    async def options(self, request):
        """Edit network settings."""
        pass
        body = await api_validate(SCHEMA_OPTIONS, request)

        # hostname
        if ATTR_HOSTNAME in body:
            if self.host_control.hostname != body[ATTR_HOSTNAME]:
                await self.host_control.set_hostname(body[ATTR_HOSTNAME])

        return True
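Hedged client-side sketch of the reworked endpoint; the base URL is illustrative, and the response envelope comes from the api_process wrapper:

import aiohttp


async def set_hostname(session: aiohttp.ClientSession):
    async with session.post('http://hassio/network/options',
                            json={'hostname': 'homeassistant'}) as resp:
        return await resp.json()  # {'result': 'ok', 'data': {}}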
134 hassio/api/snapshots.py Normal file
@@ -0,0 +1,134 @@
"""Init file for HassIO snapshot rest api."""
import asyncio
import logging

import voluptuous as vol

from .util import api_process, api_validate
from ..snapshots.validate import ALL_FOLDERS
from ..const import (
    ATTR_NAME, ATTR_SLUG, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
    ATTR_HOMEASSISTANT, ATTR_VERSION, ATTR_SIZE, ATTR_FOLDERS, ATTR_TYPE,
    ATTR_DEVICES, ATTR_SNAPSHOTS)

_LOGGER = logging.getLogger(__name__)


# pylint: disable=no-value-for-parameter
SCHEMA_RESTORE_PARTIAL = vol.Schema({
    vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
    vol.Optional(ATTR_ADDONS): [vol.Coerce(str)],
    vol.Optional(ATTR_FOLDERS): [vol.In(ALL_FOLDERS)],
})

SCHEMA_SNAPSHOT_FULL = vol.Schema({
    vol.Optional(ATTR_NAME): vol.Coerce(str),
})

SCHEMA_SNAPSHOT_PARTIAL = SCHEMA_SNAPSHOT_FULL.extend({
    vol.Optional(ATTR_ADDONS): [vol.Coerce(str)],
    vol.Optional(ATTR_FOLDERS): [vol.In(ALL_FOLDERS)],
})


class APISnapshots(object):
    """Handle rest api for snapshot functions."""

    def __init__(self, config, loop, snapshots):
        """Initialize snapshot rest api part."""
        self.config = config
        self.loop = loop
        self.snapshots = snapshots

    def _extract_snapshot(self, request):
        """Return snapshot, and throw an exception if it does not exist."""
        snapshot = self.snapshots.get(request.match_info.get('snapshot'))
        if not snapshot:
            raise RuntimeError("Snapshot does not exist")
        return snapshot

    @api_process
    async def list(self, request):
        """Return snapshot list."""
        data_snapshots = []
        for snapshot in self.snapshots.list_snapshots:
            data_snapshots.append({
                ATTR_SLUG: snapshot.slug,
                ATTR_NAME: snapshot.name,
                ATTR_DATE: snapshot.date,
            })

        return {
            ATTR_SNAPSHOTS: data_snapshots,
        }

    @api_process
    async def reload(self, request):
        """Reload snapshot list."""
        await asyncio.shield(self.snapshots.reload(), loop=self.loop)
        return True

    @api_process
    async def info(self, request):
        """Return snapshot info."""
        snapshot = self._extract_snapshot(request)

        data_addons = []
        for addon_data in snapshot.addons:
            data_addons.append({
                ATTR_SLUG: addon_data[ATTR_SLUG],
                ATTR_NAME: addon_data[ATTR_NAME],
                ATTR_VERSION: addon_data[ATTR_VERSION],
            })

        return {
            ATTR_SLUG: snapshot.slug,
            ATTR_TYPE: snapshot.sys_type,
            ATTR_NAME: snapshot.name,
            ATTR_DATE: snapshot.date,
            ATTR_SIZE: snapshot.size,
            ATTR_HOMEASSISTANT: {
                ATTR_VERSION: snapshot.homeassistant_version,
                ATTR_DEVICES: snapshot.homeassistant_devices,
            },
            ATTR_ADDONS: data_addons,
            ATTR_REPOSITORIES: snapshot.repositories,
            ATTR_FOLDERS: snapshot.folders,
        }

    @api_process
    async def snapshot_full(self, request):
        """Create a full snapshot."""
        body = await api_validate(SCHEMA_SNAPSHOT_FULL, request)
        return await asyncio.shield(
            self.snapshots.do_snapshot_full(**body), loop=self.loop)

    @api_process
    async def snapshot_partial(self, request):
        """Create a partial snapshot."""
        body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
        return await asyncio.shield(
            self.snapshots.do_snapshot_partial(**body), loop=self.loop)

    @api_process
    async def restore_full(self, request):
        """Restore a snapshot in full."""
        snapshot = self._extract_snapshot(request)
        return await asyncio.shield(
            self.snapshots.do_restore_full(snapshot), loop=self.loop)

    @api_process
    async def restore_partial(self, request):
        """Restore a snapshot partially."""
        snapshot = self._extract_snapshot(request)
        body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)

        return await asyncio.shield(
            self.snapshots.do_restore_partial(snapshot, **body),
            loop=self.loop)

    @api_process
    async def remove(self, request):
        """Remove a snapshot."""
        snapshot = self._extract_snapshot(request)
        return self.snapshots.remove(snapshot)
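Hedged usage sketch for the new snapshot endpoints; the base URL, slug, and folder names are illustrative (ALL_FOLDERS is defined in hassio/snapshots/validate.py, which is not part of this diff):

async def snapshot_before_upgrade(session):
    async with session.post(
            'http://hassio/snapshots/new/partial',
            json={'name': 'before-upgrade',
                  'addons': ['a1b2c3d4_mqtt'],
                  'folders': ['ssl']}) as resp:
        return await resp.json()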
@@ -5,13 +5,12 @@ import logging
import voluptuous as vol

from .util import api_process, api_process_raw, api_validate
from ..addons.util import create_hash_index_list
from ..const import (
    ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL,
    HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_REPOSITORIES,
    ATTR_REPOSITORY, ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED,
    ATTR_DETACHED, ATTR_SOURCE, ATTR_MAINTAINER, ATTR_URL, ATTR_ARCH,
    ATTR_BUILD)
    ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL, ATTR_ARCH,
    HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_LOGO, ATTR_REPOSITORY,
    ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_TIMEZONE,
    ATTR_STATE, CONTENT_TYPE_BINARY)
from ..tools import validate_timezone

_LOGGER = logging.getLogger(__name__)

@@ -19,6 +18,7 @@ SCHEMA_OPTIONS = vol.Schema({
    # pylint: disable=no-value-for-parameter
    vol.Optional(ATTR_BETA_CHANNEL): vol.Boolean(),
    vol.Optional(ATTR_ADDONS_REPOSITORIES): [vol.Url()],
    vol.Optional(ATTR_TIMEZONE): validate_timezone,
})

SCHEMA_VERSION = vol.Schema({
@@ -29,55 +29,16 @@ SCHEMA_VERSION = vol.Schema({
class APISupervisor(object):
    """Handle rest api for supervisor functions."""

    def __init__(self, config, loop, supervisor, addons, host_control):
    def __init__(self, config, loop, supervisor, snapshots, addons,
                 host_control, websession):
        """Initialize supervisor rest api part."""
        self.config = config
        self.loop = loop
        self.supervisor = supervisor
        self.addons = addons
        self.snapshots = snapshots
        self.host_control = host_control

    def _addons_list(self, only_installed=False):
        """Return a list of addons."""
        detached = self.addons.list_detached

        if only_installed:
            addons = self.addons.list_installed
        else:
            addons = self.addons.list_all

        data = []
        for addon in addons:
            data.append({
                ATTR_NAME: self.addons.get_name(addon),
                ATTR_SLUG: addon,
                ATTR_DESCRIPTON: self.addons.get_description(addon),
                ATTR_VERSION: self.addons.get_last_version(addon),
                ATTR_INSTALLED: self.addons.version_installed(addon),
                ATTR_ARCH: self.addons.get_arch(addon),
                ATTR_DETACHED: addon in detached,
                ATTR_REPOSITORY: self.addons.get_repository(addon),
                ATTR_BUILD: self.addons.need_build(addon),
                ATTR_URL: self.addons.get_url(addon),
            })

        return data

    def _repositories_list(self):
        """Return a list of addons repositories."""
        data = []
        list_id = create_hash_index_list(self.config.addons_repositories)

        for repository in self.addons.list_repositories:
            data.append({
                ATTR_SLUG: repository[ATTR_SLUG],
                ATTR_NAME: repository[ATTR_NAME],
                ATTR_SOURCE: list_id.get(repository[ATTR_SLUG]),
                ATTR_URL: repository.get(ATTR_URL),
                ATTR_MAINTAINER: repository.get(ATTR_MAINTAINER),
            })

        return data
        self.websession = websession

    @api_process
    async def ping(self, request):
@@ -87,23 +48,30 @@ class APISupervisor(object):
    @api_process
    async def info(self, request):
        """Return supervisor information."""
        list_addons = []
        for addon in self.addons.list_addons:
            if addon.is_installed:
                list_addons.append({
                    ATTR_NAME: addon.name,
                    ATTR_SLUG: addon.slug,
                    ATTR_DESCRIPTON: addon.description,
                    ATTR_STATE: await addon.state(),
                    ATTR_VERSION: addon.last_version,
                    ATTR_INSTALLED: addon.version_installed,
                    ATTR_REPOSITORY: addon.repository,
                    ATTR_LOGO: addon.with_logo,
                })

        return {
            ATTR_VERSION: HASSIO_VERSION,
            ATTR_LAST_VERSION: self.config.last_hassio,
            ATTR_BETA_CHANNEL: self.config.upstream_beta,
            ATTR_ARCH: self.addons.arch,
            ATTR_ADDONS: self._addons_list(only_installed=True),
            ATTR_ARCH: self.config.arch,
            ATTR_TIMEZONE: self.config.timezone,
            ATTR_ADDONS: list_addons,
            ATTR_ADDONS_REPOSITORIES: self.config.addons_repositories,
        }

    @api_process
    async def available_addons(self, request):
        """Return information for all available addons."""
        return {
            ATTR_ADDONS: self._addons_list(),
            ATTR_REPOSITORIES: self._repositories_list(),
        }

    @api_process
    async def options(self, request):
        """Set supervisor options."""
@@ -112,23 +80,12 @@ class APISupervisor(object):
        if ATTR_BETA_CHANNEL in body:
            self.config.upstream_beta = body[ATTR_BETA_CHANNEL]

        if ATTR_TIMEZONE in body:
            self.config.timezone = body[ATTR_TIMEZONE]

        if ATTR_ADDONS_REPOSITORIES in body:
            new = set(body[ATTR_ADDONS_REPOSITORIES])
            old = set(self.config.addons_repositories)

            # add new repositories
            tasks = [self.addons.add_git_repository(url) for url in
                     set(new - old)]
            if tasks:
                await asyncio.shield(
                    asyncio.wait(tasks, loop=self.loop), loop=self.loop)

            # remove old repositories
            for url in set(old - new):
                self.addons.drop_git_repository(url)

            # read repository
            self.addons.read_data_from_repositories()
            await asyncio.shield(self.addons.load_repositories(new))

        return True
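The removed set-difference bookkeeping now lives behind addons.load_repositories(); as a sketch of the reconcile step it replaces (an assumption about the manager's internals, directly modeled on the removed code above):

def reconcile(current, desired):
    """Split repository URLs into clone and drop sets."""
    to_add = desired - current      # repositories to clone
    to_remove = current - desired   # repositories to drop
    return to_add, to_remove

reconcile({'https://a', 'https://b'}, {'https://b', 'https://c'})
# -> ({'https://c'}, {'https://a'})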
@@ -148,7 +105,9 @@ class APISupervisor(object):
    async def reload(self, request):
        """Reload addons, config, etc."""
        tasks = [
            self.addons.reload(), self.config.fetch_update_infos(),
            self.addons.reload(),
            self.snapshots.reload(),
            self.config.fetch_update_infos(self.websession),
            self.host_control.load()
        ]
        results, _ = await asyncio.shield(
@@ -160,7 +119,7 @@ class APISupervisor(object):

        return True

    @api_process_raw
    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request):
        """Return supervisor docker logs.
@@ -9,7 +9,8 @@ import voluptuous as vol
from voluptuous.humanize import humanize_error

from ..const import (
    JSON_RESULT, JSON_DATA, JSON_MESSAGE, RESULT_OK, RESULT_ERROR)
    JSON_RESULT, JSON_DATA, JSON_MESSAGE, RESULT_OK, RESULT_ERROR,
    CONTENT_TYPE_BINARY)

_LOGGER = logging.getLogger(__name__)

@@ -65,22 +66,30 @@ def api_process_hostcontrol(method):
    return wrap_hostcontrol


def api_process_raw(method):
    """Wrap function with raw output to rest api."""
    async def wrap_api(api, *args, **kwargs):
        """Return api information."""
        try:
            message = await method(api, *args, **kwargs)
        except RuntimeError as err:
            message = str(err).encode()
def api_process_raw(content):
    """Wrap a content type around a raw rest api function."""
    def wrap_method(method):
        """Wrap function with raw output to rest api."""
        async def wrap_api(api, *args, **kwargs):
            """Return api information."""
            try:
                msg_data = await method(api, *args, **kwargs)
                msg_type = content
            except RuntimeError as err:
                msg_data = str(err).encode()
                msg_type = CONTENT_TYPE_BINARY

        return web.Response(body=message)
            return web.Response(body=msg_data, content_type=msg_type)

    return wrap_api
        return wrap_api
    return wrap_method


def api_return_error(message=None):
    """Return an API error message."""
    if message:
        _LOGGER.error(message)

    return web.json_response({
        JSON_RESULT: RESULT_ERROR,
        JSON_MESSAGE: message,
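The rework turns api_process_raw into a decorator factory: the content type is bound per handler when the class body is evaluated. An illustrative consumer (class and file names are made up):

class APIExample(object):
    """Sketch of a handler using the parameterized decorator."""

    @api_process_raw(CONTENT_TYPE_PNG)
    async def logo(self, request):
        # Served as image/png on success; a RuntimeError falls back to
        # application/octet-stream via the wrapper above.
        with open('logo.png', 'rb') as png:
            return png.read()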
@@ -2,6 +2,7 @@
import logging
import os
import signal
from pathlib import Path

from colorlog import ColoredFormatter

@@ -11,9 +12,9 @@ from .config import CoreConfig
_LOGGER = logging.getLogger(__name__)


def initialize_system_data(websession):
def initialize_system_data():
    """Setup default config and create folders."""
    config = CoreConfig(websession)
    config = CoreConfig()

    # homeassistant config folder
    if not config.path_config.is_dir():
@@ -21,41 +22,57 @@
            "Create Home-Assistant config folder %s", config.path_config)
        config.path_config.mkdir()

    # homeassistant ssl folder
    # hassio ssl folder
    if not config.path_ssl.is_dir():
        _LOGGER.info("Create Home-Assistant ssl folder %s", config.path_ssl)
        _LOGGER.info("Create hassio ssl folder %s", config.path_ssl)
        config.path_ssl.mkdir()

    # homeassistant addon data folder
    # hassio addon data folder
    if not config.path_addons_data.is_dir():
        _LOGGER.info("Create Home-Assistant addon data folder %s",
                     config.path_addons_data)
        _LOGGER.info(
            "Create hassio addon data folder %s", config.path_addons_data)
        config.path_addons_data.mkdir(parents=True)

    if not config.path_addons_local.is_dir():
        _LOGGER.info("Create Home-Assistant addon local repository folder %s",
        _LOGGER.info("Create hassio addon local repository folder %s",
                     config.path_addons_local)
        config.path_addons_local.mkdir(parents=True)

    if not config.path_addons_git.is_dir():
        _LOGGER.info("Create Home-Assistant addon git repositories folder %s",
        _LOGGER.info("Create hassio addon git repositories folder %s",
                     config.path_addons_git)
        config.path_addons_git.mkdir(parents=True)

    if not config.path_addons_build.is_dir():
        _LOGGER.info("Create Home-Assistant addon build folder %s",
                     config.path_addons_build)
        config.path_addons_build.mkdir(parents=True)
    # hassio tmp folder
    if not config.path_tmp.is_dir():
        _LOGGER.info("Create hassio temp folder %s", config.path_tmp)
        config.path_tmp.mkdir(parents=True)

    # homeassistant backup folder
    # hassio backup folder
    if not config.path_backup.is_dir():
        _LOGGER.info("Create Home-Assistant backup folder %s",
                     config.path_backup)
        _LOGGER.info("Create hassio backup folder %s", config.path_backup)
        config.path_backup.mkdir()

    # share folder
    if not config.path_share.is_dir():
        _LOGGER.info("Create hassio share folder %s", config.path_share)
        config.path_share.mkdir()

    return config


def migrate_system_env(config):
    """Cleanup some stuff after update."""

    # hass.io 0.37 -> 0.38
    old_build = Path(config.path_hassio, "addons/build")
    if old_build.is_dir():
        try:
            old_build.rmdir()
        except OSError:
            _LOGGER.warning("Can't cleanup old addons build dir.")


def initialize_logging():
    """Setup the logging."""
    logging.basicConfig(level=logging.INFO)
121 hassio/config.py
@@ -1,16 +1,13 @@
"""Bootstrap HassIO."""
from datetime import datetime
import logging
import json
import os
from pathlib import Path, PurePath

import voluptuous as vol
from voluptuous.humanize import humanize_error

from .const import FILE_HASSIO_CONFIG, HASSIO_SHARE
from .tools import (
    fetch_last_versions, write_json_file, read_json_file)
from .const import FILE_HASSIO_CONFIG, HASSIO_DATA
from .tools import fetch_last_versions, JsonConfig, validate_timezone

_LOGGER = logging.getLogger(__name__)

@@ -21,20 +18,20 @@ HOMEASSISTANT_LAST = 'homeassistant_last'

HASSIO_SSL = PurePath("ssl")
HASSIO_LAST = 'hassio_last'
HASSIO_CLEANUP = 'hassio_cleanup'

ADDONS_CORE = PurePath("addons/core")
ADDONS_LOCAL = PurePath("addons/local")
ADDONS_GIT = PurePath("addons/git")
ADDONS_DATA = PurePath("addons/data")
ADDONS_BUILD = PurePath("addons/build")
ADDONS_CUSTOM_LIST = 'addons_custom_list'

BACKUP_DATA = PurePath("backup")
SHARE_DATA = PurePath("share")
TMP_DATA = PurePath("tmp")

UPSTREAM_BETA = 'upstream_beta'

API_ENDPOINT = 'api_endpoint'
TIMEZONE = 'timezone'

SECURITY_INITIALIZE = 'security_initialize'
SECURITY_TOTP = 'security_totp'
@@ -46,9 +43,9 @@ SECURITY_SESSIONS = 'security_sessions'
SCHEMA_CONFIG = vol.Schema({
    vol.Optional(UPSTREAM_BETA, default=False): vol.Boolean(),
    vol.Optional(API_ENDPOINT): vol.Coerce(str),
    vol.Optional(TIMEZONE, default='UTC'): validate_timezone,
    vol.Optional(HOMEASSISTANT_LAST): vol.Coerce(str),
    vol.Optional(HASSIO_LAST): vol.Coerce(str),
    vol.Optional(HASSIO_CLEANUP): vol.Coerce(str),
    vol.Optional(ADDONS_CUSTOM_LIST, default=[]): [vol.Url()],
    vol.Optional(SECURITY_INITIALIZE, default=False): vol.Boolean(),
    vol.Optional(SECURITY_TOTP): vol.Coerce(str),
@@ -58,51 +55,17 @@ SCHEMA_CONFIG = vol.Schema({
}, extra=vol.REMOVE_EXTRA)


class Config(object):
    """Hold all config data."""

    def __init__(self, config_file):
        """Initialize config object."""
        self._file = config_file
        self._data = {}

        # init or load data
        if self._file.is_file():
            try:
                self._data = read_json_file(self._file)
            except (OSError, json.JSONDecodeError):
                _LOGGER.warning("Can't read %s", self._file)
                self._data = {}

    def save(self):
        """Store data to config file."""
        if not write_json_file(self._file, self._data):
            _LOGGER.error("Can't store config in %s", self._file)
            return False
        return True


class CoreConfig(Config):
class CoreConfig(JsonConfig):
    """Hold all core config data."""

    def __init__(self, websession):
    def __init__(self):
        """Initialize config object."""
        self.websession = websession
        super().__init__(FILE_HASSIO_CONFIG, SCHEMA_CONFIG)
        self.arch = None

        super().__init__(FILE_HASSIO_CONFIG)

        # validate data
        try:
            self._data = SCHEMA_CONFIG(self._data)
            self.save()
        except vol.Invalid as ex:
            _LOGGER.warning(
                "Invalid config %s", humanize_error(self._data, ex))

    async def fetch_update_infos(self):
    async def fetch_update_infos(self, websession):
        """Read current versions from web."""
        last = await fetch_last_versions(
            self.websession, beta=self.upstream_beta)
        last = await fetch_last_versions(websession, beta=self.upstream_beta)

        if last:
            self._data.update({
@@ -133,25 +96,18 @@ class CoreConfig(Config):
    def upstream_beta(self, value):
        """Set beta upstream mode."""
        self._data[UPSTREAM_BETA] = bool(value)

    @property
    def hassio_cleanup(self):
        """Return version that needs cleanup."""
        return self._data.get(HASSIO_CLEANUP)

    @hassio_cleanup.setter
    def hassio_cleanup(self, version):
        """Set or remove cleanup flag."""
        if version is None:
            self._data.pop(HASSIO_CLEANUP, None)
        else:
            self._data[HASSIO_CLEANUP] = version
        self.save()

    @property
    def homeassistant_image(self):
        """Return docker homeassistant repository."""
        return os.environ['HOMEASSISTANT_REPOSITORY']
    def timezone(self):
        """Return system timezone."""
        return self._data[TIMEZONE]

    @timezone.setter
    def timezone(self, value):
        """Set system timezone."""
        self._data[TIMEZONE] = value
        self.save()

    @property
    def last_homeassistant(self):
@@ -163,6 +119,11 @@ class CoreConfig(Config):
        """Actual version of hassio."""
        return self._data.get(HASSIO_LAST)

    @property
    def path_hassio(self):
        """Return hassio data path."""
        return HASSIO_DATA

    @property
    def path_extern_hassio(self):
        """Return hassio data path extern for docker."""
@@ -176,7 +137,7 @@ class CoreConfig(Config):
    @property
    def path_config(self):
        """Return config path inside supervisor."""
        return Path(HASSIO_SHARE, HOMEASSISTANT_CONFIG)
        return Path(HASSIO_DATA, HOMEASSISTANT_CONFIG)

    @property
    def path_extern_ssl(self):
@@ -186,22 +147,22 @@ class CoreConfig(Config):
    @property
    def path_ssl(self):
        """Return SSL path inside supervisor."""
        return Path(HASSIO_SHARE, HASSIO_SSL)
        return Path(HASSIO_DATA, HASSIO_SSL)

    @property
    def path_addons_core(self):
        """Return git path for core addons."""
        return Path(HASSIO_SHARE, ADDONS_CORE)
        return Path(HASSIO_DATA, ADDONS_CORE)

    @property
    def path_addons_git(self):
        """Return path for git addons."""
        return Path(HASSIO_SHARE, ADDONS_GIT)
        return Path(HASSIO_DATA, ADDONS_GIT)

    @property
    def path_addons_local(self):
        """Return path for custom addons."""
        return Path(HASSIO_SHARE, ADDONS_LOCAL)
        return Path(HASSIO_DATA, ADDONS_LOCAL)

    @property
    def path_extern_addons_local(self):
@@ -211,7 +172,7 @@ class CoreConfig(Config):
    @property
    def path_addons_data(self):
        """Return root addon data folder."""
        return Path(HASSIO_SHARE, ADDONS_DATA)
        return Path(HASSIO_DATA, ADDONS_DATA)

    @property
    def path_extern_addons_data(self):
@@ -219,20 +180,30 @@ class CoreConfig(Config):
        return PurePath(self.path_extern_hassio, ADDONS_DATA)

    @property
    def path_addons_build(self):
        """Return root addon build folder."""
        return Path(HASSIO_SHARE, ADDONS_BUILD)
    def path_tmp(self):
        """Return hass.io temp folder."""
        return Path(HASSIO_DATA, TMP_DATA)

    @property
    def path_backup(self):
        """Return root backup data folder."""
        return Path(HASSIO_SHARE, BACKUP_DATA)
        return Path(HASSIO_DATA, BACKUP_DATA)

    @property
    def path_extern_backup(self):
        """Return root backup data folder extern for docker."""
        return PurePath(self.path_extern_hassio, BACKUP_DATA)

    @property
    def path_share(self):
        """Return root share data folder."""
        return Path(HASSIO_DATA, SHARE_DATA)

    @property
    def path_extern_share(self):
        """Return root share data folder extern for docker."""
        return PurePath(self.path_extern_hassio, SHARE_DATA)

    @property
    def addons_repositories(self):
        """Return list of addons custom repositories."""
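JsonConfig is imported from hassio/tools.py, which is not part of this diff; judging from the removed Config class above, its shape is plausibly along these lines (a sketch, not the real implementation, reusing the read/write helpers the old class used):

class JsonConfig(object):
    """Sketch: load a JSON file, validate it against a schema, save it."""

    def __init__(self, json_file, schema):
        self._file = json_file
        self._schema = schema
        self._data = {}

        if self._file.is_file():
            try:
                self._data = read_json_file(self._file)
            except (OSError, json.JSONDecodeError):
                _LOGGER.warning("Can't read %s", self._file)

        try:
            self._data = self._schema(self._data)
        except vol.Invalid as ex:
            _LOGGER.warning(
                "Invalid config %s", humanize_error(self._data, ex))

    def save(self):
        """Write data back to disk."""
        if not write_json_file(self._file, self._data):
            _LOGGER.error("Can't store config in %s", self._file)
            return False
        return True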
@@ -1,7 +1,7 @@
"""Const file for HassIO."""
from pathlib import Path

HASSIO_VERSION = '0.26'
HASSIO_VERSION = '0.46'

URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
                      'hassio/master/version.json')
@@ -10,22 +10,33 @@ URL_HASSIO_VERSION_BETA = ('https://raw.githubusercontent.com/home-assistant/'

URL_HASSIO_ADDONS = 'https://github.com/home-assistant/hassio-addons'

HASSIO_SHARE = Path("/data")
HASSIO_DATA = Path("/data")

RUN_UPDATE_INFO_TASKS = 28800
RUN_UPDATE_SUPERVISOR_TASKS = 29100
RUN_UPDATE_ADDONS_TASKS = 57600
RUN_RELOAD_ADDONS_TASKS = 28800
RUN_RELOAD_SNAPSHOTS_TASKS = 72000
RUN_WATCHDOG_HOMEASSISTANT = 15
RUN_CLEANUP_API_SESSIONS = 900

RESTART_EXIT_CODE = 100

FILE_HASSIO_ADDONS = Path(HASSIO_SHARE, "addons.json")
FILE_HASSIO_CONFIG = Path(HASSIO_SHARE, "config.json")
FILE_HASSIO_ADDONS = Path(HASSIO_DATA, "addons.json")
FILE_HASSIO_CONFIG = Path(HASSIO_DATA, "config.json")
FILE_HASSIO_HOMEASSISTANT = Path(HASSIO_DATA, "homeassistant.json")

SOCKET_DOCKER = Path("/var/run/docker.sock")
SOCKET_HC = Path("/var/run/hassio-hc.sock")

LABEL_VERSION = 'io.hass.version'
LABEL_ARCH = 'io.hass.arch'
LABEL_TYPE = 'io.hass.type'

META_ADDON = 'addon'
META_SUPERVISOR = 'supervisor'
META_HOMEASSISTANT = 'homeassistant'

JSON_RESULT = 'result'
JSON_DATA = 'data'
JSON_MESSAGE = 'message'
@@ -33,8 +44,13 @@ JSON_MESSAGE = 'message'
RESULT_ERROR = 'error'
RESULT_OK = 'ok'

CONTENT_TYPE_BINARY = 'application/octet-stream'
CONTENT_TYPE_PNG = 'image/png'

ATTR_DATE = 'date'
ATTR_ARCH = 'arch'
ATTR_HOSTNAME = 'hostname'
ATTR_TIMEZONE = 'timezone'
ATTR_OS = 'os'
ATTR_TYPE = 'type'
ATTR_SOURCE = 'source'
@@ -50,12 +66,14 @@ ATTR_STARTUP = 'startup'
ATTR_BOOT = 'boot'
ATTR_PORTS = 'ports'
ATTR_MAP = 'map'
ATTR_WEBUI = 'webui'
ATTR_OPTIONS = 'options'
ATTR_INSTALLED = 'installed'
ATTR_DETACHED = 'detached'
ATTR_STATE = 'state'
ATTR_SCHEMA = 'schema'
ATTR_IMAGE = 'image'
ATTR_LOGO = 'logo'
ATTR_ADDONS_REPOSITORIES = 'addons_repositories'
ATTR_REPOSITORY = 'repository'
ATTR_REPOSITORIES = 'repositories'
@@ -67,9 +85,27 @@ ATTR_INITIALIZE = 'initialize'
ATTR_SESSION = 'session'
ATTR_LOCATON = 'location'
ATTR_BUILD = 'build'
ATTR_DEVICES = 'devices'
ATTR_ENVIRONMENT = 'environment'
ATTR_HOST_NETWORK = 'host_network'
ATTR_NETWORK = 'network'
ATTR_TMPFS = 'tmpfs'
ATTR_PRIVILEGED = 'privileged'
ATTR_USER = 'user'
ATTR_SYSTEM = 'system'
ATTR_SNAPSHOTS = 'snapshots'
ATTR_HOMEASSISTANT = 'homeassistant'
ATTR_FOLDERS = 'folders'
ATTR_SIZE = 'size'
ATTR_TYPE = 'type'
ATTR_TIMEOUT = 'timeout'
ATTR_AUTO_UPDATE = 'auto_update'
ATTR_CUSTOM = 'custom'

STARTUP_BEFORE = 'before'
STARTUP_AFTER = 'after'
STARTUP_INITIALIZE = 'initialize'
STARTUP_SYSTEM = 'system'
STARTUP_SERVICES = 'services'
STARTUP_APPLICATION = 'application'
STARTUP_ONCE = 'once'

BOOT_AUTO = 'auto'
@@ -77,13 +113,26 @@ BOOT_MANUAL = 'manual'

STATE_STARTED = 'started'
STATE_STOPPED = 'stopped'
STATE_NONE = 'none'

MAP_CONFIG = 'config'
MAP_SSL = 'ssl'
MAP_ADDONS = 'addons'
MAP_BACKUP = 'backup'
MAP_SHARE = 'share'

ARCH_ARMHF = 'armhf'
ARCH_AARCH64 = 'aarch64'
ARCH_AMD64 = 'amd64'
ARCH_I386 = 'i386'

REPOSITORY_CORE = 'core'
REPOSITORY_LOCAL = 'local'

FOLDER_HOMEASSISTANT = 'homeassistant'
FOLDER_SHARE = 'share'
FOLDER_ADDONS = 'addons/local'
FOLDER_SSL = 'ssl'

SNAPSHOT_FULL = 'full'
SNAPSHOT_PARTIAL = 'partial'
110 hassio/core.py
@@ -5,21 +5,22 @@ import logging

import aiohttp
import docker

from . import bootstrap
from .addons import AddonManager
from .api import RestAPI
from .host_control import HostControl
from .const import (
    SOCKET_DOCKER, RUN_UPDATE_INFO_TASKS, RUN_RELOAD_ADDONS_TASKS,
    RUN_UPDATE_SUPERVISOR_TASKS, RUN_WATCHDOG_HOMEASSISTANT,
    RUN_CLEANUP_API_SESSIONS, STARTUP_AFTER, STARTUP_BEFORE)
    RUN_CLEANUP_API_SESSIONS, STARTUP_SYSTEM, STARTUP_SERVICES,
    STARTUP_APPLICATION, STARTUP_INITIALIZE, RUN_RELOAD_SNAPSHOTS_TASKS,
    RUN_UPDATE_ADDONS_TASKS)
from .homeassistant import HomeAssistant
from .scheduler import Scheduler
from .dock.homeassistant import DockerHomeAssistant
from .dock.supervisor import DockerSupervisor
from .snapshots import SnapshotsManager
from .tasks import (
    hassio_update, homeassistant_watchdog, homeassistant_setup,
    api_sessions_cleanup)
from .tools import get_arch_from_image, get_local_ip
    hassio_update, homeassistant_watchdog, api_sessions_cleanup, addons_update)
from .tools import get_local_ip, fetch_timezone

_LOGGER = logging.getLogger(__name__)

@@ -27,38 +28,51 @@ _LOGGER = logging.getLogger(__name__)
class HassIO(object):
    """Main object of hassio."""

    def __init__(self, loop):
    def __init__(self, loop, config):
        """Initialize hassio object."""
        self.exit_code = 0
        self.loop = loop
        self.websession = aiohttp.ClientSession(loop=self.loop)
        self.config = bootstrap.initialize_system_data(self.websession)
        self.scheduler = Scheduler(self.loop)
        self.api = RestAPI(self.config, self.loop)
        self.config = config
        self.websession = aiohttp.ClientSession(loop=loop)
        self.scheduler = Scheduler(loop)
        self.api = RestAPI(config, loop)
        self.dock = docker.DockerClient(
            base_url="unix:/{}".format(str(SOCKET_DOCKER)), version='auto')

        # init basic docker container
        self.supervisor = DockerSupervisor(
            self.config, self.loop, self.dock, self)
        self.homeassistant = DockerHomeAssistant(
            self.config, self.loop, self.dock)
        self.supervisor = DockerSupervisor(config, loop, self.dock, self.stop)

        # init homeassistant
        self.homeassistant = HomeAssistant(
            config, loop, self.dock, self.websession)

        # init HostControl
        self.host_control = HostControl(self.loop)
        self.host_control = HostControl(loop)

        # init addon system
        self.addons = AddonManager(self.config, self.loop, self.dock)
        self.addons = AddonManager(config, loop, self.dock)

        # init snapshot system
        self.snapshots = SnapshotsManager(
            config, loop, self.scheduler, self.addons, self.homeassistant)

    async def setup(self):
        """Setup HassIO orchestration."""
        # supervisor
        await self.supervisor.attach()
        if not await self.supervisor.attach():
            _LOGGER.fatal("Can't attach to supervisor docker container!")
        await self.supervisor.cleanup()

        # set running arch
        self.config.arch = self.supervisor.arch

        # set api endpoint
        self.config.api_endpoint = await get_local_ip(self.loop)

        # update timezone
        if self.config.timezone == 'UTC':
            self.config.timezone = await fetch_timezone(self.websession)

        # hostcontrol
        await self.host_control.load()

@@ -70,10 +84,12 @@ class HassIO(object):
        self.api.register_host(self.host_control)
        self.api.register_network(self.host_control)
        self.api.register_supervisor(
            self.supervisor, self.addons, self.host_control)
            self.supervisor, self.snapshots, self.addons, self.host_control,
            self.websession)
        self.api.register_homeassistant(self.homeassistant)
        self.api.register_addons(self.addons)
        self.api.register_security()
        self.api.register_snapshots(self.snapshots)
        self.api.register_panel()

        # schedule api session cleanup
@@ -81,50 +97,60 @@ class HassIO(object):
            api_sessions_cleanup(self.config), RUN_CLEANUP_API_SESSIONS,
            now=True)

        # schedule update info tasks
        self.scheduler.register_task(
            self.config.fetch_update_infos, RUN_UPDATE_INFO_TASKS,
            now=True)

        # first start of supervisor?
        if not await self.homeassistant.exists():
            _LOGGER.info("No HomeAssistant docker found.")
            await homeassistant_setup(
                self.config, self.loop, self.homeassistant)
        # Load homeassistant
        await self.homeassistant.prepare()

        # Load addons
        arch = get_arch_from_image(self.supervisor.image)
        await self.addons.prepare(arch)
        await self.addons.prepare()

        # schedule addon update task
        self.scheduler.register_task(
            self.addons.reload, RUN_RELOAD_ADDONS_TASKS, now=True)
        self.scheduler.register_task(
            addons_update(self.loop, self.addons), RUN_UPDATE_ADDONS_TASKS)

        # schedule self update task
        self.scheduler.register_task(
            hassio_update(self.config, self.supervisor),
            hassio_update(self.config, self.supervisor, self.websession),
            RUN_UPDATE_SUPERVISOR_TASKS)

        # schedule snapshot update tasks
        self.scheduler.register_task(
            self.snapshots.reload, RUN_RELOAD_SNAPSHOTS_TASKS, now=True)

        # start addons marked as initialize
        await self.addons.auto_boot(STARTUP_INITIALIZE)

    async def start(self):
        """Start HassIO orchestration."""
        # on release channel, try update itself
        # on beta channel, only read new versions
        await asyncio.wait(
            [hassio_update(self.config, self.supervisor, self.websession)()],
            loop=self.loop
        )

        # start api
        await self.api.start()
        _LOGGER.info("Start hassio api on %s", self.config.api_endpoint)

        # start addons marked as system
        await self.addons.auto_boot(STARTUP_SYSTEM)

        try:
            # HomeAssistant is already running / supervisor was only rebooted
            if await self.homeassistant.is_running():
                _LOGGER.info("HassIO reboot detected")
                return

            # start addons marked as before
            await self.addons.auto_boot(STARTUP_BEFORE)
            # start addons marked as services
            await self.addons.auto_boot(STARTUP_SERVICES)

            # run HomeAssistant
            await self.homeassistant.run()

            # start addons marked as after
            await self.addons.auto_boot(STARTUP_AFTER)
            # start addons marked as application
            await self.addons.auto_boot(STARTUP_APPLICATION)

        finally:
            # schedule homeassistant watchdog
@@ -132,14 +158,18 @@ class HassIO(object):
                homeassistant_watchdog(self.loop, self.homeassistant),
                RUN_WATCHDOG_HOMEASSISTANT)

            # If landingpage / run upgrade in background
            if self.homeassistant.version == 'landingpage':
                self.loop.create_task(self.homeassistant.install())

    async def stop(self, exit_code=0):
        """Stop a running orchestration."""
        # don't process scheduler anymore
        self.scheduler.stop()
        self.scheduler.suspend = True

        # process stop tasks in parallel
        tasks = [self.websession.close(), self.api.stop()]
        await asyncio.wait(tasks, loop=self.loop)
        # process stop tasks
        self.websession.close()
        await self.api.stop()

        self.exit_code = exit_code
        self.loop.stop()
hassio/dock/__init__.py
@@ -5,7 +5,7 @@ import logging

import docker

from ..tools import get_version_from_env
from ..const import LABEL_VERSION, LABEL_ARCH

_LOGGER = logging.getLogger(__name__)

@@ -13,18 +13,19 @@ _LOGGER = logging.getLogger(__name__)
class DockerBase(object):
    """Docker hassio wrapper."""

    def __init__(self, config, loop, dock, image=None):
    def __init__(self, config, loop, dock, image=None, timeout=30):
        """Initialize docker base wrapper."""
        self.config = config
        self.loop = loop
        self.dock = dock
        self.image = image
        self.container = None
        self.timeout = timeout
        self.version = None
        self.arch = None
        self._lock = asyncio.Lock(loop=loop)

    @property
    def docker_name(self):
    def name(self):
        """Return name of docker container."""
        return None

@@ -33,6 +34,24 @@
        """Return True if a task is in progress."""
        return self._lock.locked()

    def process_metadata(self, metadata, force=False):
        """Read metadata and set it to object."""
        # read image
        if not self.image:
            self.image = metadata['Config']['Image']

        # read version
        need_version = force or not self.version
        if need_version and LABEL_VERSION in metadata['Config']['Labels']:
            self.version = metadata['Config']['Labels'][LABEL_VERSION]
        elif need_version:
            _LOGGER.warning("Can't read version from %s", self.name)

        # read arch
        need_arch = force or not self.arch
        if need_arch and LABEL_ARCH in metadata['Config']['Labels']:
            self.arch = metadata['Config']['Labels'][LABEL_ARCH]

    async def install(self, tag):
        """Pull docker image."""
        if self._lock.locked():
@@ -51,8 +70,8 @@
            _LOGGER.info("Pull image %s tag %s.", self.image, tag)
            image = self.dock.images.pull("{}:{}".format(self.image, tag))

            self.version = tag
            image.tag(self.image, tag='latest')
            self.process_metadata(image.attrs, force=True)
        except docker.errors.APIError as err:
            _LOGGER.error("Can't install %s:%s -> %s.", self.image, tag, err)
            return False
@@ -73,8 +92,7 @@
        Need run inside executor.
        """
        try:
            image = self.dock.images.get(self.image)
            self.version = get_version_from_env(image.attrs['Config']['Env'])
            self.dock.images.get(self.image)
        except docker.errors.DockerException:
            return False

@@ -92,17 +110,21 @@

        Need run inside executor.
        """
        if not self.container:
            try:
                self.container = self.dock.containers.get(self.docker_name)
                self.version = get_version_from_env(
                    self.container.attrs['Config']['Env'])
            except docker.errors.DockerException:
                return False
        else:
            self.container.reload()
        try:
            container = self.dock.containers.get(self.name)
            image = self.dock.images.get(self.image)
        except docker.errors.DockerException:
            return False

        return self.container.status == 'running'
        # container is not running
        if container.status != 'running':
            return False

        # we run on an old image, stop and start it
        if container.image.id != image.id:
            return False

        return True

    async def attach(self):
        """Attach to running docker container."""
@@ -119,17 +141,17 @@
        Need run inside executor.
        """
        try:
            self.container = self.dock.containers.get(self.docker_name)
            self.image = self.container.attrs['Config']['Image']
            self.version = get_version_from_env(
                self.container.attrs['Config']['Env'])
            _LOGGER.info("Attach to image %s with version %s",
                         self.image, self.version)
        except (docker.errors.DockerException, KeyError):
            _LOGGER.fatal(
                "Can't attach to %s docker container!", self.docker_name)
            if self.image:
                obj_data = self.dock.images.get(self.image).attrs
            else:
                obj_data = self.dock.containers.get(self.name).attrs
        except docker.errors.DockerException:
            return False

        self.process_metadata(obj_data)
        _LOGGER.info(
            "Attach to image %s with version %s", self.image, self.version)

        return True

    async def run(self):
@@ -163,23 +185,22 @@

        Need run inside executor.
        """
        if not self.container:
        try:
            container = self.dock.containers.get(self.name)
        except docker.errors.DockerException:
            return

        _LOGGER.info("Stop %s docker application", self.image)

        self.container.reload()
        if self.container.status == 'running':
        if container.status == 'running':
            _LOGGER.info("Stop %s docker application", self.image)
            with suppress(docker.errors.DockerException):
                self.container.stop()
                container.stop(timeout=self.timeout)

        with suppress(docker.errors.DockerException):
            self.container.remove(force=True)

        self.container = None
        _LOGGER.info("Clean %s docker application", self.image)
            container.remove(force=True)

    async def remove(self):
        """Remove docker container."""
        """Remove docker images."""
        if self._lock.locked():
            _LOGGER.error("Can't execute remove while a task is in progress")
            return False
@@ -188,27 +209,32 @@
        return await self.loop.run_in_executor(None, self._remove)

    def _remove(self):
        """Remove docker container.
        """Remove docker images.

        Need run inside executor.
        """
        if self._is_running():
            self._stop()
        # cleanup container
        self._stop()

        _LOGGER.info("Remove docker %s with latest and %s",
                     self.image, self.version)
        _LOGGER.info(
            "Remove docker %s with latest and %s", self.image, self.version)

        try:
            self.dock.images.remove(
                image="{}:latest".format(self.image), force=True)
            self.dock.images.remove(
                image="{}:{}".format(self.image, self.version), force=True)
        except docker.errors.ImageNotFound:
            return True
            with suppress(docker.errors.ImageNotFound):
                self.dock.images.remove(
                    image="{}:latest".format(self.image), force=True)

            with suppress(docker.errors.ImageNotFound):
                self.dock.images.remove(
                    image="{}:{}".format(self.image, self.version), force=True)

        except docker.errors.DockerException as err:
            _LOGGER.warning("Can't remove image %s -> %s", self.image, err)
            return False

        # clean metadata
        self.version = None
        self.arch = None
        return True

    async def update(self, tag):
@@ -225,29 +251,31 @@

        Need run inside executor.
        """
        old_image = "{}:{}".format(self.image, self.version)
        was_running = self._is_running()

        _LOGGER.info("Update docker %s with %s:%s",
                     old_image, self.image, tag)
        _LOGGER.info(
            "Update docker %s with %s:%s", self.version, self.image, tag)

        # update docker image
        if self._install(tag):
            _LOGGER.info("Cleanup old %s docker", old_image)
            self._stop()
            try:
                self.dock.images.remove(image=old_image, force=True)
            except docker.errors.DockerException as err:
                _LOGGER.warning(
                    "Can't remove old image %s -> %s", old_image, err)
            return True
        if not self._install(tag):
            return False

        return False
        # run or cleanup container
        if was_running:
            self._run()
        else:
            self._stop()

        # cleanup images
        self._cleanup()

        return True

    async def logs(self):
        """Return docker logs of container."""
        if self._lock.locked():
            _LOGGER.error("Can't execute logs while a task is in progress")
            return False
            return b""

        async with self._lock:
            return await self.loop.run_in_executor(None, self._logs)
@@ -257,11 +285,13 @@

        Need run inside executor.
        """
        if not self.container:
            return
        try:
            container = self.dock.containers.get(self.name)
        except docker.errors.DockerException:
            return b""

        try:
            return self.container.logs(tail=100, stdout=True, stderr=True)
            return container.logs(tail=100, stdout=True, stderr=True)
        except docker.errors.DockerException as err:
            _LOGGER.warning("Can't grab logs from %s -> %s", self.image, err)

@@ -279,15 +309,45 @@

        Need run inside executor.
        """
        if not self.container:
        try:
            container = self.dock.containers.get(self.name)
        except docker.errors.DockerException:
            return False

        _LOGGER.info("Restart %s", self.image)

        try:
            self.container.restart(timeout=30)
            container.restart(timeout=self.timeout)
        except docker.errors.DockerException as err:
            _LOGGER.warning("Can't restart %s -> %s", self.image, err)
            return False

        return True

    async def cleanup(self):
        """Check if old version exists and cleanup."""
        if self._lock.locked():
            _LOGGER.error("Can't execute cleanup while a task is in progress")
            return False

        async with self._lock:
            await self.loop.run_in_executor(None, self._cleanup)

    def _cleanup(self):
        """Check if old version exists and cleanup.

        Need run inside executor.
        """
        try:
            latest = self.dock.images.get(self.image)
        except docker.errors.DockerException:
            _LOGGER.warning("Can't find %s for cleanup", self.image)
            return

        for image in self.dock.images.list(name=self.image):
            if latest.id == image.id:
                continue

            with suppress(docker.errors.DockerException):
                _LOGGER.info("Cleanup docker images: %s", image.tags)
                self.dock.images.remove(image.id, force=True)
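Editor's note: process_metadata above switches version/arch detection from container environment variables to docker image labels. An illustrative shape of the `metadata` dict it consumes; the label names come from create_metadata in hassio/dock/util.py further down, while the concrete values here are made up:

    metadata = {
        'Config': {
            'Image': 'homeassistant/amd64-addon-example',
            'Labels': {
                'io.hass.version': '1.1',   # read via LABEL_VERSION
                'io.hass.arch': 'amd64',    # read via LABEL_ARCH
                'io.hass.type': 'addon',
            },
        },
    }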
hassio/dock/addon.py
@@ -4,10 +4,12 @@ from pathlib import Path
import shutil

import docker
import requests

from . import DockerBase
from .util import dockerfile_template
from ..tools import get_version_from_env
from ..const import (
    META_ADDON, MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE)

_LOGGER = logging.getLogger(__name__)

@@ -15,48 +17,73 @@ _LOGGER = logging.getLogger(__name__)
class DockerAddon(DockerBase):
    """Docker hassio wrapper for addons."""

    def __init__(self, config, loop, dock, addons_data, addon):
    def __init__(self, config, loop, dock, addon):
        """Initialize docker addon wrapper."""
        super().__init__(
            config, loop, dock, image=addons_data.get_image(addon))
            config, loop, dock, image=addon.image, timeout=addon.timeout)
        self.addon = addon
        self.addons_data = addons_data

    @property
    def docker_name(self):
    def name(self):
        """Return name of docker container."""
        return "addon_{}".format(self.addon)
        return "addon_{}".format(self.addon.slug)

    @property
    def environment(self):
        """Return environment for docker add-on."""
        addon_env = self.addon.environment or {}

        return {
            **addon_env,
            'TZ': self.config.timezone,
        }

    @property
    def tmpfs(self):
        """Return tmpfs for docker add-on."""
        options = self.addon.tmpfs
        if options:
            return {"/tmpfs": "{}".format(options)}
        return None

    @property
    def volumes(self):
        """Generate volumes for mappings."""
        volumes = {
            str(self.addons_data.path_extern_data(self.addon)): {
            str(self.addon.path_extern_data): {
                'bind': '/data', 'mode': 'rw'
            }}

        if self.addons_data.map_config(self.addon):
        addon_mapping = self.addon.map_volumes

        if MAP_CONFIG in addon_mapping:
            volumes.update({
                str(self.config.path_extern_config): {
                    'bind': '/config', 'mode': 'rw'
                    'bind': '/config', 'mode': addon_mapping[MAP_CONFIG]
                }})

        if self.addons_data.map_ssl(self.addon):
        if MAP_SSL in addon_mapping:
            volumes.update({
                str(self.config.path_extern_ssl): {
                    'bind': '/ssl', 'mode': 'rw'
                    'bind': '/ssl', 'mode': addon_mapping[MAP_SSL]
                }})

        if self.addons_data.map_addons(self.addon):
        if MAP_ADDONS in addon_mapping:
            volumes.update({
                str(self.config.path_extern_addons_local): {
                    'bind': '/addons', 'mode': 'rw'
                    'bind': '/addons', 'mode': addon_mapping[MAP_ADDONS]
                }})

        if self.addons_data.map_backup(self.addon):
        if MAP_BACKUP in addon_mapping:
            volumes.update({
                str(self.config.path_extern_backup): {
                    'bind': '/backup', 'mode': 'rw'
                    'bind': '/backup', 'mode': addon_mapping[MAP_BACKUP]
                }})

        if MAP_SHARE in addon_mapping:
            volumes.update({
                str(self.config.path_extern_share): {
                    'bind': '/share', 'mode': addon_mapping[MAP_SHARE]
                }})

        return volumes
@@ -67,87 +94,68 @@ class DockerAddon(DockerBase):

        Need run inside executor.
        """
        if self._is_running():
            return
            return True

        # cleanup old container
        # cleanup
        self._stop()

        # write config
        if not self.addon.write_options():
            return False

        try:
            self.container = self.dock.containers.run(
            self.dock.containers.run(
                self.image,
                name=self.docker_name,
                name=self.name,
                detach=True,
                network_mode='bridge',
                ports=self.addons_data.get_ports(self.addon),
                network_mode=self.addon.network_mode,
                ports=self.addon.ports,
                devices=self.addon.devices,
                cap_add=self.addon.privileged,
                environment=self.environment,
                volumes=self.volumes,
                tmpfs=self.tmpfs
            )

            self.version = get_version_from_env(
                self.container.attrs['Config']['Env'])

            _LOGGER.info("Start docker addon %s with version %s",
                         self.image, self.version)

        except docker.errors.DockerException as err:
            _LOGGER.error("Can't run %s -> %s", self.image, err)
            return False

        _LOGGER.info(
            "Start docker addon %s with version %s", self.image, self.version)
        return True

    def _attach(self):
        """Attach to running docker container.

        Need run inside executor.
        """
        try:
            self.container = self.dock.containers.get(self.docker_name)
            self.version = get_version_from_env(
                self.container.attrs['Config']['Env'])

            _LOGGER.info(
                "Attach to image %s with version %s", self.image, self.version)
        except (docker.errors.DockerException, KeyError):
            pass

    def _install(self, tag):
        """Pull docker image or build it.

        Need run inside executor.
        """
        if self.addons_data.need_build(self.addon):
        if self.addon.need_build:
            return self._build(tag)

        return super()._install(tag)

    async def build(self, tag):
        """Build a docker container."""
        if self._lock.locked():
            _LOGGER.error("Can't execute build while a task is in progress")
            return False

        async with self._lock:
            return await self.loop.run_in_executor(None, self._build, tag)

    def _build(self, tag):
        """Build a docker container.

        Need run inside executor.
        """
        build_dir = Path(self.config.path_addons_build, self.addon)
        build_dir = Path(self.config.path_tmp, self.addon.slug)
        try:
            # prepare temporary addon build folder
            try:
                source = self.addons_data.path_addon_location(self.addon)
                source = self.addon.path_location
                shutil.copytree(str(source), str(build_dir))
            except shutil.Error as err:
                _LOGGER.error("Can't copy %s to temporary build folder -> %s",
                              source, build_dir)
                              source, err)
                return False

            # prepare Dockerfile
            try:
                dockerfile_template(
                    Path(build_dir, 'Dockerfile'), self.addons_data.arch, tag)
                    Path(build_dir, 'Dockerfile'), self.config.arch,
                    tag, META_ADDON)
            except OSError as err:
                _LOGGER.error("Can't prepare dockerfile -> %s", err)

@@ -159,8 +167,8 @@ class DockerAddon(DockerBase):
            image = self.dock.images.build(
                path=str(build_dir), tag=build_tag, pull=True)

            self.version = tag
            image.tag(self.image, tag='latest')
            self.process_metadata(image.attrs, force=True)

        except (docker.errors.DockerException, TypeError) as err:
            _LOGGER.error("Can't build %s -> %s", build_tag, err)
@@ -171,3 +179,74 @@ class DockerAddon(DockerBase):

        finally:
            shutil.rmtree(str(build_dir), ignore_errors=True)

    async def export_image(self, path):
        """Export current images into a tar file."""
        if self._lock.locked():
            _LOGGER.error("Can't execute export while a task is in progress")
            return False

        async with self._lock:
            return await self.loop.run_in_executor(
                None, self._export_image, path)

    def _export_image(self, tar_file):
        """Export current images into a tar file.

        Need run inside executor.
        """
        try:
            image = self.dock.api.get_image(self.image)
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't fetch image %s -> %s", self.image, err)
            return False

        try:
            with tar_file.open("wb") as write_tar:
                for chunk in image.stream():
                    write_tar.write(chunk)
        except (OSError, requests.exceptions.ReadTimeout) as err:
            _LOGGER.error("Can't write tar file %s -> %s", tar_file, err)
            return False

        _LOGGER.info("Export image %s to %s", self.image, tar_file)
        return True

    async def import_image(self, path, tag):
        """Import a tar file as image."""
        if self._lock.locked():
            _LOGGER.error("Can't execute import while a task is in progress")
            return False

        async with self._lock:
            return await self.loop.run_in_executor(
                None, self._import_image, path, tag)

    def _import_image(self, tar_file, tag):
        """Import a tar file as image.

        Need run inside executor.
        """
        try:
            with tar_file.open("rb") as read_tar:
                self.dock.api.load_image(read_tar)

            image = self.dock.images.get(self.image)
            image.tag(self.image, tag=tag)
        except (docker.errors.DockerException, OSError) as err:
            _LOGGER.error("Can't import image %s -> %s", self.image, err)
            return False

        _LOGGER.info("Import image %s and tag %s", tar_file, tag)
        self.process_metadata(image.attrs, force=True)
        self._cleanup()
        return True

    def _restart(self):
        """Restart docker container.

        Addons prepare some things on start that are normally not repeatable.
        Need run inside executor.
        """
        self._stop()
        return self._run()
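Editor's note: the `volumes` property above now derives bind mounts from the add-on's own `map_volumes` data instead of one helper per mapping. A hypothetical result for an add-on that maps `config` read-write and `ssl` read-only (the host paths are made up):

    volumes = {
        '/usr/share/hassio/addons/data/example': {'bind': '/data', 'mode': 'rw'},
        '/usr/share/hassio/homeassistant': {'bind': '/config', 'mode': 'rw'},
        '/usr/share/hassio/ssl': {'bind': '/ssl', 'mode': 'ro'},
    }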
hassio/dock/homeassistant.py
@@ -4,7 +4,6 @@ import logging
import docker

from . import DockerBase
from ..tools import get_version_from_env

_LOGGER = logging.getLogger(__name__)

@@ -14,15 +13,28 @@ HASS_DOCKER_NAME = 'homeassistant'
class DockerHomeAssistant(DockerBase):
    """Docker hassio wrapper for HomeAssistant."""

    def __init__(self, config, loop, dock):
    def __init__(self, config, loop, dock, data):
        """Initialize docker homeassistant wrapper."""
        super().__init__(config, loop, dock, image=config.homeassistant_image)
        super().__init__(config, loop, dock, image=data.image)
        self.data = data

    @property
    def docker_name(self):
    def name(self):
        """Return name of docker container."""
        return HASS_DOCKER_NAME

    @property
    def devices(self):
        """Create list of special devices to map into docker."""
        if not self.data.devices:
            return

        devices = []
        for device in self.data.devices:
            devices.append("/dev/{0}:/dev/{0}:rwm".format(device))

        return devices

    def _run(self):
        """Run docker image.

@@ -31,47 +43,34 @@ class DockerHomeAssistant(DockerBase):
        if self._is_running():
            return

        # cleanup old container
        # cleanup
        self._stop()

        try:
            self.container = self.dock.containers.run(
            self.dock.containers.run(
                self.image,
                name=self.docker_name,
                name=self.name,
                detach=True,
                privileged=True,
                devices=self.devices,
                network_mode='host',
                environment={
                    'HASSIO': self.config.api_endpoint,
                    'TZ': self.config.timezone,
                },
                volumes={
                    str(self.config.path_extern_config):
                        {'bind': '/config', 'mode': 'rw'},
                    str(self.config.path_extern_ssl):
                        {'bind': '/ssl', 'mode': 'rw'},
                        {'bind': '/ssl', 'mode': 'ro'},
                    str(self.config.path_extern_share):
                        {'bind': '/share', 'mode': 'rw'},
                })

            self.version = get_version_from_env(
                self.container.attrs['Config']['Env'])

            _LOGGER.info("Start docker addon %s with version %s",
                         self.image, self.version)

        except docker.errors.DockerException as err:
            _LOGGER.error("Can't run %s -> %s", self.image, err)
            return False

        _LOGGER.info(
            "Start homeassistant %s with version %s", self.image, self.version)
        return True

    async def update(self, tag):
        """Update homeassistant docker image."""
        if self._lock.locked():
            _LOGGER.error("Can't execute update while a task is in progress")
            return False

        async with self._lock:
            if await self.loop.run_in_executor(None, self._update, tag):
                await self.loop.run_in_executor(None, self._run)
                return True

            return False
hassio/dock/supervisor.py
@@ -2,8 +2,6 @@
import logging
import os

import docker

from . import DockerBase
from ..const import RESTART_EXIT_CODE

@@ -13,14 +11,13 @@ _LOGGER = logging.getLogger(__name__)
class DockerSupervisor(DockerBase):
    """Docker hassio wrapper for the supervisor."""

    def __init__(self, config, loop, dock, hassio, image=None):
    def __init__(self, config, loop, dock, stop_callback, image=None):
        """Initialize docker base wrapper."""
        super().__init__(config, loop, dock, image=image)

        self.hassio = hassio
        self.stop_callback = stop_callback

    @property
    def docker_name(self):
    def name(self):
        """Return name of docker container."""
        return os.environ['SUPERVISOR_NAME']

@@ -31,41 +28,14 @@ class DockerSupervisor(DockerBase):
            return False

        _LOGGER.info("Update supervisor docker to %s:%s", self.image, tag)
        old_version = self.version

        async with self._lock:
            if await self.loop.run_in_executor(None, self._install, tag):
                self.config.hassio_cleanup = old_version
                self.loop.create_task(self.hassio.stop(RESTART_EXIT_CODE))
                self.loop.create_task(self.stop_callback(RESTART_EXIT_CODE))
                return True

            return False

    async def cleanup(self):
        """Check if old supervisor version exists and cleanup."""
        if not self.config.hassio_cleanup:
            return

        async with self._lock:
            if await self.loop.run_in_executor(None, self._cleanup):
                self.config.hassio_cleanup = None

    def _cleanup(self):
        """Remove old image.

        Need run inside executor.
        """
        old_image = "{}:{}".format(self.image, self.config.hassio_cleanup)

        _LOGGER.info("Old supervisor docker found %s", old_image)
        try:
            self.dock.images.remove(image=old_image, force=True)
        except docker.errors.DockerException as err:
            _LOGGER.warning("Can't remove old image %s -> %s", old_image, err)
            return False

        return True

    async def run(self):
        """Run docker image."""
        raise RuntimeError("Not supported on supervisor docker container!")
hassio/dock/util.py
@@ -4,29 +4,39 @@ import re
from ..const import ARCH_AARCH64, ARCH_ARMHF, ARCH_I386, ARCH_AMD64


RESIN_BASE_IMAGE = {
    ARCH_ARMHF: "resin/armhf-alpine:3.5",
    ARCH_AARCH64: "resin/aarch64-alpine:3.5",
    ARCH_I386: "resin/i386-alpine:3.5",
    ARCH_AMD64: "resin/amd64-alpine:3.5",
HASSIO_BASE_IMAGE = {
    ARCH_ARMHF: "homeassistant/armhf-base:latest",
    ARCH_AARCH64: "homeassistant/aarch64-base:latest",
    ARCH_I386: "homeassistant/i386-base:latest",
    ARCH_AMD64: "homeassistant/amd64-base:latest",
}

TMPL_VERSION = re.compile(r"%%VERSION%%")
TMPL_IMAGE = re.compile(r"%%BASE_IMAGE%%")


def dockerfile_template(dockerfile, arch, version):
def dockerfile_template(dockerfile, arch, version, meta_type):
    """Prepare a Hass.IO dockerfile."""
    buff = []
    resin_image = RESIN_BASE_IMAGE[arch]
    hassio_image = HASSIO_BASE_IMAGE[arch]
    custom_image = re.compile(r"^#{}:FROM".format(arch))

    # read docker
    with dockerfile.open('r') as dock_input:
        for line in dock_input:
            line = TMPL_VERSION.sub(version, line)
            line = TMPL_IMAGE.sub(resin_image, line)
            line = TMPL_IMAGE.sub(hassio_image, line)
            line = custom_image.sub("FROM", line)
            buff.append(line)

    # add metadata
    buff.append(create_metadata(version, arch, meta_type))

    # write docker
    with dockerfile.open('w') as dock_output:
        dock_output.writelines(buff)


def create_metadata(version, arch, meta_type):
    """Generate docker label layer for hassio."""
    return ('LABEL io.hass.version="{}" '
            'io.hass.arch="{}" '
            'io.hass.type="{}"').format(version, arch, meta_type)
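Editor's note: to make the template step concrete, here is a sketch of what dockerfile_template() does for arch `amd64`, version `1.1`, and META_ADDON: it rewrites the placeholder FROM line and appends the label layer from create_metadata at the end of the file (the input line is a hypothetical add-on Dockerfile line):

    FROM %%BASE_IMAGE%%                                                       # input
    FROM homeassistant/amd64-base:latest                                      # after substitution
    LABEL io.hass.version="1.1" io.hass.arch="amd64" io.hass.type="addon"     # appended at end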
162 hassio/homeassistant.py (Normal file)
@@ -0,0 +1,162 @@
"""HomeAssistant control object."""
import asyncio
import logging
import os

from .const import (
    FILE_HASSIO_HOMEASSISTANT, ATTR_DEVICES, ATTR_IMAGE, ATTR_LAST_VERSION,
    ATTR_VERSION)
from .dock.homeassistant import DockerHomeAssistant
from .tools import JsonConfig
from .validate import SCHEMA_HASS_CONFIG

_LOGGER = logging.getLogger(__name__)


class HomeAssistant(JsonConfig):
    """Hass core object for handling it."""

    def __init__(self, config, loop, dock, websession):
        """Initialize hass object."""
        super().__init__(FILE_HASSIO_HOMEASSISTANT, SCHEMA_HASS_CONFIG)
        self.config = config
        self.loop = loop
        self.websession = websession
        self.docker = DockerHomeAssistant(config, loop, dock, self)

    async def prepare(self):
        """Prepare HomeAssistant object."""
        if not await self.docker.exists():
            _LOGGER.info("No HomeAssistant docker %s found.", self.image)
            if self.is_custom_image:
                await self.install()
            else:
                await self.install_landingpage()
        else:
            await self.docker.attach()

    @property
    def version(self):
        """Return version of running homeassistant."""
        return self.docker.version

    @property
    def last_version(self):
        """Return last available version of homeassistant."""
        if self.is_custom_image:
            return self._data.get(ATTR_LAST_VERSION)
        return self.config.last_homeassistant

    @property
    def image(self):
        """Return image name of hass container."""
        if ATTR_IMAGE in self._data:
            return self._data[ATTR_IMAGE]
        return os.environ['HOMEASSISTANT_REPOSITORY']

    @property
    def is_custom_image(self):
        """Return True if a custom image is used."""
        return ATTR_IMAGE in self._data

    @property
    def devices(self):
        """Return extended device mapping."""
        return self._data[ATTR_DEVICES]

    @devices.setter
    def devices(self, value):
        """Set extended device mapping."""
        self._data[ATTR_DEVICES] = value
        self.save()

    def set_custom(self, image, version):
        """Set a custom image for homeassistant."""
        # reset
        if image is None and version is None:
            self._data.pop(ATTR_IMAGE, None)
            self._data.pop(ATTR_VERSION, None)

            self.docker.image = self.image
        else:
            if image:
                self._data[ATTR_IMAGE] = image
                self.docker.image = image
            if version:
                self._data[ATTR_VERSION] = version
        self.save()

    async def install_landingpage(self):
        """Install a landingpage."""
        _LOGGER.info("Setup HomeAssistant landingpage")
        while True:
            if await self.docker.install('landingpage'):
                break
            _LOGGER.warning("Failed to install landingpage, retry after 60sec")
            await asyncio.sleep(60, loop=self.loop)

    async def install(self):
        """Install HomeAssistant."""
        _LOGGER.info("Setup HomeAssistant")
        while True:
            # read homeassistant tag and install it
            if not self.last_version:
                await self.config.fetch_update_infos(self.websession)

            tag = self.last_version
            if tag and await self.docker.install(tag):
                break
            _LOGGER.warning("Error on install HomeAssistant. Retry in 60sec")
            await asyncio.sleep(60, loop=self.loop)

        # store version
        _LOGGER.info("HomeAssistant docker now installed")
        await self.docker.cleanup()

    def update(self, version=None):
        """Update HomeAssistant version.

        Return a coroutine.
        """
        version = version or self.last_version
        return self.docker.update(version)

    def run(self):
        """Run HomeAssistant docker.

        Return a coroutine.
        """
        return self.docker.run()

    def stop(self):
        """Stop HomeAssistant docker.

        Return a coroutine.
        """
        return self.docker.stop()

    def restart(self):
        """Restart HomeAssistant docker.

        Return a coroutine.
        """
        return self.docker.restart()

    def logs(self):
        """Get HomeAssistant docker logs.

        Return a coroutine.
        """
        return self.docker.logs()

    def is_running(self):
        """Return True if docker container is running.

        Return a coroutine.
        """
        return self.docker.is_running()

    @property
    def in_progress(self):
        """Return True if a task is in progress."""
        return self.docker.in_progress
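Editor's note: set_custom above is the hook the snapshot restore path (Snapshot.restore_homeassistant, further down) uses to pin a custom image. A hypothetical direct usage, with a made-up image name and version:

    # pin a custom build at a specific version
    homeassistant.set_custom('myorg/custom-homeassistant', '0.48.1')

    # reset back to the default image from HOMEASSISTANT_REPOSITORY
    homeassistant.set_custom(None, None)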
hassio/host_control.py
@@ -17,6 +17,7 @@ UNKNOWN = 'unknown'
FEATURES_SHUTDOWN = 'shutdown'
FEATURES_REBOOT = 'reboot'
FEATURES_UPDATE = 'update'
FEATURES_HOSTNAME = 'hostname'
FEATURES_NETWORK_INFO = 'network_info'
FEATURES_NETWORK_CONTROL = 'network_control'

@@ -117,3 +118,7 @@ class HostControl(object):
        if version:
            return self._send_command("update {}".format(version))
        return self._send_command("update")

    def set_hostname(self, hostname):
        """Update hostname on host."""
        return self._send_command("hostname {}".format(hostname))
File diff suppressed because one or more lines are too long
Binary file not shown.
hassio/scheduler.py
@@ -16,11 +16,7 @@ class Scheduler(object):
        """Initialize task schedule."""
        self.loop = loop
        self._data = {}
        self._stop = False

    def stop(self):
        """Stop to execute tasks in scheduler."""
        self._stop = True
        self.suspend = False

    def register_task(self, coro_callback, seconds, repeat=True,
                      now=False):
@@ -51,11 +47,8 @@
        """Run a scheduled task."""
        data = self._data.pop(idx)

        # stop execute tasks
        if self._stop:
            return

        self.loop.create_task(data[CALL]())
        if not self.suspend:
            self.loop.create_task(data[CALL]())

        if data[REPEAT]:
            task = self.loop.call_later(data[SEC], self._run_task, idx)
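Editor's note: the scheduler drops the one-way `stop()` flag in favour of a resumable `suspend` toggle, which the snapshot manager below flips around long-running operations. A minimal usage sketch, assuming a `scheduler` instance and a hypothetical long-running coroutine:

    scheduler.suspend = True       # tasks keep rescheduling but are not run
    try:
        await do_long_operation()  # e.g. a snapshot or restore
    finally:
        scheduler.suspend = False  # resume normal task execution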
310 hassio/snapshots/__init__.py (Normal file)
@@ -0,0 +1,310 @@
"""Snapshot system control."""
import asyncio
from datetime import datetime
import logging
from pathlib import Path
import tarfile

from .snapshot import Snapshot
from .util import create_slug
from ..const import (
    ATTR_SLUG, FOLDER_HOMEASSISTANT, SNAPSHOT_FULL, SNAPSHOT_PARTIAL)

_LOGGER = logging.getLogger(__name__)


class SnapshotsManager(object):
    """Manage snapshots."""

    def __init__(self, config, loop, sheduler, addons, homeassistant):
        """Initialize a snapshot manager."""
        self.config = config
        self.loop = loop
        self.sheduler = sheduler
        self.addons = addons
        self.homeassistant = homeassistant
        self.snapshots = {}
        self._lock = asyncio.Lock(loop=loop)

    @property
    def list_snapshots(self):
        """Return a list of all snapshot objects."""
        return set(self.snapshots.values())

    def get(self, slug):
        """Return snapshot object."""
        return self.snapshots.get(slug)

    def _create_snapshot(self, name, sys_type):
        """Initialize a new snapshot object from name."""
        date_str = datetime.utcnow().isoformat()
        slug = create_slug(name, date_str)
        tar_file = Path(self.config.path_backup, "{}.tar".format(slug))

        # init object
        snapshot = Snapshot(self.config, self.loop, tar_file)
        snapshot.create(slug, name, date_str, sys_type)

        # set general data
        snapshot.snapshot_homeassistant(self.homeassistant)
        snapshot.repositories = self.config.addons_repositories

        return snapshot

    async def reload(self):
        """Load existing backups."""
        self.snapshots = {}

        async def _load_snapshot(tar_file):
            """Internal function to load snapshot."""
            snapshot = Snapshot(self.config, self.loop, tar_file)
            if await snapshot.load():
                self.snapshots[snapshot.slug] = snapshot

        tasks = [_load_snapshot(tar_file) for tar_file in
                 self.config.path_backup.glob("*.tar")]

        _LOGGER.info("Found %d snapshot files", len(tasks))
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)

    def remove(self, snapshot):
        """Remove a snapshot."""
        try:
            snapshot.tar_file.unlink()
            self.snapshots.pop(snapshot.slug, None)
        except OSError as err:
            _LOGGER.error("Can't remove snapshot %s -> %s", snapshot.slug, err)
            return False

        return True

    async def do_snapshot_full(self, name=""):
        """Create a full snapshot."""
        if self._lock.locked():
            _LOGGER.error("A snapshot/restore process is already running")
            return False

        snapshot = self._create_snapshot(name, SNAPSHOT_FULL)
        _LOGGER.info("Full-Snapshot %s start", snapshot.slug)
        try:
            self.sheduler.suspend = True
            await self._lock.acquire()

            async with snapshot:
                # snapshot addons
                tasks = []
                for addon in self.addons.list_addons:
                    if not addon.is_installed:
                        continue
                    tasks.append(snapshot.import_addon(addon))

                if tasks:
                    _LOGGER.info("Full-Snapshot %s run %d addons",
                                 snapshot.slug, len(tasks))
                    await asyncio.wait(tasks, loop=self.loop)

                # snapshot folders
                _LOGGER.info("Full-Snapshot %s store folders", snapshot.slug)
                await snapshot.store_folders()

            _LOGGER.info("Full-Snapshot %s done", snapshot.slug)
            self.snapshots[snapshot.slug] = snapshot
            return True

        except (OSError, ValueError, tarfile.TarError) as err:
            _LOGGER.info("Full-Snapshot %s error -> %s", snapshot.slug, err)
            return False

        finally:
            self.sheduler.suspend = False
            self._lock.release()

    async def do_snapshot_partial(self, name="", addons=None, folders=None):
        """Create a partial snapshot."""
        if self._lock.locked():
            _LOGGER.error("A snapshot/restore process is already running")
            return False

        addons = addons or []
        folders = folders or []
        snapshot = self._create_snapshot(name, SNAPSHOT_PARTIAL)

        _LOGGER.info("Partial-Snapshot %s start", snapshot.slug)
        try:
            self.sheduler.suspend = True
            await self._lock.acquire()

            async with snapshot:
                # snapshot addons
                tasks = []
                for slug in addons:
                    addon = self.addons.get(slug)
                    if addon.is_installed:
                        tasks.append(snapshot.import_addon(addon))

                if tasks:
                    _LOGGER.info("Partial-Snapshot %s run %d addons",
                                 snapshot.slug, len(tasks))
                    await asyncio.wait(tasks, loop=self.loop)

                # snapshot folders
                _LOGGER.info("Partial-Snapshot %s store folders %s",
                             snapshot.slug, folders)
                await snapshot.store_folders(folders)

            _LOGGER.info("Partial-Snapshot %s done", snapshot.slug)
            self.snapshots[snapshot.slug] = snapshot
            return True

        except (OSError, ValueError, tarfile.TarError) as err:
            _LOGGER.info("Partial-Snapshot %s error -> %s", snapshot.slug, err)
            return False

        finally:
            self.sheduler.suspend = False
            self._lock.release()

    async def do_restore_full(self, snapshot):
        """Restore a snapshot."""
        if self._lock.locked():
            _LOGGER.error("A snapshot/restore process is already running")
            return False

        if snapshot.sys_type != SNAPSHOT_FULL:
            _LOGGER.error(
                "Full-Restore %s is only a partial snapshot!", snapshot.slug)
            return False

        _LOGGER.info("Full-Restore %s start", snapshot.slug)
        try:
            self.sheduler.suspend = True
            await self._lock.acquire()

            async with snapshot:
                # stop system
                tasks = []
                tasks.append(self.homeassistant.stop())

                for addon in self.addons.list_addons:
                    if addon.is_installed:
                        tasks.append(addon.stop())

                await asyncio.wait(tasks, loop=self.loop)

                # restore folders
                _LOGGER.info("Full-Restore %s restore folders", snapshot.slug)
                await snapshot.restore_folders()

                # start homeassistant restore
                snapshot.restore_homeassistant(self.homeassistant)
                task_hass = self.loop.create_task(
                    self.homeassistant.update(snapshot.homeassistant_version))

                # restore repositories
                await self.addons.load_repositories(snapshot.repositories)

                # restore addons
                tasks = []
                actual_addons = \
                    set(addon.slug for addon in self.addons.list_addons
                        if addon.is_installed)
                restore_addons = \
                    set(data[ATTR_SLUG] for data in snapshot.addons)
                remove_addons = actual_addons - restore_addons

                _LOGGER.info("Full-Restore %s restore addons %s, remove %s",
                             snapshot.slug, restore_addons, remove_addons)

                for slug in remove_addons:
                    addon = self.addons.get(slug)
                    if addon:
                        tasks.append(addon.uninstall())
                    else:
                        _LOGGER.warning("Can't remove addon %s", slug)

                for slug in restore_addons:
                    addon = self.addons.get(slug)
                    if addon:
                        tasks.append(snapshot.export_addon(addon))
                    else:
                        _LOGGER.warning("Can't restore addon %s", slug)

                if tasks:
                    _LOGGER.info("Full-Restore %s restore addons tasks %d",
                                 snapshot.slug, len(tasks))
                    await asyncio.wait(tasks, loop=self.loop)

                # finish homeassistant task
                _LOGGER.info("Full-Restore %s wait until homeassistant ready",
                             snapshot.slug)
                await task_hass
                await self.homeassistant.run()

            _LOGGER.info("Full-Restore %s done", snapshot.slug)
            return True

        except (OSError, ValueError, tarfile.TarError) as err:
            _LOGGER.info("Full-Restore %s error -> %s", snapshot.slug, err)
            return False

        finally:
            self.sheduler.suspend = False
            self._lock.release()

    async def do_restore_partial(self, snapshot, homeassistant=False,
                                 addons=None, folders=None):
        """Restore a snapshot."""
        if self._lock.locked():
            _LOGGER.error("A snapshot/restore process is already running")
            return False

        addons = addons or []
        folders = folders or []

        _LOGGER.info("Partial-Restore %s start", snapshot.slug)
        try:
            self.sheduler.suspend = True
            await self._lock.acquire()

            async with snapshot:
                tasks = []

                if FOLDER_HOMEASSISTANT in folders:
                    await self.homeassistant.stop()

                if folders:
                    _LOGGER.info("Partial-Restore %s restore folders %s",
                                 snapshot.slug, folders)
                    await snapshot.restore_folders(folders)

                if homeassistant:
                    snapshot.restore_homeassistant(self.homeassistant)
                    tasks.append(self.homeassistant.update(
                        snapshot.homeassistant_version))

                for slug in addons:
                    addon = self.addons.get(slug)
                    if addon:
                        tasks.append(snapshot.export_addon(addon))
                    else:
                        _LOGGER.warning("Can't restore addon %s", slug)

                if tasks:
                    _LOGGER.info("Partial-Restore %s run %d tasks",
                                 snapshot.slug, len(tasks))
                    await asyncio.wait(tasks, loop=self.loop)

                # make sure homeassistant runs again
                await self.homeassistant.run()

            _LOGGER.info("Partial-Restore %s done", snapshot.slug)
            return True

        except (OSError, ValueError, tarfile.TarError) as err:
            _LOGGER.info("Partial-Restore %s error -> %s", snapshot.slug, err)
            return False

        finally:
            self.sheduler.suspend = False
            self._lock.release()
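Editor's note: a hypothetical round trip against the manager (the `snapshots` instance is the one core.py above wires up; the snapshot name is made up):

    # create a full snapshot, then restore from it later
    if await snapshots.do_snapshot_full(name="before-upgrade"):
        snapshot = next(iter(snapshots.list_snapshots))
        await snapshots.do_restore_full(snapshot)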
300 hassio/snapshots/snapshot.py (Normal file)
@@ -0,0 +1,300 @@
|
||||
"""Represent a snapshot file."""
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
from pathlib import Path
|
||||
import tarfile
|
||||
from tempfile import TemporaryDirectory
|
||||
|
||||
import voluptuous as vol
|
||||
from voluptuous.humanize import humanize_error
|
||||
|
||||
from .validate import SCHEMA_SNAPSHOT, ALL_FOLDERS
|
||||
from .util import remove_folder
|
||||
from ..const import (
|
||||
ATTR_SLUG, ATTR_NAME, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
|
||||
ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_VERSION, ATTR_TYPE, ATTR_DEVICES,
|
||||
ATTR_IMAGE)
|
||||
from ..tools import write_json_file
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Snapshot(object):
|
||||
"""A signle hassio snapshot."""
|
||||
|
||||
def __init__(self, config, loop, tar_file):
|
||||
"""Initialize a snapshot."""
|
||||
self.loop = loop
|
||||
self.config = config
|
||||
self.tar_file = tar_file
|
||||
self._data = {}
|
||||
self._tmp = None
|
||||
|
||||
@property
|
||||
def slug(self):
|
||||
"""Return snapshot slug."""
|
||||
return self._data.get(ATTR_SLUG)
|
||||
|
||||
@property
|
||||
def sys_type(self):
|
||||
"""Return snapshot type."""
|
||||
return self._data.get(ATTR_TYPE)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return snapshot name."""
|
||||
return self._data[ATTR_NAME]
|
||||
|
||||
@property
|
||||
def date(self):
|
||||
"""Return snapshot date."""
|
||||
return self._data[ATTR_DATE]
|
||||
|
||||
@property
|
||||
def addons(self):
|
||||
"""Return snapshot date."""
|
||||
return self._data[ATTR_ADDONS]
|
||||
|
||||
@property
|
||||
def folders(self):
|
||||
"""Return list of saved folders."""
|
||||
return self._data[ATTR_FOLDERS]
|
||||
|
||||
@property
|
||||
def repositories(self):
|
||||
"""Return snapshot date."""
|
||||
return self._data[ATTR_REPOSITORIES]
|
||||
|
||||
@repositories.setter
|
||||
def repositories(self, value):
|
||||
"""Set snapshot date."""
|
||||
self._data[ATTR_REPOSITORIES] = value
|
||||
|
||||
@property
|
||||
def homeassistant_version(self):
|
||||
"""Return snapshot homeassistant version."""
|
||||
return self._data[ATTR_HOMEASSISTANT].get(ATTR_VERSION)
|
||||
|
||||
@homeassistant_version.setter
|
||||
def homeassistant_version(self, value):
|
||||
"""Set snapshot homeassistant version."""
|
||||
self._data[ATTR_HOMEASSISTANT][ATTR_VERSION] = value
|
||||
|
||||
@property
|
||||
def homeassistant_devices(self):
|
||||
"""Return snapshot homeassistant devices."""
|
||||
return self._data[ATTR_HOMEASSISTANT].get(ATTR_DEVICES)
|
||||
|
||||
@homeassistant_devices.setter
|
||||
def homeassistant_devices(self, value):
|
||||
"""Set snapshot homeassistant devices."""
|
||||
self._data[ATTR_HOMEASSISTANT][ATTR_DEVICES] = value
|
||||
|
||||
@property
|
||||
def homeassistant_image(self):
|
||||
"""Return snapshot homeassistant custom image."""
|
||||
return self._data[ATTR_HOMEASSISTANT].get(ATTR_IMAGE)
|
||||
|
||||
@homeassistant_image.setter
|
||||
def homeassistant_image(self, value):
|
||||
"""Set snapshot homeassistant custom image."""
|
||||
self._data[ATTR_HOMEASSISTANT][ATTR_IMAGE] = value
|
||||
|
||||
@property
|
||||
def size(self):
|
||||
"""Return snapshot size."""
|
||||
if not self.tar_file.is_file():
|
||||
return 0
|
||||
return self.tar_file.stat().st_size / 1048576 # calc mbyte
|
||||
|
||||
def create(self, slug, name, date, sys_type):
|
||||
"""Initialize a new snapshot."""
|
||||
# init metadata
|
||||
self._data[ATTR_SLUG] = slug
|
||||
self._data[ATTR_NAME] = name
|
||||
self._data[ATTR_DATE] = date
|
||||
self._data[ATTR_TYPE] = sys_type
|
||||
|
||||
# init other constructs
|
||||
self._data[ATTR_HOMEASSISTANT] = {}
|
||||
self._data[ATTR_ADDONS] = []
|
||||
self._data[ATTR_REPOSITORIES] = []
|
||||
self._data[ATTR_FOLDERS] = []
|
||||
|
||||
def snapshot_homeassistant(self, homeassistant):
|
||||
"""Read all data from homeassistant object."""
|
||||
self.homeassistant_version = homeassistant.version
|
||||
self.homeassistant_devices = homeassistant.devices
|
||||
|
||||
# custom image
|
||||
if homeassistant.is_custom_image:
|
||||
self.homeassistant_image = homeassistant.image
|
||||
|
||||
def restore_homeassistant(self, homeassistant):
|
||||
"""Write all data to homeassistant object."""
|
||||
homeassistant.devices = self.homeassistant_devices
|
||||
|
||||
# custom image
|
||||
if self.homeassistant_image:
|
||||
homeassistant.set_custom(
|
||||
self.homeassistant_image, self.homeassistant_version)
|
||||
|
||||
async def load(self):
|
||||
"""Read snapshot.json from tar file."""
|
||||
if not self.tar_file.is_file():
|
||||
_LOGGER.error("No tarfile %s", self.tar_file)
|
||||
return False
|
||||
|
||||
def _load_file():
|
||||
"""Read snapshot.json."""
|
||||
with tarfile.open(self.tar_file, "r:") as snapshot:
|
||||
json_file = snapshot.extractfile("./snapshot.json")
|
||||
return json_file.read()
|
||||
|
||||
# read snapshot.json
|
||||
try:
|
||||
raw = await self.loop.run_in_executor(None, _load_file)
|
||||
except (tarfile.TarError, KeyError) as err:
|
||||
_LOGGER.error(
|
||||
"Can't read snapshot tarfile %s -> %s", self.tar_file, err)
|
||||
return False
|
||||
|
||||
# parse data
|
||||
try:
|
||||
raw_dict = json.loads(raw)
|
||||
except json.JSONDecodeError as err:
|
||||
_LOGGER.error("Can't read data for %s -> %s", self.tar_file, err)
|
||||
return False
|
||||
|
||||
# validate
|
||||
try:
|
||||
self._data = SCHEMA_SNAPSHOT(raw_dict)
|
||||
except vol.Invalid as err:
|
||||
_LOGGER.error("Can't validate data for %s -> %s", self.tar_file,
|
||||
humanize_error(raw_dict, err))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
async def __aenter__(self):
|
||||
"""Async context to open a snapshot."""
|
||||
self._tmp = TemporaryDirectory(dir=str(self.config.path_tmp))
|
||||
|
||||
# create a snapshot
|
||||
if not self.tar_file.is_file():
|
||||
return self
|
||||
|
||||
# extract a exists snapshot
|
||||
def _extract_snapshot():
|
||||
"""Extract a snapshot."""
|
||||
with tarfile.open(self.tar_file, "r:") as tar:
|
||||
tar.extractall(path=self._tmp.name)
|
||||
|
||||
await self.loop.run_in_executor(None, _extract_snapshot)
|
||||
|
||||
async def __aexit__(self, exception_type, exception_value, traceback):
|
||||
"""Async context to close a snapshot."""
|
||||
# exists snapshot or exception on build
|
||||
if self.tar_file.is_file() or exception_type is not None:
|
||||
return self._tmp.cleanup()
|
||||
|
||||
# validate data
|
||||
try:
|
||||
self._data = SCHEMA_SNAPSHOT(self._data)
|
||||
except vol.Invalid as err:
|
||||
_LOGGER.error("Invalid data for %s -> %s", self.tar_file,
|
||||
humanize_error(self._data, err))
|
||||
raise ValueError("Invalid config") from None
|
||||
|
||||
# new snapshot, build it
|
||||
def _create_snapshot():
|
||||
"""Create a new snapshot."""
|
||||
with tarfile.open(self.tar_file, "w:") as tar:
|
||||
tar.add(self._tmp.name, arcname=".")
|
||||
|
||||
if write_json_file(Path(self._tmp.name, "snapshot.json"), self._data):
|
||||
await self.loop.run_in_executor(None, _create_snapshot)
|
||||
else:
|
||||
_LOGGER.error("Can't write snapshot.json")
|
||||
|
||||
self._tmp.cleanup()
|
||||
self._tmp = None
|
||||
|
||||
    async def import_addon(self, addon):
        """Add an add-on into the snapshot."""
        snapshot_file = Path(self._tmp.name, "{}.tar.gz".format(addon.slug))

        if not await addon.snapshot(snapshot_file):
            _LOGGER.error("Can't make snapshot from %s", addon.slug)
            return False

        # store to config
        self._data[ATTR_ADDONS].append({
            ATTR_SLUG: addon.slug,
            ATTR_NAME: addon.name,
            ATTR_VERSION: addon.version_installed,
        })

        return True

    async def export_addon(self, addon):
        """Restore an add-on from the snapshot."""
        snapshot_file = Path(self._tmp.name, "{}.tar.gz".format(addon.slug))

        if not await addon.restore(snapshot_file):
            _LOGGER.error("Can't restore snapshot for %s", addon.slug)
            return False

        return True
    async def store_folders(self, folder_list=None):
        """Backup hassio data into the snapshot."""
        folder_list = folder_list or ALL_FOLDERS

        def _folder_save(name):
            """Internal function to snapshot a folder."""
            slug_name = name.replace("/", "_")
            snapshot_tar = Path(self._tmp.name, "{}.tar.gz".format(slug_name))
            origin_dir = Path(self.config.path_hassio, name)

            try:
                with tarfile.open(snapshot_tar, "w:gz",
                                  compresslevel=1) as tar_file:
                    tar_file.add(origin_dir, arcname=".")

                self._data[ATTR_FOLDERS].append(name)
            except tarfile.TarError as err:
                _LOGGER.warning("Can't snapshot folder %s -> %s", name, err)

        # run tasks
        tasks = [self.loop.run_in_executor(None, _folder_save, folder)
                 for folder in folder_list]
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)

    async def restore_folders(self, folder_list=None):
        """Restore hassio data folders from the snapshot."""
        folder_list = folder_list or ALL_FOLDERS

        def _folder_restore(name):
            """Internal function to restore a folder."""
            slug_name = name.replace("/", "_")
            snapshot_tar = Path(self._tmp.name, "{}.tar.gz".format(slug_name))
            origin_dir = Path(self.config.path_hassio, name)

            # clean old stuff
            if origin_dir.is_dir():
                remove_folder(origin_dir)

            try:
                with tarfile.open(snapshot_tar, "r:gz") as tar_file:
                    tar_file.extractall(path=origin_dir)
            except tarfile.TarError as err:
                _LOGGER.warning("Can't restore folder %s -> %s", name, err)

        # run tasks
        tasks = [self.loop.run_in_executor(None, _folder_restore, folder)
                 for folder in folder_list]
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)
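
Both methods fan the blocking tarfile work out to executor threads and gather the results with asyncio.wait, so folders are archived in parallel. A hedged sketch of a partial backup and restore, using the FOLDER_* constants that ALL_FOLDERS is built from (imported from hassio.const, not shown here):

# Illustrative partial backup; each folder becomes its own .tar.gz.
await snapshot.store_folders([FOLDER_SSL, FOLDER_SHARE])

# and symmetrically, when restoring the same snapshot later:
await snapshot.restore_folders([FOLDER_SSL, FOLDER_SHARE])
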

hassio/snapshots/util.py | 21 | Normal file
@@ -0,0 +1,21 @@
"""Util addons functions."""
import hashlib
import shutil


def create_slug(name, date_str):
    """Generate a hash from the snapshot name and date."""
    key = "{} - {}".format(date_str, name).lower().encode()
    return hashlib.sha1(key).hexdigest()[:8]


def remove_folder(folder):
    """Remove folder data but not the folder itself."""
    for obj in folder.iterdir():
        try:
            if obj.is_dir():
                shutil.rmtree(str(obj), ignore_errors=True)
            else:
                obj.unlink()
        except (OSError, shutil.Error):
            pass
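
create_slug is deterministic: the slug is the first eight hex digits of a SHA-1 over the lower-cased "date - name" string, so identical metadata always maps to the same id. An illustrative call (the input values are made up):

# Illustrative inputs; the result is stable for a given name/date pair.
slug = create_slug("My Backup", "2017-07-12T00:00:00")
assert len(slug) == 8  # 8 hex chars of the SHA-1 digest
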

hassio/snapshots/validate.py | 32 | Normal file
@@ -0,0 +1,32 @@
"""Validate some things around restore."""

import voluptuous as vol

from ..const import (
    ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_NAME, ATTR_SLUG, ATTR_DATE,
    ATTR_VERSION, ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_TYPE, ATTR_DEVICES,
    ATTR_IMAGE, FOLDER_SHARE, FOLDER_HOMEASSISTANT, FOLDER_ADDONS, FOLDER_SSL,
    SNAPSHOT_FULL, SNAPSHOT_PARTIAL)
from ..validate import HASS_DEVICES

ALL_FOLDERS = [FOLDER_HOMEASSISTANT, FOLDER_SHARE, FOLDER_ADDONS, FOLDER_SSL]

# pylint: disable=no-value-for-parameter
SCHEMA_SNAPSHOT = vol.Schema({
    vol.Required(ATTR_SLUG): vol.Coerce(str),
    vol.Required(ATTR_TYPE): vol.In([SNAPSHOT_FULL, SNAPSHOT_PARTIAL]),
    vol.Required(ATTR_NAME): vol.Coerce(str),
    vol.Required(ATTR_DATE): vol.Coerce(str),
    vol.Required(ATTR_HOMEASSISTANT): vol.Schema({
        vol.Required(ATTR_VERSION): vol.Coerce(str),
        vol.Optional(ATTR_DEVICES, default=[]): HASS_DEVICES,
        vol.Optional(ATTR_IMAGE): vol.Coerce(str),
    }),
    vol.Optional(ATTR_FOLDERS, default=[]): [vol.In(ALL_FOLDERS)],
    vol.Optional(ATTR_ADDONS, default=[]): [vol.Schema({
        vol.Required(ATTR_SLUG): vol.Coerce(str),
        vol.Required(ATTR_NAME): vol.Coerce(str),
        vol.Required(ATTR_VERSION): vol.Coerce(str),
    })],
    vol.Optional(ATTR_REPOSITORIES, default=[]): [vol.Url()],
}, extra=vol.ALLOW_EXTRA)
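
For reference, a dict shaped like the one below should pass SCHEMA_SNAPSHOT. This assumes the ATTR_* constants resolve to the lowercase key names shown, SNAPSHOT_FULL to "full", and the FOLDER_* constants to names like "ssl" and "share"; none of those string values appear in this diff, so treat them as assumptions:

# Hedged example payload; all concrete values are illustrative.
snapshot_json = {
    "slug": "8cf4b9e2",
    "type": "full",
    "name": "Backup",
    "date": "2017-07-12T00:00:00",
    "homeassistant": {"version": "0.49"},
    "folders": ["ssl", "share"],
    "addons": [{"slug": "core_samba", "name": "Samba", "version": "2.0"}],
    "repositories": ["https://github.com/hassio-addons/repository"],
}
SCHEMA_SNAPSHOT(snapshot_json)  # raises vol.Invalid if the shape is wrong
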
@@ -18,13 +18,38 @@ def api_sessions_cleanup(config):
     return _api_sessions_cleanup
 
 
-def hassio_update(config, supervisor):
+def addons_update(loop, addons):
+    """Create scheduler task for auto-updating add-ons."""
+    async def _addons_update():
+        """Check if an update is available for an add-on and update it."""
+        tasks = []
+        for addon in addons.list_addons:
+            if not addon.is_installed or not addon.auto_update:
+                continue
+
+            if addon.version_installed != addon.version:
+                tasks.append(addon.update())
+
+        if tasks:
+            _LOGGER.info("Addon auto update process %d tasks", len(tasks))
+            await asyncio.wait(tasks, loop=loop)
+
+    return _addons_update
+
+
+def hassio_update(config, supervisor, websession):
     """Create scheduler task for update of supervisor hassio."""
     async def _hassio_update():
         """Check and run update of supervisor hassio."""
+        await config.fetch_update_infos(websession)
         if config.last_hassio == supervisor.version:
             return
 
+        # don't perform an update on the beta/dev channel
+        if config.upstream_beta:
+            _LOGGER.warning("Ignore Hass.IO update on beta upstream!")
+            return
+
         _LOGGER.info("Found new HassIO version %s.", config.last_hassio)
         await supervisor.update(config.last_hassio)
 
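
Each factory returns a coroutine function that a scheduler can invoke on an interval. A hedged wiring sketch: the `scheduler.register_task(callback, seconds)` name and signature here are assumptions for illustration only and do not appear in this diff:

# Assumed scheduler API, shown only to make the factory pattern concrete.
scheduler.register_task(addons_update(loop, addons), 3600)
scheduler.register_task(hassio_update(config, supervisor, websession), 3600)
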
@@ -41,20 +66,3 @@ def homeassistant_watchdog(loop, homeassistant):
         loop.create_task(homeassistant.run())
 
     return _homeassistant_watchdog
-
-
-async def homeassistant_setup(config, loop, homeassistant):
-    """Install a homeassistant docker container."""
-    while True:
-        # read homeassistant tag and install it
-        if not config.last_homeassistant:
-            await config.fetch_update_infos()
-
-        tag = config.last_homeassistant
-        if tag and await homeassistant.install(tag):
-            break
-        _LOGGER.warning("Error on setup HomeAssistant. Retry in 60.")
-        await asyncio.sleep(60, loop=loop)
-
-    # store version
-    _LOGGER.info("HomeAssistant docker now installed.")
@@ -1,19 +1,21 @@
"""Tools file for HassIO."""
import asyncio
from contextlib import suppress
import json
import logging
import re
import socket

import aiohttp
import async_timeout
import pytz
import voluptuous as vol
from voluptuous.humanize import humanize_error

from .const import URL_HASSIO_VERSION, URL_HASSIO_VERSION_BETA

_LOGGER = logging.getLogger(__name__)

_RE_VERSION = re.compile(r"VERSION=(.*)")
_IMAGE_ARCH = re.compile(r".*/([a-z0-9]*)-hassio-supervisor")
FREEGEOIP_URL = "https://freegeoip.io/json/"


async def fetch_last_versions(websession, beta=False):
@@ -34,24 +36,6 @@ async def fetch_last_versions(websession, beta=False):
         _LOGGER.warning("Can't parse versions from %s! %s", url, err)
 
 
-def get_arch_from_image(image):
-    """Return arch from hassio image name."""
-    found = _IMAGE_ARCH.match(image)
-    if found:
-        return found.group(1)
-
-
-def get_version_from_env(env_list):
-    """Extract Version from ENV list."""
-    for env in env_list:
-        found = _RE_VERSION.match(env)
-        if found:
-            return found.group(1)
-
-    _LOGGER.error("Can't find VERSION in env")
-    return None
-
-
 def get_local_ip(loop):
     """Retrieve local IP address.
@@ -90,3 +74,69 @@ def read_json_file(jsonfile):
    """Read a json file and return a dict."""
    with jsonfile.open('r') as cfile:
        return json.loads(cfile.read())


def validate_timezone(timezone):
    """Validate voluptuous timezone."""
    try:
        pytz.timezone(timezone)
    except pytz.exceptions.UnknownTimeZoneError:
        raise vol.Invalid(
            "Invalid time zone passed in. Valid options can be found here: "
            "http://en.wikipedia.org/wiki/List_of_tz_database_time_zones") \
            from None

    return timezone


async def fetch_timezone(websession):
    """Read timezone from freegeoip."""
    data = {}
    with suppress(aiohttp.ClientError, asyncio.TimeoutError,
                  json.JSONDecodeError, KeyError):
        with async_timeout.timeout(10, loop=websession.loop):
            async with websession.get(FREEGEOIP_URL) as request:
                data = await request.json()

    return data.get('time_zone', 'UTC')


class JsonConfig(object):
    """Hass core object for handling a JSON config file."""

    def __init__(self, json_file, schema):
        """Initialize the JSON config object."""
        self._file = json_file
        self._schema = schema
        self._data = {}

        # init or load data
        if self._file.is_file():
            try:
                self._data = read_json_file(self._file)
            except (OSError, json.JSONDecodeError):
                _LOGGER.warning("Can't read %s", self._file)
                self._data = {}

        # validate
        try:
            self._data = self._schema(self._data)
        except vol.Invalid as ex:
            _LOGGER.error("Can't parse %s -> %s",
                          self._file, humanize_error(self._data, ex))

    def save(self):
        """Store data to config file."""
        # validate
        try:
            self._data = self._schema(self._data)
        except vol.Invalid as ex:
            _LOGGER.error("Can't parse data -> %s",
                          humanize_error(self._data, ex))
            return False

        # write
        if not write_json_file(self._file, self._data):
            _LOGGER.error("Can't store config in %s", self._file)
            return False
        return True
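
JsonConfig pairs a JSON file with a voluptuous schema: it loads and validates on construction and re-validates before every save. A minimal sketch of using it directly; the schema, key, and path below are illustrative, not part of this diff, and subclasses would normally expose properties instead of touching _data:

from pathlib import Path

import voluptuous as vol

# Illustrative schema and path; JsonConfig itself comes from the code above.
SCHEMA_EXAMPLE = vol.Schema({
    vol.Optional("timezone", default="UTC"): vol.Coerce(str),
})

config = JsonConfig(Path("/tmp/example.json"), SCHEMA_EXAMPLE)
config._data["timezone"] = "Europe/Vienna"  # subclasses wrap this access
config.save()  # validates against the schema, then writes JSON to disk
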

hassio/validate.py | 42 | Normal file
@@ -0,0 +1,42 @@
"""Validate functions."""
import voluptuous as vol

from .const import ATTR_DEVICES, ATTR_IMAGE, ATTR_LAST_VERSION


NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))
HASS_DEVICES = [vol.Match(r"^[^/]*$")]


def convert_to_docker_ports(data):
    """Convert data into docker port list."""
    # dynamic ports
    if data is None:
        return

    # single port
    if isinstance(data, int):
        return NETWORK_PORT(data)

    # port list
    if isinstance(data, list) and len(data) > 2:
        return vol.Schema([NETWORK_PORT])(data)

    # ip port mapping
    if isinstance(data, list) and len(data) == 2:
        return (vol.Coerce(str)(data[0]), NETWORK_PORT(data[1]))

    raise vol.Invalid("Can't validate docker host settings")


DOCKER_PORTS = vol.Schema({
    vol.All(vol.Coerce(str), vol.Match(r"^\d+(?:/tcp|/udp)?$")):
        convert_to_docker_ports,
})


SCHEMA_HASS_CONFIG = vol.Schema({
    vol.Optional(ATTR_DEVICES, default=[]): HASS_DEVICES,
    vol.Inclusive(ATTR_IMAGE, 'custom_hass'): vol.Coerce(str),
    vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'): vol.Coerce(str),
})
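
DOCKER_PORTS ties the port-name key pattern to convert_to_docker_ports, so one schema accepts four value shapes: dynamic, single port, port list, and ip mapping. A hedged sketch with illustrative input:

# Illustrative input; each value exercises one branch of
# convert_to_docker_ports.
ports = DOCKER_PORTS({
    "8123/tcp": 8123,               # single port -> validated int
    "80/tcp": None,                 # dynamic port, left to Docker
    "90/tcp": [9000, 9001, 9002],   # >2 entries -> list of ports
    "53/udp": ["127.0.0.1", 53],    # two entries -> (ip, port) tuple
})
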
Submodule home-assistant-polymer updated: a341ccf944...5cdba73bac

setup.py | 11
@@ -29,7 +29,13 @@ setup(
     keywords=['docker', 'home-assistant', 'api'],
     zip_safe=False,
     platforms='any',
-    packages=['hassio', 'hassio.dock', 'hassio.api', 'hassio.addons'],
+    packages=[
+        'hassio',
+        'hassio.dock',
+        'hassio.api',
+        'hassio.addons',
+        'hassio.snapshots'
+    ],
     include_package_data=True,
     install_requires=[
         'async_timeout',
@@ -39,6 +45,7 @@ setup(
         'voluptuous',
         'gitpython',
         'pyotp',
-        'pyqrcode'
+        'pyqrcode',
+        'pytz'
     ]
 )

version.json | 11
@@ -1,7 +1,8 @@
 {
-    "hassio": "0.26",
-    "homeassistant": "0.44.2",
-    "resinos": "0.7",
-    "resinhup": "0.1",
-    "generic": "0.3"
+    "hassio": "0.46",
+    "homeassistant": "0.49",
+    "resinos": "1.0",
+    "resinhup": "0.2",
+    "generic": "0.3",
+    "cluster": "0.1"
 }