mirror of
https://github.com/home-assistant/supervisor.git
synced 2025-09-12 06:29:38 +00:00
Compare commits
114 Commits
Author | SHA1 | Date | |
---|---|---|---|
![]() |
2bf440a744 | ||
![]() |
3b26136636 | ||
![]() |
8249f042c0 | ||
![]() |
84bbaeee5f | ||
![]() |
b7620b7adf | ||
![]() |
5a80be9fd4 | ||
![]() |
a733886803 | ||
![]() |
834fd29fab | ||
![]() |
fd1caf8aa6 | ||
![]() |
975c9e8061 | ||
![]() |
0b3c5885ec | ||
![]() |
711b63e2d0 | ||
![]() |
c7b833b5eb | ||
![]() |
fd472b3084 | ||
![]() |
dcbb6a2160 | ||
![]() |
56fa1550d2 | ||
![]() |
e1f97860ee | ||
![]() |
6ab3fe18d9 | ||
![]() |
7969f3dfd7 | ||
![]() |
6f05b90e4e | ||
![]() |
3aa53d99d7 | ||
![]() |
3525f5a02f | ||
![]() |
04514a9f5c | ||
![]() |
1c915ef4cd | ||
![]() |
b03a2c5c5f | ||
![]() |
64988b285e | ||
![]() |
5c69dca7b3 | ||
![]() |
dfda7dc748 | ||
![]() |
cb7710c23f | ||
![]() |
f9b12a2eb2 | ||
![]() |
6a7617faad | ||
![]() |
05554ccf7e | ||
![]() |
a94e6c5303 | ||
![]() |
d6fc8892db | ||
![]() |
fa9b3b939e | ||
![]() |
70685c41be | ||
![]() |
a3209c4bde | ||
![]() |
f3e60f6c28 | ||
![]() |
7798e7cde2 | ||
![]() |
4af92b9d25 | ||
![]() |
eab958860c | ||
![]() |
09bba96940 | ||
![]() |
a34806d4e2 | ||
![]() |
f00b21dc28 | ||
![]() |
021946e181 | ||
![]() |
6cab017042 | ||
![]() |
5999b48be4 | ||
![]() |
57f3178408 | ||
![]() |
14013ac923 | ||
![]() |
d08343d040 | ||
![]() |
2f9f9c6165 | ||
![]() |
8ab0ed5047 | ||
![]() |
0119b52e11 | ||
![]() |
1382a7b36e | ||
![]() |
2eeb8bf388 | ||
![]() |
5af3040223 | ||
![]() |
47491ca55b | ||
![]() |
b06ce9b6b4 | ||
![]() |
38284e036d | ||
![]() |
27a079742d | ||
![]() |
7f33b3b5aa | ||
![]() |
261bda82db | ||
![]() |
c39d6357f3 | ||
![]() |
d1b30a0e95 | ||
![]() |
6a74893a30 | ||
![]() |
b61d5625fe | ||
![]() |
8d468328f3 | ||
![]() |
cd3b382902 | ||
![]() |
99cf44aacd | ||
![]() |
eaa489abec | ||
![]() |
46f323791d | ||
![]() |
ec72d38220 | ||
![]() |
f5b166a7f0 | ||
![]() |
8afde1e881 | ||
![]() |
f751b0e6fc | ||
![]() |
3809f20c6a | ||
![]() |
68390469df | ||
![]() |
4c122a0630 | ||
![]() |
d06696cd94 | ||
![]() |
8d094d5c70 | ||
![]() |
068c463c98 | ||
![]() |
fc95933098 | ||
![]() |
630137a576 | ||
![]() |
857f346b35 | ||
![]() |
d98b4f039f | ||
![]() |
8fee52da5e | ||
![]() |
0f9ad3658b | ||
![]() |
1155ee07e5 | ||
![]() |
fa687e982e | ||
![]() |
4e902af937 | ||
![]() |
6455ad14a7 | ||
![]() |
4753c058a3 | ||
![]() |
1567cbfe37 | ||
![]() |
3ed66c802e | ||
![]() |
980baf23a8 | ||
![]() |
d69af6a62b | ||
![]() |
863456525f | ||
![]() |
dae49df7b1 | ||
![]() |
282fc03687 | ||
![]() |
f9f7e07c52 | ||
![]() |
12a2ccf0ec | ||
![]() |
a98d76618a | ||
![]() |
7a59e7392b | ||
![]() |
446aff3fa6 | ||
![]() |
3272403141 | ||
![]() |
d1f265da9e | ||
![]() |
4915c935dd | ||
![]() |
e78d935824 | ||
![]() |
934ca64a32 | ||
![]() |
0860e6d202 | ||
![]() |
c3e1c8b58e | ||
![]() |
44e48095c7 | ||
![]() |
a13eb7841d | ||
![]() |
b5701c5878 |
133
API.md
133
API.md
@@ -1,10 +1,11 @@
|
|||||||
# HassIO Server
|
# Hass.io Server
|
||||||
|
|
||||||
## HassIO REST API
|
## Hass.io RESTful API
|
||||||
|
|
||||||
Interface for HomeAssistant to control things from supervisor.
|
Interface for Home Assistant to control things from supervisor.
|
||||||
|
|
||||||
On error:
|
On error:
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"result": "error",
|
"result": "error",
|
||||||
@@ -12,7 +13,8 @@ On error:
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
On success
|
On success:
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"result": "ok",
|
"result": "ok",
|
||||||
@@ -20,10 +22,9 @@ On success
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
### HassIO
|
### Hass.io
|
||||||
|
|
||||||
- GET `/supervisor/ping`
|
- GET `/supervisor/ping`
|
||||||
|
|
||||||
- GET `/supervisor/info`
|
- GET `/supervisor/info`
|
||||||
|
|
||||||
The addons from `addons` are only installed one.
|
The addons from `addons` are only installed one.
|
||||||
@@ -54,7 +55,9 @@ The addons from `addons` are only installed one.
|
|||||||
```
|
```
|
||||||
|
|
||||||
- POST `/supervisor/update`
|
- POST `/supervisor/update`
|
||||||
|
|
||||||
Optional:
|
Optional:
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"version": "VERSION"
|
"version": "VERSION"
|
||||||
@@ -62,6 +65,7 @@ Optional:
|
|||||||
```
|
```
|
||||||
|
|
||||||
- POST `/supervisor/options`
|
- POST `/supervisor/options`
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"beta_channel": "true|false",
|
"beta_channel": "true|false",
|
||||||
@@ -78,11 +82,12 @@ Reload addons/version.
|
|||||||
|
|
||||||
- GET `/supervisor/logs`
|
- GET `/supervisor/logs`
|
||||||
|
|
||||||
Output the raw docker log
|
Output is the raw docker log.
|
||||||
|
|
||||||
### Security
|
### Security
|
||||||
|
|
||||||
- GET `/security/info`
|
- GET `/security/info`
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"initialize": "bool",
|
"initialize": "bool",
|
||||||
@@ -91,6 +96,7 @@ Output the raw docker log
|
|||||||
```
|
```
|
||||||
|
|
||||||
- POST `/security/options`
|
- POST `/security/options`
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"password": "xy"
|
"password": "xy"
|
||||||
@@ -98,6 +104,7 @@ Output the raw docker log
|
|||||||
```
|
```
|
||||||
|
|
||||||
- POST `/security/totp`
|
- POST `/security/totp`
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"password": "xy"
|
"password": "xy"
|
||||||
@@ -117,6 +124,7 @@ Return QR-Code
|
|||||||
### Backup/Snapshot
|
### Backup/Snapshot
|
||||||
|
|
||||||
- GET `/snapshots`
|
- GET `/snapshots`
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"snapshots": [
|
"snapshots": [
|
||||||
@@ -132,6 +140,7 @@ Return QR-Code
|
|||||||
- POST `/snapshots/reload`
|
- POST `/snapshots/reload`
|
||||||
|
|
||||||
- POST `/snapshots/new/full`
|
- POST `/snapshots/new/full`
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"name": "Optional"
|
"name": "Optional"
|
||||||
@@ -139,6 +148,7 @@ Return QR-Code
|
|||||||
```
|
```
|
||||||
|
|
||||||
- POST `/snapshots/new/partial`
|
- POST `/snapshots/new/partial`
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"name": "Optional",
|
"name": "Optional",
|
||||||
@@ -150,6 +160,7 @@ Return QR-Code
|
|||||||
- POST `/snapshots/reload`
|
- POST `/snapshots/reload`
|
||||||
|
|
||||||
- GET `/snapshots/{slug}/info`
|
- GET `/snapshots/{slug}/info`
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"slug": "SNAPSHOT ID",
|
"slug": "SNAPSHOT ID",
|
||||||
@@ -174,10 +185,9 @@ Return QR-Code
|
|||||||
```
|
```
|
||||||
|
|
||||||
- POST `/snapshots/{slug}/remove`
|
- POST `/snapshots/{slug}/remove`
|
||||||
|
|
||||||
- POST `/snapshots/{slug}/restore/full`
|
- POST `/snapshots/{slug}/restore/full`
|
||||||
|
|
||||||
- POST `/snapshots/{slug}/restore/partial`
|
- POST `/snapshots/{slug}/restore/partial`
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"homeassistant": "bool",
|
"homeassistant": "bool",
|
||||||
@@ -187,36 +197,68 @@ Return QR-Code
|
|||||||
```
|
```
|
||||||
|
|
||||||
### Host
|
### Host
|
||||||
|
|
||||||
- POST `/host/reload`
|
- POST `/host/reload`
|
||||||
|
|
||||||
- POST `/host/shutdown`
|
- POST `/host/shutdown`
|
||||||
|
|
||||||
- POST `/host/reboot`
|
- POST `/host/reboot`
|
||||||
|
|
||||||
- GET `/host/info`
|
- GET `/host/info`
|
||||||
See HostControl info command.
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"type": "",
|
"type": "",
|
||||||
"version": "",
|
"version": "",
|
||||||
"last_version": "",
|
"last_version": "",
|
||||||
"features": ["shutdown", "reboot", "update", "network_info", "network_control"],
|
"features": ["shutdown", "reboot", "update", "hostname", "network_info", "network_control"],
|
||||||
"hostname": "",
|
"hostname": "",
|
||||||
"os": ""
|
"os": "",
|
||||||
|
"audio": {
|
||||||
|
"input": "0,0",
|
||||||
|
"output": "0,0"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
- POST `/host/options`
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"audio_input": "0,0",
|
||||||
|
"audio_output": "0,0"
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
- POST `/host/update`
|
- POST `/host/update`
|
||||||
|
|
||||||
Optional:
|
Optional:
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"version": "VERSION"
|
"version": "VERSION"
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
- GET `/host/hardware`
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"serial": ["/dev/xy"],
|
||||||
|
"input": ["Input device name"],
|
||||||
|
"disk": ["/dev/sdax"],
|
||||||
|
"audio": {
|
||||||
|
"CARD_ID": {
|
||||||
|
"name": "xy",
|
||||||
|
"type": "microphone",
|
||||||
|
"devices": {
|
||||||
|
"DEV_ID": "type of device"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
### Network
|
### Network
|
||||||
|
|
||||||
- GET `/network/info`
|
- GET `/network/info`
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"hostname": ""
|
"hostname": ""
|
||||||
@@ -224,18 +266,14 @@ Optional:
|
|||||||
```
|
```
|
||||||
|
|
||||||
- POST `/network/options`
|
- POST `/network/options`
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"hostname": "",
|
"hostname": "",
|
||||||
"mode": "dhcp|fixed",
|
|
||||||
"ssid": "",
|
|
||||||
"ip": "",
|
|
||||||
"netmask": "",
|
|
||||||
"gateway": ""
|
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
### HomeAssistant
|
### Home Assistant
|
||||||
|
|
||||||
- GET `/homeassistant/info`
|
- GET `/homeassistant/info`
|
||||||
|
|
||||||
@@ -245,12 +283,15 @@ Optional:
|
|||||||
"last_version": "LAST_VERSION",
|
"last_version": "LAST_VERSION",
|
||||||
"devices": [""],
|
"devices": [""],
|
||||||
"image": "str",
|
"image": "str",
|
||||||
"custom": "bool -> if custom image"
|
"custom": "bool -> if custom image",
|
||||||
|
"boot": "bool"
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
- POST `/homeassistant/update`
|
- POST `/homeassistant/update`
|
||||||
|
|
||||||
Optional:
|
Optional:
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"version": "VERSION"
|
"version": "VERSION"
|
||||||
@@ -259,11 +300,14 @@ Optional:
|
|||||||
|
|
||||||
- GET `/homeassistant/logs`
|
- GET `/homeassistant/logs`
|
||||||
|
|
||||||
Output the raw docker log
|
Output is the raw Docker log.
|
||||||
|
|
||||||
- POST `/homeassistant/restart`
|
- POST `/homeassistant/restart`
|
||||||
|
|
||||||
- POST `/homeassistant/options`
|
- POST `/homeassistant/options`
|
||||||
|
- POST `/homeassistant/check`
|
||||||
|
- POST `/homeassistant/start`
|
||||||
|
- POST `/homeassistant/stop`
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"devices": [],
|
"devices": [],
|
||||||
@@ -274,11 +318,11 @@ Output the raw docker log
|
|||||||
|
|
||||||
Image with `null` and last_version with `null` reset this options.
|
Image with `null` and last_version with `null` reset this options.
|
||||||
|
|
||||||
### REST API addons
|
### RESTful for API addons
|
||||||
|
|
||||||
- GET `/addons`
|
- GET `/addons`
|
||||||
|
|
||||||
Get all available addons
|
Get all available addons.
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
@@ -293,8 +337,12 @@ Get all available addons
|
|||||||
"installed": "none|INSTALL_VERSION",
|
"installed": "none|INSTALL_VERSION",
|
||||||
"detached": "bool",
|
"detached": "bool",
|
||||||
"build": "bool",
|
"build": "bool",
|
||||||
|
"privileged": ["NET_ADMIN", "SYS_ADMIN"],
|
||||||
|
"devices": ["/dev/xy"],
|
||||||
"url": "null|url",
|
"url": "null|url",
|
||||||
"logo": "bool"
|
"logo": "bool",
|
||||||
|
"audio": "bool",
|
||||||
|
"hassio_api": "bool"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"repositories": [
|
"repositories": [
|
||||||
@@ -310,8 +358,8 @@ Get all available addons
|
|||||||
```
|
```
|
||||||
|
|
||||||
- POST `/addons/reload`
|
- POST `/addons/reload`
|
||||||
|
|
||||||
- GET `/addons/{addon}/info`
|
- GET `/addons/{addon}/info`
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"name": "xy bla",
|
"name": "xy bla",
|
||||||
@@ -328,14 +376,21 @@ Get all available addons
|
|||||||
"options": "{}",
|
"options": "{}",
|
||||||
"network": "{}|null",
|
"network": "{}|null",
|
||||||
"host_network": "bool",
|
"host_network": "bool",
|
||||||
|
"privileged": ["NET_ADMIN", "SYS_ADMIN"],
|
||||||
|
"devices": ["/dev/xy"],
|
||||||
"logo": "bool",
|
"logo": "bool",
|
||||||
"webui": "null|http(s)://[HOST]:port/xy/zx"
|
"hassio_api": "bool",
|
||||||
|
"webui": "null|http(s)://[HOST]:port/xy/zx",
|
||||||
|
"audio": "bool",
|
||||||
|
"audio_input": "null|0,0",
|
||||||
|
"audio_output": "null|0,0"
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
- GET `/addons/{addon}/logo`
|
- GET `/addons/{addon}/logo`
|
||||||
|
|
||||||
- POST `/addons/{addon}/options`
|
- POST `/addons/{addon}/options`
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"boot": "auto|manual",
|
"boot": "auto|manual",
|
||||||
@@ -344,17 +399,21 @@ Get all available addons
|
|||||||
"CONTAINER": "port|[ip, port]"
|
"CONTAINER": "port|[ip, port]"
|
||||||
},
|
},
|
||||||
"options": {},
|
"options": {},
|
||||||
|
"audio_output": "null|0,0",
|
||||||
|
"audio_input": "null|0,0"
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
For reset custom network settings, set it `null`.
|
For reset custom network/audio settings, set it `null`.
|
||||||
|
|
||||||
- POST `/addons/{addon}/start`
|
- POST `/addons/{addon}/start`
|
||||||
|
|
||||||
- POST `/addons/{addon}/stop`
|
- POST `/addons/{addon}/stop`
|
||||||
|
|
||||||
- POST `/addons/{addon}/install`
|
- POST `/addons/{addon}/install`
|
||||||
|
|
||||||
Optional:
|
Optional:
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"version": "VERSION"
|
"version": "VERSION"
|
||||||
@@ -364,7 +423,9 @@ Optional:
|
|||||||
- POST `/addons/{addon}/uninstall`
|
- POST `/addons/{addon}/uninstall`
|
||||||
|
|
||||||
- POST `/addons/{addon}/update`
|
- POST `/addons/{addon}/update`
|
||||||
|
|
||||||
Optional:
|
Optional:
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"version": "VERSION"
|
"version": "VERSION"
|
||||||
@@ -373,15 +434,20 @@ Optional:
|
|||||||
|
|
||||||
- GET `/addons/{addon}/logs`
|
- GET `/addons/{addon}/logs`
|
||||||
|
|
||||||
Output the raw docker log
|
Output is the raw Docker log.
|
||||||
|
|
||||||
- POST `/addons/{addon}/restart`
|
- POST `/addons/{addon}/restart`
|
||||||
|
|
||||||
|
- POST `/addons/{addon}/rebuild`
|
||||||
|
|
||||||
|
Only supported for local build addons
|
||||||
|
|
||||||
## Host Control
|
## Host Control
|
||||||
|
|
||||||
Communicate over unix socket with a host daemon.
|
Communicate over UNIX socket with a host daemon.
|
||||||
|
|
||||||
- commands
|
- commands
|
||||||
|
|
||||||
```
|
```
|
||||||
# info
|
# info
|
||||||
-> {'type', 'version', 'last_version', 'features', 'hostname'}
|
-> {'type', 'version', 'last_version', 'features', 'hostname'}
|
||||||
@@ -400,7 +466,8 @@ Communicate over unix socket with a host daemon.
|
|||||||
# network int route xy
|
# network int route xy
|
||||||
```
|
```
|
||||||
|
|
||||||
features:
|
Features:
|
||||||
|
|
||||||
- shutdown
|
- shutdown
|
||||||
- reboot
|
- reboot
|
||||||
- update
|
- update
|
||||||
|
218
LICENSE
218
LICENSE
@@ -1,29 +1,201 @@
|
|||||||
BSD 3-Clause License
|
Apache License
|
||||||
|
Version 2.0, January 2004
|
||||||
|
http://www.apache.org/licenses/
|
||||||
|
|
||||||
Copyright (c) 2017, Pascal Vizeli
|
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||||
All rights reserved.
|
|
||||||
|
|
||||||
Redistribution and use in source and binary forms, with or without
|
1. Definitions.
|
||||||
modification, are permitted provided that the following conditions are met:
|
|
||||||
|
|
||||||
* Redistributions of source code must retain the above copyright notice, this
|
"License" shall mean the terms and conditions for use, reproduction,
|
||||||
list of conditions and the following disclaimer.
|
and distribution as defined by Sections 1 through 9 of this document.
|
||||||
|
|
||||||
* Redistributions in binary form must reproduce the above copyright notice,
|
"Licensor" shall mean the copyright owner or entity authorized by
|
||||||
this list of conditions and the following disclaimer in the documentation
|
the copyright owner that is granting the License.
|
||||||
and/or other materials provided with the distribution.
|
|
||||||
|
|
||||||
* Neither the name of the copyright holder nor the names of its
|
"Legal Entity" shall mean the union of the acting entity and all
|
||||||
contributors may be used to endorse or promote products derived from
|
other entities that control, are controlled by, or are under common
|
||||||
this software without specific prior written permission.
|
control with that entity. For the purposes of this definition,
|
||||||
|
"control" means (i) the power, direct or indirect, to cause the
|
||||||
|
direction or management of such entity, whether by contract or
|
||||||
|
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||||
|
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||||
|
|
||||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
"You" (or "Your") shall mean an individual or Legal Entity
|
||||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
exercising permissions granted by this License.
|
||||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
|
||||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
"Source" form shall mean the preferred form for making modifications,
|
||||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
including but not limited to software source code, documentation
|
||||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
source, and configuration files.
|
||||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
|
||||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
"Object" form shall mean any form resulting from mechanical
|
||||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
transformation or translation of a Source form, including but
|
||||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
not limited to compiled object code, generated documentation,
|
||||||
|
and conversions to other media types.
|
||||||
|
|
||||||
|
"Work" shall mean the work of authorship, whether in Source or
|
||||||
|
Object form, made available under the License, as indicated by a
|
||||||
|
copyright notice that is included in or attached to the work
|
||||||
|
(an example is provided in the Appendix below).
|
||||||
|
|
||||||
|
"Derivative Works" shall mean any work, whether in Source or Object
|
||||||
|
form, that is based on (or derived from) the Work and for which the
|
||||||
|
editorial revisions, annotations, elaborations, or other modifications
|
||||||
|
represent, as a whole, an original work of authorship. For the purposes
|
||||||
|
of this License, Derivative Works shall not include works that remain
|
||||||
|
separable from, or merely link (or bind by name) to the interfaces of,
|
||||||
|
the Work and Derivative Works thereof.
|
||||||
|
|
||||||
|
"Contribution" shall mean any work of authorship, including
|
||||||
|
the original version of the Work and any modifications or additions
|
||||||
|
to that Work or Derivative Works thereof, that is intentionally
|
||||||
|
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||||
|
or by an individual or Legal Entity authorized to submit on behalf of
|
||||||
|
the copyright owner. For the purposes of this definition, "submitted"
|
||||||
|
means any form of electronic, verbal, or written communication sent
|
||||||
|
to the Licensor or its representatives, including but not limited to
|
||||||
|
communication on electronic mailing lists, source code control systems,
|
||||||
|
and issue tracking systems that are managed by, or on behalf of, the
|
||||||
|
Licensor for the purpose of discussing and improving the Work, but
|
||||||
|
excluding communication that is conspicuously marked or otherwise
|
||||||
|
designated in writing by the copyright owner as "Not a Contribution."
|
||||||
|
|
||||||
|
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||||
|
on behalf of whom a Contribution has been received by Licensor and
|
||||||
|
subsequently incorporated within the Work.
|
||||||
|
|
||||||
|
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
copyright license to reproduce, prepare Derivative Works of,
|
||||||
|
publicly display, publicly perform, sublicense, and distribute the
|
||||||
|
Work and such Derivative Works in Source or Object form.
|
||||||
|
|
||||||
|
3. Grant of Patent License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
(except as stated in this section) patent license to make, have made,
|
||||||
|
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||||
|
where such license applies only to those patent claims licensable
|
||||||
|
by such Contributor that are necessarily infringed by their
|
||||||
|
Contribution(s) alone or by combination of their Contribution(s)
|
||||||
|
with the Work to which such Contribution(s) was submitted. If You
|
||||||
|
institute patent litigation against any entity (including a
|
||||||
|
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||||
|
or a Contribution incorporated within the Work constitutes direct
|
||||||
|
or contributory patent infringement, then any patent licenses
|
||||||
|
granted to You under this License for that Work shall terminate
|
||||||
|
as of the date such litigation is filed.
|
||||||
|
|
||||||
|
4. Redistribution. You may reproduce and distribute copies of the
|
||||||
|
Work or Derivative Works thereof in any medium, with or without
|
||||||
|
modifications, and in Source or Object form, provided that You
|
||||||
|
meet the following conditions:
|
||||||
|
|
||||||
|
(a) You must give any other recipients of the Work or
|
||||||
|
Derivative Works a copy of this License; and
|
||||||
|
|
||||||
|
(b) You must cause any modified files to carry prominent notices
|
||||||
|
stating that You changed the files; and
|
||||||
|
|
||||||
|
(c) You must retain, in the Source form of any Derivative Works
|
||||||
|
that You distribute, all copyright, patent, trademark, and
|
||||||
|
attribution notices from the Source form of the Work,
|
||||||
|
excluding those notices that do not pertain to any part of
|
||||||
|
the Derivative Works; and
|
||||||
|
|
||||||
|
(d) If the Work includes a "NOTICE" text file as part of its
|
||||||
|
distribution, then any Derivative Works that You distribute must
|
||||||
|
include a readable copy of the attribution notices contained
|
||||||
|
within such NOTICE file, excluding those notices that do not
|
||||||
|
pertain to any part of the Derivative Works, in at least one
|
||||||
|
of the following places: within a NOTICE text file distributed
|
||||||
|
as part of the Derivative Works; within the Source form or
|
||||||
|
documentation, if provided along with the Derivative Works; or,
|
||||||
|
within a display generated by the Derivative Works, if and
|
||||||
|
wherever such third-party notices normally appear. The contents
|
||||||
|
of the NOTICE file are for informational purposes only and
|
||||||
|
do not modify the License. You may add Your own attribution
|
||||||
|
notices within Derivative Works that You distribute, alongside
|
||||||
|
or as an addendum to the NOTICE text from the Work, provided
|
||||||
|
that such additional attribution notices cannot be construed
|
||||||
|
as modifying the License.
|
||||||
|
|
||||||
|
You may add Your own copyright statement to Your modifications and
|
||||||
|
may provide additional or different license terms and conditions
|
||||||
|
for use, reproduction, or distribution of Your modifications, or
|
||||||
|
for any such Derivative Works as a whole, provided Your use,
|
||||||
|
reproduction, and distribution of the Work otherwise complies with
|
||||||
|
the conditions stated in this License.
|
||||||
|
|
||||||
|
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||||
|
any Contribution intentionally submitted for inclusion in the Work
|
||||||
|
by You to the Licensor shall be under the terms and conditions of
|
||||||
|
this License, without any additional terms or conditions.
|
||||||
|
Notwithstanding the above, nothing herein shall supersede or modify
|
||||||
|
the terms of any separate license agreement you may have executed
|
||||||
|
with Licensor regarding such Contributions.
|
||||||
|
|
||||||
|
6. Trademarks. This License does not grant permission to use the trade
|
||||||
|
names, trademarks, service marks, or product names of the Licensor,
|
||||||
|
except as required for reasonable and customary use in describing the
|
||||||
|
origin of the Work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
|
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||||
|
agreed to in writing, Licensor provides the Work (and each
|
||||||
|
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
|
implied, including, without limitation, any warranties or conditions
|
||||||
|
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||||
|
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||||
|
appropriateness of using or redistributing the Work and assume any
|
||||||
|
risks associated with Your exercise of permissions under this License.
|
||||||
|
|
||||||
|
8. Limitation of Liability. In no event and under no legal theory,
|
||||||
|
whether in tort (including negligence), contract, or otherwise,
|
||||||
|
unless required by applicable law (such as deliberate and grossly
|
||||||
|
negligent acts) or agreed to in writing, shall any Contributor be
|
||||||
|
liable to You for damages, including any direct, indirect, special,
|
||||||
|
incidental, or consequential damages of any character arising as a
|
||||||
|
result of this License or out of the use or inability to use the
|
||||||
|
Work (including but not limited to damages for loss of goodwill,
|
||||||
|
work stoppage, computer failure or malfunction, or any and all
|
||||||
|
other commercial damages or losses), even if such Contributor
|
||||||
|
has been advised of the possibility of such damages.
|
||||||
|
|
||||||
|
9. Accepting Warranty or Additional Liability. While redistributing
|
||||||
|
the Work or Derivative Works thereof, You may choose to offer,
|
||||||
|
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||||
|
or other liability obligations and/or rights consistent with this
|
||||||
|
License. However, in accepting such obligations, You may act only
|
||||||
|
on Your own behalf and on Your sole responsibility, not on behalf
|
||||||
|
of any other Contributor, and only if You agree to indemnify,
|
||||||
|
defend, and hold each Contributor harmless for any liability
|
||||||
|
incurred by, or claims asserted against, such Contributor by reason
|
||||||
|
of your accepting any such warranty or additional liability.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
APPENDIX: How to apply the Apache License to your work.
|
||||||
|
|
||||||
|
To apply the Apache License to your work, attach the following
|
||||||
|
boilerplate notice, with the fields enclosed by brackets "{}"
|
||||||
|
replaced with your own identifying information. (Don't include
|
||||||
|
the brackets!) The text should be enclosed in the appropriate
|
||||||
|
comment syntax for the file format. We also recommend that a
|
||||||
|
file or class name and description of purpose be included on the
|
||||||
|
same "printed page" as the copyright notice for easier
|
||||||
|
identification within third-party archives.
|
||||||
|
|
||||||
|
Copyright 2017 Pascal Vizeli
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
@@ -1,13 +1,13 @@
|
|||||||
# HassIO
|
# Hass.io
|
||||||
|
|
||||||
### First private cloud solution for home automation.
|
### First private cloud solution for home automation.
|
||||||
|
|
||||||
Hass.io is a Docker based system for managing your Home Assistant installation and related applications. The system is controlled via Home Assistant which communicates with the supervisor. The supervisor provides an API to manage the installation. This includes changing network settings or installing and updating software.
|
Hass.io is a Docker based system for managing your Home Assistant installation and related applications. The system is controlled via Home Assistant which communicates with the supervisor. The supervisor provides an API to manage the installation. This includes changing network settings or installing and updating software.
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
[HassIO-Addons](https://github.com/home-assistant/hassio-addons) | [HassIO-Build](https://github.com/home-assistant/hassio-build)
|
- [Hass.io Addons](https://github.com/home-assistant/hassio-addons)
|
||||||
|
- [Hass.io Build](https://github.com/home-assistant/hassio-build)
|
||||||
**HassIO is under active development and is not ready yet for production use.**
|
|
||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
|
@@ -15,11 +15,11 @@ BUILTIN_REPOSITORIES = set((REPOSITORY_CORE, REPOSITORY_LOCAL))
|
|||||||
class AddonManager(object):
|
class AddonManager(object):
|
||||||
"""Manage addons inside HassIO."""
|
"""Manage addons inside HassIO."""
|
||||||
|
|
||||||
def __init__(self, config, loop, dock):
|
def __init__(self, config, loop, docker):
|
||||||
"""Initialize docker base wrapper."""
|
"""Initialize docker base wrapper."""
|
||||||
self.loop = loop
|
self.loop = loop
|
||||||
self.config = config
|
self.config = config
|
||||||
self.dock = dock
|
self.docker = docker
|
||||||
self.data = Data(config)
|
self.data = Data(config)
|
||||||
self.addons = {}
|
self.addons = {}
|
||||||
self.repositories = {}
|
self.repositories = {}
|
||||||
@@ -78,7 +78,7 @@ class AddonManager(object):
|
|||||||
|
|
||||||
# don't add built-in repository to config
|
# don't add built-in repository to config
|
||||||
if url not in BUILTIN_REPOSITORIES:
|
if url not in BUILTIN_REPOSITORIES:
|
||||||
self.config.addons_repositories = url
|
self.config.add_addon_repository(url)
|
||||||
|
|
||||||
tasks = [_add_repository(url) for url in new_rep - old_rep]
|
tasks = [_add_repository(url) for url in new_rep - old_rep]
|
||||||
if tasks:
|
if tasks:
|
||||||
@@ -108,7 +108,7 @@ class AddonManager(object):
|
|||||||
tasks = []
|
tasks = []
|
||||||
for addon_slug in add_addons:
|
for addon_slug in add_addons:
|
||||||
addon = Addon(
|
addon = Addon(
|
||||||
self.config, self.loop, self.dock, self.data, addon_slug)
|
self.config, self.loop, self.docker, self.data, addon_slug)
|
||||||
|
|
||||||
tasks.append(addon.load())
|
tasks.append(addon.load())
|
||||||
self.addons[addon_slug] = addon
|
self.addons[addon_slug] = addon
|
||||||
|
@@ -8,44 +8,47 @@ import shutil
|
|||||||
import tarfile
|
import tarfile
|
||||||
from tempfile import TemporaryDirectory
|
from tempfile import TemporaryDirectory
|
||||||
|
|
||||||
|
from deepmerge import Merger
|
||||||
import voluptuous as vol
|
import voluptuous as vol
|
||||||
from voluptuous.humanize import humanize_error
|
from voluptuous.humanize import humanize_error
|
||||||
|
|
||||||
from .validate import (
|
from .validate import (
|
||||||
validate_options, SCHEMA_ADDON_SNAPSHOT, MAP_VOLUME)
|
validate_options, SCHEMA_ADDON_SNAPSHOT, RE_VOLUME)
|
||||||
from ..const import (
|
from ..const import (
|
||||||
ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_BOOT, ATTR_MAP,
|
ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_BOOT, ATTR_MAP,
|
||||||
ATTR_OPTIONS, ATTR_PORTS, ATTR_SCHEMA, ATTR_IMAGE, ATTR_REPOSITORY,
|
ATTR_OPTIONS, ATTR_PORTS, ATTR_SCHEMA, ATTR_IMAGE, ATTR_REPOSITORY,
|
||||||
ATTR_URL, ATTR_ARCH, ATTR_LOCATON, ATTR_DEVICES, ATTR_ENVIRONMENT,
|
ATTR_URL, ATTR_ARCH, ATTR_LOCATON, ATTR_DEVICES, ATTR_ENVIRONMENT,
|
||||||
ATTR_HOST_NETWORK, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_STARTUP,
|
ATTR_HOST_NETWORK, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_STARTUP,
|
||||||
STATE_STARTED, STATE_STOPPED, STATE_NONE, ATTR_USER, ATTR_SYSTEM,
|
STATE_STARTED, STATE_STOPPED, STATE_NONE, ATTR_USER, ATTR_SYSTEM,
|
||||||
ATTR_STATE, ATTR_TIMEOUT, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_WEBUI)
|
ATTR_STATE, ATTR_TIMEOUT, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_WEBUI,
|
||||||
|
ATTR_HASSIO_API, ATTR_AUDIO, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT)
|
||||||
from .util import check_installed
|
from .util import check_installed
|
||||||
from ..dock.addon import DockerAddon
|
from ..dock.addon import DockerAddon
|
||||||
from ..tools import write_json_file, read_json_file
|
from ..tools import write_json_file, read_json_file
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
RE_VOLUME = re.compile(MAP_VOLUME)
|
|
||||||
RE_WEBUI = re.compile(r"^(.*\[HOST\]:)\[PORT:(\d+)\](.*)$")
|
RE_WEBUI = re.compile(r"^(.*\[HOST\]:)\[PORT:(\d+)\](.*)$")
|
||||||
|
|
||||||
|
MERGE_OPT = Merger([(dict, ['merge'])], ['override'], ['override'])
|
||||||
|
|
||||||
|
|
||||||
class Addon(object):
|
class Addon(object):
|
||||||
"""Hold data for addon inside HassIO."""
|
"""Hold data for addon inside HassIO."""
|
||||||
|
|
||||||
def __init__(self, config, loop, dock, data, slug):
|
def __init__(self, config, loop, docker, data, slug):
|
||||||
"""Initialize data holder."""
|
"""Initialize data holder."""
|
||||||
self.loop = loop
|
self.loop = loop
|
||||||
self.config = config
|
self.config = config
|
||||||
self.data = data
|
self.data = data
|
||||||
self._id = slug
|
self._id = slug
|
||||||
|
|
||||||
self.addon_docker = DockerAddon(config, loop, dock, self)
|
self.docker = DockerAddon(config, loop, docker, self)
|
||||||
|
|
||||||
async def load(self):
|
async def load(self):
|
||||||
"""Async initialize of object."""
|
"""Async initialize of object."""
|
||||||
if self.is_installed:
|
if self.is_installed:
|
||||||
await self.addon_docker.attach()
|
await self.docker.attach()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def slug(self):
|
def slug(self):
|
||||||
@@ -103,10 +106,10 @@ class Addon(object):
|
|||||||
def options(self):
|
def options(self):
|
||||||
"""Return options with local changes."""
|
"""Return options with local changes."""
|
||||||
if self.is_installed:
|
if self.is_installed:
|
||||||
return {
|
return MERGE_OPT.merge(
|
||||||
**self.data.system[self._id][ATTR_OPTIONS],
|
self.data.system[self._id][ATTR_OPTIONS],
|
||||||
**self.data.user[self._id][ATTR_OPTIONS],
|
self.data.user[self._id][ATTR_OPTIONS],
|
||||||
}
|
)
|
||||||
return self.data.cache[self._id][ATTR_OPTIONS]
|
return self.data.cache[self._id][ATTR_OPTIONS]
|
||||||
|
|
||||||
@options.setter
|
@options.setter
|
||||||
@@ -175,8 +178,8 @@ class Addon(object):
|
|||||||
@property
|
@property
|
||||||
def ports(self):
|
def ports(self):
|
||||||
"""Return ports of addon."""
|
"""Return ports of addon."""
|
||||||
if self.network_mode != 'bridge' or ATTR_PORTS not in self._mesh:
|
if self.host_network or ATTR_PORTS not in self._mesh:
|
||||||
return
|
return None
|
||||||
|
|
||||||
if not self.is_installed or \
|
if not self.is_installed or \
|
||||||
ATTR_NETWORK not in self.data.user[self._id]:
|
ATTR_NETWORK not in self.data.user[self._id]:
|
||||||
@@ -202,7 +205,7 @@ class Addon(object):
|
|||||||
def webui(self):
|
def webui(self):
|
||||||
"""Return URL to webui or None."""
|
"""Return URL to webui or None."""
|
||||||
if ATTR_WEBUI not in self._mesh:
|
if ATTR_WEBUI not in self._mesh:
|
||||||
return
|
return None
|
||||||
|
|
||||||
webui = self._mesh[ATTR_WEBUI]
|
webui = self._mesh[ATTR_WEBUI]
|
||||||
dock_port = RE_WEBUI.sub(r"\2", webui)
|
dock_port = RE_WEBUI.sub(r"\2", webui)
|
||||||
@@ -218,11 +221,9 @@ class Addon(object):
|
|||||||
return RE_WEBUI.sub(r"\g<1>{}\g<3>".format(real_port), webui)
|
return RE_WEBUI.sub(r"\g<1>{}\g<3>".format(real_port), webui)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def network_mode(self):
|
def host_network(self):
|
||||||
"""Return network mode of addon."""
|
"""Return True if addon run on host network."""
|
||||||
if self._mesh[ATTR_HOST_NETWORK]:
|
return self._mesh[ATTR_HOST_NETWORK]
|
||||||
return 'host'
|
|
||||||
return 'bridge'
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def devices(self):
|
def devices(self):
|
||||||
@@ -244,6 +245,56 @@ class Addon(object):
|
|||||||
"""Return list of privilege."""
|
"""Return list of privilege."""
|
||||||
return self._mesh.get(ATTR_PRIVILEGED)
|
return self._mesh.get(ATTR_PRIVILEGED)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def use_hassio_api(self):
|
||||||
|
"""Return True if the add-on access to hassio api."""
|
||||||
|
return self._mesh[ATTR_HASSIO_API]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def with_audio(self):
|
||||||
|
"""Return True if the add-on access to audio."""
|
||||||
|
return self._mesh[ATTR_AUDIO]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def audio_output(self):
|
||||||
|
"""Return ALSA config for output or None."""
|
||||||
|
if not self.with_audio:
|
||||||
|
return None
|
||||||
|
|
||||||
|
setting = self.config.audio_output
|
||||||
|
if self.is_installed and ATTR_AUDIO_OUTPUT in self.data.user[self._id]:
|
||||||
|
setting = self.data.user[self._id][ATTR_AUDIO_OUTPUT]
|
||||||
|
return setting
|
||||||
|
|
||||||
|
@audio_output.setter
|
||||||
|
def audio_output(self, value):
|
||||||
|
"""Set/remove custom audio output settings."""
|
||||||
|
if value is None:
|
||||||
|
self.data.user[self._id].pop(ATTR_AUDIO_OUTPUT, None)
|
||||||
|
else:
|
||||||
|
self.data.user[self._id][ATTR_AUDIO_OUTPUT] = value
|
||||||
|
self.data.save()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def audio_input(self):
|
||||||
|
"""Return ALSA config for input or None."""
|
||||||
|
if not self.with_audio:
|
||||||
|
return
|
||||||
|
|
||||||
|
setting = self.config.audio_input
|
||||||
|
if self.is_installed and ATTR_AUDIO_INPUT in self.data.user[self._id]:
|
||||||
|
setting = self.data.user[self._id][ATTR_AUDIO_INPUT]
|
||||||
|
return setting
|
||||||
|
|
||||||
|
@audio_input.setter
|
||||||
|
def audio_input(self, value):
|
||||||
|
"""Set/remove custom audio input settings."""
|
||||||
|
if value is None:
|
||||||
|
self.data.user[self._id].pop(ATTR_AUDIO_INPUT, None)
|
||||||
|
else:
|
||||||
|
self.data.user[self._id][ATTR_AUDIO_INPUT] = value
|
||||||
|
self.data.save()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def url(self):
|
def url(self):
|
||||||
"""Return url of addon."""
|
"""Return url of addon."""
|
||||||
@@ -336,6 +387,36 @@ class Addon(object):
|
|||||||
return vol.Schema(dict)
|
return vol.Schema(dict)
|
||||||
return vol.Schema(vol.All(dict, validate_options(raw_schema)))
|
return vol.Schema(vol.All(dict, validate_options(raw_schema)))
|
||||||
|
|
||||||
|
def test_udpate_schema(self):
|
||||||
|
"""Check if the exists config valid after update."""
|
||||||
|
if not self.is_installed or self.is_detached:
|
||||||
|
return True
|
||||||
|
|
||||||
|
# load next schema
|
||||||
|
new_raw_schema = self.data.cache[self._id][ATTR_SCHEMA]
|
||||||
|
default_options = self.data.cache[self._id][ATTR_OPTIONS]
|
||||||
|
|
||||||
|
# if disabled
|
||||||
|
if isinstance(new_raw_schema, bool):
|
||||||
|
return True
|
||||||
|
|
||||||
|
# merge options
|
||||||
|
options = {
|
||||||
|
**self.data.user[self._id][ATTR_OPTIONS],
|
||||||
|
**default_options,
|
||||||
|
}
|
||||||
|
|
||||||
|
# create voluptuous
|
||||||
|
new_schema = \
|
||||||
|
vol.Schema(vol.All(dict, validate_options(new_raw_schema)))
|
||||||
|
|
||||||
|
# validate
|
||||||
|
try:
|
||||||
|
new_schema(options)
|
||||||
|
except vol.Invalid:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
async def install(self, version=None):
|
async def install(self, version=None):
|
||||||
"""Install a addon."""
|
"""Install a addon."""
|
||||||
if self.config.arch not in self.supported_arch:
|
if self.config.arch not in self.supported_arch:
|
||||||
@@ -353,7 +434,7 @@ class Addon(object):
|
|||||||
self.path_data.mkdir()
|
self.path_data.mkdir()
|
||||||
|
|
||||||
version = version or self.last_version
|
version = version or self.last_version
|
||||||
if not await self.addon_docker.install(version):
|
if not await self.docker.install(version):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
self._set_install(version)
|
self._set_install(version)
|
||||||
@@ -362,7 +443,7 @@ class Addon(object):
|
|||||||
@check_installed
|
@check_installed
|
||||||
async def uninstall(self):
|
async def uninstall(self):
|
||||||
"""Remove a addon."""
|
"""Remove a addon."""
|
||||||
if not await self.addon_docker.remove():
|
if not await self.docker.remove():
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if self.path_data.is_dir():
|
if self.path_data.is_dir():
|
||||||
@@ -378,45 +459,82 @@ class Addon(object):
|
|||||||
if not self.is_installed:
|
if not self.is_installed:
|
||||||
return STATE_NONE
|
return STATE_NONE
|
||||||
|
|
||||||
if await self.addon_docker.is_running():
|
if await self.docker.is_running():
|
||||||
return STATE_STARTED
|
return STATE_STARTED
|
||||||
return STATE_STOPPED
|
return STATE_STOPPED
|
||||||
|
|
||||||
@check_installed
|
@check_installed
|
||||||
async def start(self):
|
def start(self):
|
||||||
"""Set options and start addon."""
|
"""Set options and start addon.
|
||||||
return await self.addon_docker.run()
|
|
||||||
|
Return a coroutine.
|
||||||
|
"""
|
||||||
|
return self.docker.run()
|
||||||
|
|
||||||
@check_installed
|
@check_installed
|
||||||
async def stop(self):
|
def stop(self):
|
||||||
"""Stop addon."""
|
"""Stop addon.
|
||||||
return await self.addon_docker.stop()
|
|
||||||
|
Return a coroutine.
|
||||||
|
"""
|
||||||
|
return self.docker.stop()
|
||||||
|
|
||||||
@check_installed
|
@check_installed
|
||||||
async def update(self, version=None):
|
async def update(self, version=None):
|
||||||
"""Update addon."""
|
"""Update addon."""
|
||||||
version = version or self.last_version
|
version = version or self.last_version
|
||||||
|
last_state = await self.state()
|
||||||
|
|
||||||
if version == self.version_installed:
|
if version == self.version_installed:
|
||||||
_LOGGER.warning(
|
_LOGGER.warning(
|
||||||
"Addon %s is already installed in %s", self._id, version)
|
"Addon %s is already installed in %s", self._id, version)
|
||||||
return True
|
|
||||||
|
|
||||||
if not await self.addon_docker.update(version):
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
if not await self.docker.update(version):
|
||||||
|
return False
|
||||||
self._set_update(version)
|
self._set_update(version)
|
||||||
|
|
||||||
|
# restore state
|
||||||
|
if last_state == STATE_STARTED:
|
||||||
|
await self.docker.run()
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@check_installed
|
@check_installed
|
||||||
async def restart(self):
|
def restart(self):
|
||||||
"""Restart addon."""
|
"""Restart addon.
|
||||||
return await self.addon_docker.restart()
|
|
||||||
|
Return a coroutine.
|
||||||
|
"""
|
||||||
|
return self.docker.restart()
|
||||||
|
|
||||||
@check_installed
|
@check_installed
|
||||||
async def logs(self):
|
def logs(self):
|
||||||
"""Return addons log output."""
|
"""Return addons log output.
|
||||||
return await self.addon_docker.logs()
|
|
||||||
|
Return a coroutine.
|
||||||
|
"""
|
||||||
|
return self.docker.logs()
|
||||||
|
|
||||||
|
@check_installed
|
||||||
|
async def rebuild(self):
|
||||||
|
"""Performe a rebuild of local build addon."""
|
||||||
|
last_state = await self.state()
|
||||||
|
|
||||||
|
if not self.need_build:
|
||||||
|
_LOGGER.error("Can't rebuild a none local build addon!")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# remove docker container but not addon config
|
||||||
|
if not await self.docker.remove():
|
||||||
|
return False
|
||||||
|
|
||||||
|
if not await self.docker.install(self.version_installed):
|
||||||
|
return False
|
||||||
|
|
||||||
|
# restore state
|
||||||
|
if last_state == STATE_STARTED:
|
||||||
|
await self.docker.run()
|
||||||
|
return True
|
||||||
|
|
||||||
@check_installed
|
@check_installed
|
||||||
async def snapshot(self, tar_file):
|
async def snapshot(self, tar_file):
|
||||||
@@ -424,7 +542,7 @@ class Addon(object):
|
|||||||
with TemporaryDirectory(dir=str(self.config.path_tmp)) as temp:
|
with TemporaryDirectory(dir=str(self.config.path_tmp)) as temp:
|
||||||
# store local image
|
# store local image
|
||||||
if self.need_build and not await \
|
if self.need_build and not await \
|
||||||
self.addon_docker.export_image(Path(temp, "image.tar")):
|
self.docker.export_image(Path(temp, "image.tar")):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
data = {
|
data = {
|
||||||
@@ -489,15 +607,15 @@ class Addon(object):
|
|||||||
|
|
||||||
# check version / restore image
|
# check version / restore image
|
||||||
version = data[ATTR_VERSION]
|
version = data[ATTR_VERSION]
|
||||||
if version != self.addon_docker.version:
|
if version != self.docker.version:
|
||||||
image_file = Path(temp, "image.tar")
|
image_file = Path(temp, "image.tar")
|
||||||
if image_file.is_file():
|
if image_file.is_file():
|
||||||
await self.addon_docker.import_image(image_file, version)
|
await self.docker.import_image(image_file, version)
|
||||||
else:
|
else:
|
||||||
if await self.addon_docker.install(version):
|
if await self.docker.install(version):
|
||||||
await self.addon_docker.cleanup()
|
await self.docker.cleanup()
|
||||||
else:
|
else:
|
||||||
await self.addon_docker.stop()
|
await self.docker.stop()
|
||||||
|
|
||||||
# restore data
|
# restore data
|
||||||
def _restore_data():
|
def _restore_data():
|
||||||
|
@@ -3,15 +3,13 @@ import copy
|
|||||||
import logging
|
import logging
|
||||||
import json
|
import json
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
import re
|
|
||||||
|
|
||||||
import voluptuous as vol
|
import voluptuous as vol
|
||||||
from voluptuous.humanize import humanize_error
|
from voluptuous.humanize import humanize_error
|
||||||
|
|
||||||
from .util import extract_hash_from_path
|
from .util import extract_hash_from_path
|
||||||
from .validate import (
|
from .validate import (
|
||||||
SCHEMA_ADDON_CONFIG, SCHEMA_ADDON_FILE, SCHEMA_REPOSITORY_CONFIG,
|
SCHEMA_ADDON_CONFIG, SCHEMA_ADDON_FILE, SCHEMA_REPOSITORY_CONFIG)
|
||||||
MAP_VOLUME)
|
|
||||||
from ..const import (
|
from ..const import (
|
||||||
FILE_HASSIO_ADDONS, ATTR_VERSION, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON,
|
FILE_HASSIO_ADDONS, ATTR_VERSION, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON,
|
||||||
REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_USER, ATTR_SYSTEM)
|
REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_USER, ATTR_SYSTEM)
|
||||||
@@ -19,8 +17,6 @@ from ..tools import JsonConfig, read_json_file
|
|||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
RE_VOLUME = re.compile(MAP_VOLUME)
|
|
||||||
|
|
||||||
|
|
||||||
class Data(JsonConfig):
|
class Data(JsonConfig):
|
||||||
"""Hold data for addons inside HassIO."""
|
"""Hold data for addons inside HassIO."""
|
||||||
@@ -118,7 +114,7 @@ class Data(JsonConfig):
|
|||||||
addon_config[ATTR_LOCATON] = str(addon.parent)
|
addon_config[ATTR_LOCATON] = str(addon.parent)
|
||||||
self._cache[addon_slug] = addon_config
|
self._cache[addon_slug] = addon_config
|
||||||
|
|
||||||
except OSError:
|
except (OSError, json.JSONDecodeError):
|
||||||
_LOGGER.warning("Can't read %s", addon)
|
_LOGGER.warning("Can't read %s", addon)
|
||||||
|
|
||||||
except vol.Invalid as ex:
|
except vol.Invalid as ex:
|
||||||
|
@@ -1,6 +1,7 @@
|
|||||||
"""Init file for HassIO addons git."""
|
"""Init file for HassIO addons git."""
|
||||||
import asyncio
|
import asyncio
|
||||||
import logging
|
import logging
|
||||||
|
import functools as ft
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
import shutil
|
import shutil
|
||||||
|
|
||||||
@@ -48,7 +49,9 @@ class GitRepo(object):
|
|||||||
try:
|
try:
|
||||||
_LOGGER.info("Clone addon %s repository", self.url)
|
_LOGGER.info("Clone addon %s repository", self.url)
|
||||||
self.repo = await self.loop.run_in_executor(
|
self.repo = await self.loop.run_in_executor(
|
||||||
None, git.Repo.clone_from, self.url, str(self.path))
|
None, ft.partial(
|
||||||
|
git.Repo.clone_from, self.url, str(self.path),
|
||||||
|
recursive=True))
|
||||||
|
|
||||||
except (git.InvalidGitRepositoryError, git.NoSuchPathError,
|
except (git.InvalidGitRepositoryError, git.NoSuchPathError,
|
||||||
git.GitCommandError) as err:
|
git.GitCommandError) as err:
|
||||||
|
@@ -1,4 +1,6 @@
|
|||||||
"""Validate addons options schema."""
|
"""Validate addons options schema."""
|
||||||
|
import re
|
||||||
|
|
||||||
import voluptuous as vol
|
import voluptuous as vol
|
||||||
|
|
||||||
from ..const import (
|
from ..const import (
|
||||||
@@ -10,11 +12,12 @@ from ..const import (
|
|||||||
ARCH_AARCH64, ARCH_AMD64, ARCH_I386, ATTR_TMPFS, ATTR_PRIVILEGED,
|
ARCH_AARCH64, ARCH_AMD64, ARCH_I386, ATTR_TMPFS, ATTR_PRIVILEGED,
|
||||||
ATTR_USER, ATTR_STATE, ATTR_SYSTEM, STATE_STARTED, STATE_STOPPED,
|
ATTR_USER, ATTR_STATE, ATTR_SYSTEM, STATE_STARTED, STATE_STOPPED,
|
||||||
ATTR_LOCATON, ATTR_REPOSITORY, ATTR_TIMEOUT, ATTR_NETWORK,
|
ATTR_LOCATON, ATTR_REPOSITORY, ATTR_TIMEOUT, ATTR_NETWORK,
|
||||||
ATTR_AUTO_UPDATE, ATTR_WEBUI)
|
ATTR_AUTO_UPDATE, ATTR_WEBUI, ATTR_AUDIO, ATTR_AUDIO_INPUT,
|
||||||
from ..validate import NETWORK_PORT, DOCKER_PORTS
|
ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API)
|
||||||
|
from ..validate import NETWORK_PORT, DOCKER_PORTS, ALSA_CHANNEL
|
||||||
|
|
||||||
|
|
||||||
MAP_VOLUME = r"^(config|ssl|addons|backup|share)(?::(rw|:ro))?$"
|
RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|:ro))?$")
|
||||||
|
|
||||||
V_STR = 'str'
|
V_STR = 'str'
|
||||||
V_INT = 'int'
|
V_INT = 'int'
|
||||||
@@ -23,8 +26,18 @@ V_BOOL = 'bool'
|
|||||||
V_EMAIL = 'email'
|
V_EMAIL = 'email'
|
||||||
V_URL = 'url'
|
V_URL = 'url'
|
||||||
V_PORT = 'port'
|
V_PORT = 'port'
|
||||||
|
V_MATCH = 'match'
|
||||||
|
|
||||||
ADDON_ELEMENT = vol.In([V_STR, V_INT, V_FLOAT, V_BOOL, V_EMAIL, V_URL, V_PORT])
|
RE_SCHEMA_ELEMENT = re.compile(
|
||||||
|
r"^(?:"
|
||||||
|
r"|str|bool|email|url|port"
|
||||||
|
r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
|
||||||
|
r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
|
||||||
|
r"|match\((?P<match>.*)\)"
|
||||||
|
r")$"
|
||||||
|
)
|
||||||
|
|
||||||
|
SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)
|
||||||
|
|
||||||
ARCH_ALL = [
|
ARCH_ALL = [
|
||||||
ARCH_ARMHF, ARCH_AARCH64, ARCH_AMD64, ARCH_I386
|
ARCH_ARMHF, ARCH_AARCH64, ARCH_AMD64, ARCH_I386
|
||||||
@@ -35,16 +48,15 @@ STARTUP_ALL = [
|
|||||||
STARTUP_APPLICATION
|
STARTUP_APPLICATION
|
||||||
]
|
]
|
||||||
|
|
||||||
PRIVILEGE_ALL = [
|
PRIVILEGED_ALL = [
|
||||||
"NET_ADMIN"
|
"NET_ADMIN",
|
||||||
|
"SYS_ADMIN",
|
||||||
|
"SYS_RAWIO"
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
def _migrate_startup(value):
|
def _simple_startup(value):
|
||||||
"""Migrate startup schema.
|
"""Simple startup schema."""
|
||||||
|
|
||||||
REMOVE after 0.50-
|
|
||||||
"""
|
|
||||||
if value == "before":
|
if value == "before":
|
||||||
return STARTUP_SERVICES
|
return STARTUP_SERVICES
|
||||||
if value == "after":
|
if value == "after":
|
||||||
@@ -61,7 +73,7 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
|
|||||||
vol.Optional(ATTR_URL): vol.Url(),
|
vol.Optional(ATTR_URL): vol.Url(),
|
||||||
vol.Optional(ATTR_ARCH, default=ARCH_ALL): [vol.In(ARCH_ALL)],
|
vol.Optional(ATTR_ARCH, default=ARCH_ALL): [vol.In(ARCH_ALL)],
|
||||||
vol.Required(ATTR_STARTUP):
|
vol.Required(ATTR_STARTUP):
|
||||||
vol.All(_migrate_startup, vol.In(STARTUP_ALL)),
|
vol.All(_simple_startup, vol.In(STARTUP_ALL)),
|
||||||
vol.Required(ATTR_BOOT):
|
vol.Required(ATTR_BOOT):
|
||||||
vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
||||||
vol.Optional(ATTR_PORTS): DOCKER_PORTS,
|
vol.Optional(ATTR_PORTS): DOCKER_PORTS,
|
||||||
@@ -71,14 +83,16 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
|
|||||||
vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
|
vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
|
||||||
vol.Optional(ATTR_TMPFS):
|
vol.Optional(ATTR_TMPFS):
|
||||||
vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
|
vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
|
||||||
vol.Optional(ATTR_MAP, default=[]): [vol.Match(MAP_VOLUME)],
|
vol.Optional(ATTR_MAP, default=[]): [vol.Match(RE_VOLUME)],
|
||||||
vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
|
vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
|
||||||
vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGE_ALL)],
|
vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
|
||||||
|
vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
|
||||||
|
vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
|
||||||
vol.Required(ATTR_OPTIONS): dict,
|
vol.Required(ATTR_OPTIONS): dict,
|
||||||
vol.Required(ATTR_SCHEMA): vol.Any(vol.Schema({
|
vol.Required(ATTR_SCHEMA): vol.Any(vol.Schema({
|
||||||
vol.Coerce(str): vol.Any(ADDON_ELEMENT, [
|
vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [
|
||||||
vol.Any(ADDON_ELEMENT, {vol.Coerce(str): ADDON_ELEMENT})
|
vol.Any(SCHEMA_ELEMENT, {vol.Coerce(str): SCHEMA_ELEMENT})
|
||||||
], vol.Schema({vol.Coerce(str): ADDON_ELEMENT}))
|
], vol.Schema({vol.Coerce(str): SCHEMA_ELEMENT}))
|
||||||
}), False),
|
}), False),
|
||||||
vol.Optional(ATTR_IMAGE): vol.Match(r"\w*/\w*"),
|
vol.Optional(ATTR_IMAGE): vol.Match(r"\w*/\w*"),
|
||||||
vol.Optional(ATTR_TIMEOUT, default=10):
|
vol.Optional(ATTR_TIMEOUT, default=10):
|
||||||
@@ -97,11 +111,13 @@ SCHEMA_REPOSITORY_CONFIG = vol.Schema({
|
|||||||
# pylint: disable=no-value-for-parameter
|
# pylint: disable=no-value-for-parameter
|
||||||
SCHEMA_ADDON_USER = vol.Schema({
|
SCHEMA_ADDON_USER = vol.Schema({
|
||||||
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
||||||
vol.Required(ATTR_OPTIONS): dict,
|
vol.Optional(ATTR_OPTIONS, default={}): dict,
|
||||||
vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
|
vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_BOOT):
|
vol.Optional(ATTR_BOOT):
|
||||||
vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
||||||
vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
|
vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
|
||||||
|
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
|
||||||
|
vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
||||||
@@ -168,20 +184,32 @@ def _single_validate(typ, value, key):
|
|||||||
if value is None:
|
if value is None:
|
||||||
raise vol.Invalid("Missing required option '{}'.".format(key))
|
raise vol.Invalid("Missing required option '{}'.".format(key))
|
||||||
|
|
||||||
if typ == V_STR:
|
# parse extend data from type
|
||||||
|
match = RE_SCHEMA_ELEMENT.match(typ)
|
||||||
|
|
||||||
|
# prepare range
|
||||||
|
range_args = {}
|
||||||
|
for group_name in ('i_min', 'i_max', 'f_min', 'f_max'):
|
||||||
|
group_value = match.group(group_name)
|
||||||
|
if group_value:
|
||||||
|
range_args[group_name[2:]] = float(group_value)
|
||||||
|
|
||||||
|
if typ.startswith(V_STR):
|
||||||
return str(value)
|
return str(value)
|
||||||
elif typ == V_INT:
|
elif typ.startswith(V_INT):
|
||||||
return int(value)
|
return vol.All(vol.Coerce(int), vol.Range(**range_args))(value)
|
||||||
elif typ == V_FLOAT:
|
elif typ.startswith(V_FLOAT):
|
||||||
return float(value)
|
return vol.All(vol.Coerce(float), vol.Range(**range_args))(value)
|
||||||
elif typ == V_BOOL:
|
elif typ.startswith(V_BOOL):
|
||||||
return vol.Boolean()(value)
|
return vol.Boolean()(value)
|
||||||
elif typ == V_EMAIL:
|
elif typ.startswith(V_EMAIL):
|
||||||
return vol.Email()(value)
|
return vol.Email()(value)
|
||||||
elif typ == V_URL:
|
elif typ.startswith(V_URL):
|
||||||
return vol.Url()(value)
|
return vol.Url()(value)
|
||||||
elif typ == V_PORT:
|
elif typ.startswith(V_PORT):
|
||||||
return NETWORK_PORT(value)
|
return NETWORK_PORT(value)
|
||||||
|
elif typ.startswith(V_MATCH):
|
||||||
|
return vol.Match(match.group('match'))(str(value))
|
||||||
|
|
||||||
raise vol.Invalid("Fatal error for {} type {}".format(key, typ))
|
raise vol.Invalid("Fatal error for {} type {}".format(key, typ))
|
||||||
except ValueError:
|
except ValueError:
|
||||||
|
@@ -28,14 +28,16 @@ class RestAPI(object):
|
|||||||
self._handler = None
|
self._handler = None
|
||||||
self.server = None
|
self.server = None
|
||||||
|
|
||||||
def register_host(self, host_control):
|
def register_host(self, host_control, hardware):
|
||||||
"""Register hostcontrol function."""
|
"""Register hostcontrol function."""
|
||||||
api_host = APIHost(self.config, self.loop, host_control)
|
api_host = APIHost(self.config, self.loop, host_control, hardware)
|
||||||
|
|
||||||
self.webapp.router.add_get('/host/info', api_host.info)
|
self.webapp.router.add_get('/host/info', api_host.info)
|
||||||
|
self.webapp.router.add_get('/host/hardware', api_host.hardware)
|
||||||
self.webapp.router.add_post('/host/reboot', api_host.reboot)
|
self.webapp.router.add_post('/host/reboot', api_host.reboot)
|
||||||
self.webapp.router.add_post('/host/shutdown', api_host.shutdown)
|
self.webapp.router.add_post('/host/shutdown', api_host.shutdown)
|
||||||
self.webapp.router.add_post('/host/update', api_host.update)
|
self.webapp.router.add_post('/host/update', api_host.update)
|
||||||
|
self.webapp.router.add_post('/host/options', api_host.options)
|
||||||
|
|
||||||
def register_network(self, host_control):
|
def register_network(self, host_control):
|
||||||
"""Register network function."""
|
"""Register network function."""
|
||||||
@@ -45,11 +47,11 @@ class RestAPI(object):
|
|||||||
self.webapp.router.add_post('/network/options', api_net.options)
|
self.webapp.router.add_post('/network/options', api_net.options)
|
||||||
|
|
||||||
def register_supervisor(self, supervisor, snapshots, addons, host_control,
|
def register_supervisor(self, supervisor, snapshots, addons, host_control,
|
||||||
websession):
|
updater):
|
||||||
"""Register supervisor function."""
|
"""Register supervisor function."""
|
||||||
api_supervisor = APISupervisor(
|
api_supervisor = APISupervisor(
|
||||||
self.config, self.loop, supervisor, snapshots, addons,
|
self.config, self.loop, supervisor, snapshots, addons,
|
||||||
host_control, websession)
|
host_control, updater)
|
||||||
|
|
||||||
self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping)
|
self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping)
|
||||||
self.webapp.router.add_get('/supervisor/info', api_supervisor.info)
|
self.webapp.router.add_get('/supervisor/info', api_supervisor.info)
|
||||||
@@ -66,10 +68,13 @@ class RestAPI(object):
|
|||||||
api_hass = APIHomeAssistant(self.config, self.loop, dock_homeassistant)
|
api_hass = APIHomeAssistant(self.config, self.loop, dock_homeassistant)
|
||||||
|
|
||||||
self.webapp.router.add_get('/homeassistant/info', api_hass.info)
|
self.webapp.router.add_get('/homeassistant/info', api_hass.info)
|
||||||
|
self.webapp.router.add_get('/homeassistant/logs', api_hass.logs)
|
||||||
self.webapp.router.add_post('/homeassistant/options', api_hass.options)
|
self.webapp.router.add_post('/homeassistant/options', api_hass.options)
|
||||||
self.webapp.router.add_post('/homeassistant/update', api_hass.update)
|
self.webapp.router.add_post('/homeassistant/update', api_hass.update)
|
||||||
self.webapp.router.add_post('/homeassistant/restart', api_hass.restart)
|
self.webapp.router.add_post('/homeassistant/restart', api_hass.restart)
|
||||||
self.webapp.router.add_get('/homeassistant/logs', api_hass.logs)
|
self.webapp.router.add_post('/homeassistant/stop', api_hass.stop)
|
||||||
|
self.webapp.router.add_post('/homeassistant/start', api_hass.start)
|
||||||
|
self.webapp.router.add_post('/homeassistant/check', api_hass.check)
|
||||||
|
|
||||||
def register_addons(self, addons):
|
def register_addons(self, addons):
|
||||||
"""Register homeassistant function."""
|
"""Register homeassistant function."""
|
||||||
@@ -91,6 +96,8 @@ class RestAPI(object):
|
|||||||
'/addons/{addon}/update', api_addons.update)
|
'/addons/{addon}/update', api_addons.update)
|
||||||
self.webapp.router.add_post(
|
self.webapp.router.add_post(
|
||||||
'/addons/{addon}/options', api_addons.options)
|
'/addons/{addon}/options', api_addons.options)
|
||||||
|
self.webapp.router.add_post(
|
||||||
|
'/addons/{addon}/rebuild', api_addons.rebuild)
|
||||||
self.webapp.router.add_get('/addons/{addon}/logs', api_addons.logs)
|
self.webapp.router.add_get('/addons/{addon}/logs', api_addons.logs)
|
||||||
self.webapp.router.add_get('/addons/{addon}/logo', api_addons.logo)
|
self.webapp.router.add_get('/addons/{addon}/logo', api_addons.logo)
|
||||||
|
|
||||||
|
@@ -11,8 +11,9 @@ from ..const import (
|
|||||||
ATTR_URL, ATTR_DESCRIPTON, ATTR_DETACHED, ATTR_NAME, ATTR_REPOSITORY,
|
ATTR_URL, ATTR_DESCRIPTON, ATTR_DETACHED, ATTR_NAME, ATTR_REPOSITORY,
|
||||||
ATTR_BUILD, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_HOST_NETWORK, ATTR_SLUG,
|
ATTR_BUILD, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_HOST_NETWORK, ATTR_SLUG,
|
||||||
ATTR_SOURCE, ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_ARCH, ATTR_MAINTAINER,
|
ATTR_SOURCE, ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_ARCH, ATTR_MAINTAINER,
|
||||||
ATTR_INSTALLED, ATTR_LOGO, ATTR_WEBUI, BOOT_AUTO, BOOT_MANUAL,
|
ATTR_INSTALLED, ATTR_LOGO, ATTR_WEBUI, ATTR_DEVICES, ATTR_PRIVILEGED,
|
||||||
CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY)
|
ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API,
|
||||||
|
BOOT_AUTO, BOOT_MANUAL, CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY)
|
||||||
from ..validate import DOCKER_PORTS
|
from ..validate import DOCKER_PORTS
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
@@ -49,6 +50,14 @@ class APIAddons(object):
|
|||||||
|
|
||||||
return addon
|
return addon
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _pretty_devices(addon):
|
||||||
|
"""Return a simplified device list."""
|
||||||
|
dev_list = addon.devices
|
||||||
|
if not dev_list:
|
||||||
|
return
|
||||||
|
return [row.split(':')[0] for row in dev_list]
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
async def list(self, request):
|
async def list(self, request):
|
||||||
"""Return all addons / repositories ."""
|
"""Return all addons / repositories ."""
|
||||||
@@ -64,8 +73,12 @@ class APIAddons(object):
|
|||||||
ATTR_DETACHED: addon.is_detached,
|
ATTR_DETACHED: addon.is_detached,
|
||||||
ATTR_REPOSITORY: addon.repository,
|
ATTR_REPOSITORY: addon.repository,
|
||||||
ATTR_BUILD: addon.need_build,
|
ATTR_BUILD: addon.need_build,
|
||||||
|
ATTR_PRIVILEGED: addon.privileged,
|
||||||
|
ATTR_DEVICES: self._pretty_devices(addon),
|
||||||
ATTR_URL: addon.url,
|
ATTR_URL: addon.url,
|
||||||
ATTR_LOGO: addon.with_logo,
|
ATTR_LOGO: addon.with_logo,
|
||||||
|
ATTR_HASSIO_API: addon.use_hassio_api,
|
||||||
|
ATTR_AUDIO: addon.with_audio,
|
||||||
})
|
})
|
||||||
|
|
||||||
data_repositories = []
|
data_repositories = []
|
||||||
@@ -108,9 +121,15 @@ class APIAddons(object):
|
|||||||
ATTR_DETACHED: addon.is_detached,
|
ATTR_DETACHED: addon.is_detached,
|
||||||
ATTR_BUILD: addon.need_build,
|
ATTR_BUILD: addon.need_build,
|
||||||
ATTR_NETWORK: addon.ports,
|
ATTR_NETWORK: addon.ports,
|
||||||
ATTR_HOST_NETWORK: addon.network_mode == 'host',
|
ATTR_HOST_NETWORK: addon.host_network,
|
||||||
|
ATTR_PRIVILEGED: addon.privileged,
|
||||||
|
ATTR_DEVICES: self._pretty_devices(addon),
|
||||||
ATTR_LOGO: addon.with_logo,
|
ATTR_LOGO: addon.with_logo,
|
||||||
ATTR_WEBUI: addon.webui,
|
ATTR_WEBUI: addon.webui,
|
||||||
|
ATTR_HASSIO_API: addon.use_hassio_api,
|
||||||
|
ATTR_AUDIO: addon.with_audio,
|
||||||
|
ATTR_AUDIO_INPUT: addon.audio_input,
|
||||||
|
ATTR_AUDIO_OUTPUT: addon.audio_output,
|
||||||
}
|
}
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
@@ -132,6 +151,10 @@ class APIAddons(object):
|
|||||||
addon.auto_update = body[ATTR_AUTO_UPDATE]
|
addon.auto_update = body[ATTR_AUTO_UPDATE]
|
||||||
if ATTR_NETWORK in body:
|
if ATTR_NETWORK in body:
|
||||||
addon.ports = body[ATTR_NETWORK]
|
addon.ports = body[ATTR_NETWORK]
|
||||||
|
if ATTR_AUDIO_INPUT in body:
|
||||||
|
addon.audio_input = body[ATTR_AUDIO_INPUT]
|
||||||
|
if ATTR_AUDIO_OUTPUT in body:
|
||||||
|
addon.audio_output = body[ATTR_AUDIO_OUTPUT]
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@@ -140,19 +163,19 @@ class APIAddons(object):
|
|||||||
"""Install addon."""
|
"""Install addon."""
|
||||||
body = await api_validate(SCHEMA_VERSION, request)
|
body = await api_validate(SCHEMA_VERSION, request)
|
||||||
addon = self._extract_addon(request, check_installed=False)
|
addon = self._extract_addon(request, check_installed=False)
|
||||||
version = body.get(ATTR_VERSION)
|
version = body.get(ATTR_VERSION, addon.last_version)
|
||||||
|
|
||||||
return await asyncio.shield(
|
return await asyncio.shield(
|
||||||
addon.install(version=version), loop=self.loop)
|
addon.install(version=version), loop=self.loop)
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
async def uninstall(self, request):
|
def uninstall(self, request):
|
||||||
"""Uninstall addon."""
|
"""Uninstall addon."""
|
||||||
addon = self._extract_addon(request)
|
addon = self._extract_addon(request)
|
||||||
return await asyncio.shield(addon.uninstall(), loop=self.loop)
|
return asyncio.shield(addon.uninstall(), loop=self.loop)
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
async def start(self, request):
|
def start(self, request):
|
||||||
"""Start addon."""
|
"""Start addon."""
|
||||||
addon = self._extract_addon(request)
|
addon = self._extract_addon(request)
|
||||||
|
|
||||||
@@ -163,29 +186,41 @@ class APIAddons(object):
|
|||||||
except vol.Invalid as ex:
|
except vol.Invalid as ex:
|
||||||
raise RuntimeError(humanize_error(options, ex)) from None
|
raise RuntimeError(humanize_error(options, ex)) from None
|
||||||
|
|
||||||
return await asyncio.shield(addon.start(), loop=self.loop)
|
return asyncio.shield(addon.start(), loop=self.loop)
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
async def stop(self, request):
|
def stop(self, request):
|
||||||
"""Stop addon."""
|
"""Stop addon."""
|
||||||
addon = self._extract_addon(request)
|
addon = self._extract_addon(request)
|
||||||
return await asyncio.shield(addon.stop(), loop=self.loop)
|
return asyncio.shield(addon.stop(), loop=self.loop)
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
async def update(self, request):
|
async def update(self, request):
|
||||||
"""Update addon."""
|
"""Update addon."""
|
||||||
body = await api_validate(SCHEMA_VERSION, request)
|
body = await api_validate(SCHEMA_VERSION, request)
|
||||||
addon = self._extract_addon(request)
|
addon = self._extract_addon(request)
|
||||||
version = body.get(ATTR_VERSION)
|
version = body.get(ATTR_VERSION, addon.last_version)
|
||||||
|
|
||||||
|
if version == addon.version_installed:
|
||||||
|
raise RuntimeError("Version %s is already in use", version)
|
||||||
|
|
||||||
return await asyncio.shield(
|
return await asyncio.shield(
|
||||||
addon.update(version=version), loop=self.loop)
|
addon.update(version=version), loop=self.loop)
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
async def restart(self, request):
|
def restart(self, request):
|
||||||
"""Restart addon."""
|
"""Restart addon."""
|
||||||
addon = self._extract_addon(request)
|
addon = self._extract_addon(request)
|
||||||
return await asyncio.shield(addon.restart(), loop=self.loop)
|
return asyncio.shield(addon.restart(), loop=self.loop)
|
||||||
|
|
||||||
|
@api_process
|
||||||
|
def rebuild(self, request):
|
||||||
|
"""Rebuild local build addon."""
|
||||||
|
addon = self._extract_addon(request)
|
||||||
|
if not addon.need_build:
|
||||||
|
raise RuntimeError("Only local build addons are supported")
|
||||||
|
|
||||||
|
return asyncio.shield(addon.rebuild(), loop=self.loop)
|
||||||
|
|
||||||
@api_process_raw(CONTENT_TYPE_BINARY)
|
@api_process_raw(CONTENT_TYPE_BINARY)
|
||||||
def logs(self, request):
|
def logs(self, request):
|
||||||
|
@@ -7,14 +7,16 @@ import voluptuous as vol
|
|||||||
from .util import api_process, api_process_raw, api_validate
|
from .util import api_process, api_process_raw, api_validate
|
||||||
from ..const import (
|
from ..const import (
|
||||||
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_DEVICES, ATTR_IMAGE, ATTR_CUSTOM,
|
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_DEVICES, ATTR_IMAGE, ATTR_CUSTOM,
|
||||||
CONTENT_TYPE_BINARY)
|
ATTR_BOOT, CONTENT_TYPE_BINARY)
|
||||||
from ..validate import HASS_DEVICES
|
from ..validate import HASS_DEVICES
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
# pylint: disable=no-value-for-parameter
|
||||||
SCHEMA_OPTIONS = vol.Schema({
|
SCHEMA_OPTIONS = vol.Schema({
|
||||||
vol.Optional(ATTR_DEVICES): HASS_DEVICES,
|
vol.Optional(ATTR_DEVICES): HASS_DEVICES,
|
||||||
|
vol.Optional(ATTR_BOOT): vol.Boolean(),
|
||||||
vol.Inclusive(ATTR_IMAGE, 'custom_hass'): vol.Any(None, vol.Coerce(str)),
|
vol.Inclusive(ATTR_IMAGE, 'custom_hass'): vol.Any(None, vol.Coerce(str)),
|
||||||
vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'):
|
vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'):
|
||||||
vol.Any(None, vol.Coerce(str)),
|
vol.Any(None, vol.Coerce(str)),
|
||||||
@@ -43,6 +45,7 @@ class APIHomeAssistant(object):
|
|||||||
ATTR_IMAGE: self.homeassistant.image,
|
ATTR_IMAGE: self.homeassistant.image,
|
||||||
ATTR_DEVICES: self.homeassistant.devices,
|
ATTR_DEVICES: self.homeassistant.devices,
|
||||||
ATTR_CUSTOM: self.homeassistant.is_custom_image,
|
ATTR_CUSTOM: self.homeassistant.is_custom_image,
|
||||||
|
ATTR_BOOT: self.homeassistant.boot,
|
||||||
}
|
}
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
@@ -57,33 +60,48 @@ class APIHomeAssistant(object):
|
|||||||
self.homeassistant.set_custom(
|
self.homeassistant.set_custom(
|
||||||
body[ATTR_IMAGE], body[ATTR_LAST_VERSION])
|
body[ATTR_IMAGE], body[ATTR_LAST_VERSION])
|
||||||
|
|
||||||
|
if ATTR_BOOT in body:
|
||||||
|
self.homeassistant.boot = body[ATTR_BOOT]
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
async def update(self, request):
|
async def update(self, request):
|
||||||
"""Update homeassistant."""
|
"""Update homeassistant."""
|
||||||
body = await api_validate(SCHEMA_VERSION, request)
|
body = await api_validate(SCHEMA_VERSION, request)
|
||||||
version = body.get(ATTR_VERSION, self.config.last_homeassistant)
|
version = body.get(ATTR_VERSION, self.homeassistant.last_version)
|
||||||
|
|
||||||
if self.homeassistant.in_progress:
|
if version == self.homeassistant.version:
|
||||||
raise RuntimeError("Other task is in progress")
|
raise RuntimeError("Version {} is already in use".format(version))
|
||||||
|
|
||||||
return await asyncio.shield(
|
return await asyncio.shield(
|
||||||
self.homeassistant.update(version), loop=self.loop)
|
self.homeassistant.update(version), loop=self.loop)
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
async def restart(self, request):
|
def stop(self, request):
|
||||||
"""Restart homeassistant."""
|
"""Stop homeassistant."""
|
||||||
if self.homeassistant.in_progress:
|
return asyncio.shield(self.homeassistant.stop(), loop=self.loop)
|
||||||
raise RuntimeError("Other task is in progress")
|
|
||||||
|
|
||||||
return await asyncio.shield(
|
@api_process
|
||||||
self.homeassistant.restart(), loop=self.loop)
|
def start(self, request):
|
||||||
|
"""Start homeassistant."""
|
||||||
|
return asyncio.shield(self.homeassistant.run(), loop=self.loop)
|
||||||
|
|
||||||
|
@api_process
|
||||||
|
def restart(self, request):
|
||||||
|
"""Restart homeassistant."""
|
||||||
|
return asyncio.shield(self.homeassistant.restart(), loop=self.loop)
|
||||||
|
|
||||||
@api_process_raw(CONTENT_TYPE_BINARY)
|
@api_process_raw(CONTENT_TYPE_BINARY)
|
||||||
def logs(self, request):
|
def logs(self, request):
|
||||||
"""Return homeassistant docker logs.
|
"""Return homeassistant docker logs."""
|
||||||
|
|
||||||
Return a coroutine.
|
|
||||||
"""
|
|
||||||
return self.homeassistant.logs()
|
return self.homeassistant.logs()
|
||||||
|
|
||||||
|
@api_process
|
||||||
|
async def check(self, request):
|
||||||
|
"""Check config of homeassistant."""
|
||||||
|
code, message = await self.homeassistant.check_config()
|
||||||
|
if not code:
|
||||||
|
raise RuntimeError(message)
|
||||||
|
|
||||||
|
return True
|
||||||
|
@@ -7,7 +7,9 @@ import voluptuous as vol
|
|||||||
from .util import api_process_hostcontrol, api_process, api_validate
|
from .util import api_process_hostcontrol, api_process, api_validate
|
||||||
from ..const import (
|
from ..const import (
|
||||||
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_TYPE, ATTR_HOSTNAME, ATTR_FEATURES,
|
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_TYPE, ATTR_HOSTNAME, ATTR_FEATURES,
|
||||||
ATTR_OS)
|
ATTR_OS, ATTR_SERIAL, ATTR_INPUT, ATTR_DISK, ATTR_AUDIO, ATTR_AUDIO_INPUT,
|
||||||
|
ATTR_AUDIO_OUTPUT)
|
||||||
|
from ..validate import ALSA_CHANNEL
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -15,15 +17,21 @@ SCHEMA_VERSION = vol.Schema({
|
|||||||
vol.Optional(ATTR_VERSION): vol.Coerce(str),
|
vol.Optional(ATTR_VERSION): vol.Coerce(str),
|
||||||
})
|
})
|
||||||
|
|
||||||
|
SCHEMA_OPTIONS = vol.Schema({
|
||||||
|
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
|
||||||
|
vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
class APIHost(object):
|
class APIHost(object):
|
||||||
"""Handle rest api for host functions."""
|
"""Handle rest api for host functions."""
|
||||||
|
|
||||||
def __init__(self, config, loop, host_control):
|
def __init__(self, config, loop, host_control, hardware):
|
||||||
"""Initialize host rest api part."""
|
"""Initialize host rest api part."""
|
||||||
self.config = config
|
self.config = config
|
||||||
self.loop = loop
|
self.loop = loop
|
||||||
self.host_control = host_control
|
self.host_control = host_control
|
||||||
|
self.local_hw = hardware
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
async def info(self, request):
|
async def info(self, request):
|
||||||
@@ -37,6 +45,18 @@ class APIHost(object):
|
|||||||
ATTR_OS: self.host_control.os_info,
|
ATTR_OS: self.host_control.os_info,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@api_process
|
||||||
|
async def options(self, request):
|
||||||
|
"""Process host options."""
|
||||||
|
body = await api_validate(SCHEMA_OPTIONS, request)
|
||||||
|
|
||||||
|
if ATTR_AUDIO_OUTPUT in body:
|
||||||
|
self.config.audio_output = body[ATTR_AUDIO_OUTPUT]
|
||||||
|
if ATTR_AUDIO_INPUT in body:
|
||||||
|
self.config.audio_input = body[ATTR_AUDIO_INPUT]
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
@api_process_hostcontrol
|
@api_process_hostcontrol
|
||||||
def reboot(self, request):
|
def reboot(self, request):
|
||||||
"""Reboot host."""
|
"""Reboot host."""
|
||||||
@@ -54,7 +74,17 @@ class APIHost(object):
|
|||||||
version = body.get(ATTR_VERSION, self.host_control.last_version)
|
version = body.get(ATTR_VERSION, self.host_control.last_version)
|
||||||
|
|
||||||
if version == self.host_control.version:
|
if version == self.host_control.version:
|
||||||
raise RuntimeError("Version is already in use")
|
raise RuntimeError("Version {} is already in use".format(version))
|
||||||
|
|
||||||
return await asyncio.shield(
|
return await asyncio.shield(
|
||||||
self.host_control.update(version=version), loop=self.loop)
|
self.host_control.update(version=version), loop=self.loop)
|
||||||
|
|
||||||
|
@api_process
|
||||||
|
async def hardware(self, request):
|
||||||
|
"""Return local hardware infos."""
|
||||||
|
return {
|
||||||
|
ATTR_SERIAL: self.local_hw.serial_devices,
|
||||||
|
ATTR_INPUT: self.local_hw.input_devices,
|
||||||
|
ATTR_DISK: self.local_hw.disk_devices,
|
||||||
|
ATTR_AUDIO: self.local_hw.audio_devices,
|
||||||
|
}
|
||||||
|
@@ -98,5 +98,5 @@ class APISecurity(object):
|
|||||||
session = hashlib.sha256(os.urandom(54)).hexdigest()
|
session = hashlib.sha256(os.urandom(54)).hexdigest()
|
||||||
|
|
||||||
# store session
|
# store session
|
||||||
self.config.security_sessions = (session, valid_until)
|
self.config.add_security_session(session, valid_until)
|
||||||
return {ATTR_SESSION: session}
|
return {ATTR_SESSION: session}
|
||||||
|
@@ -111,10 +111,10 @@ class APISnapshots(object):
|
|||||||
self.snapshots.do_snapshot_partial(**body), loop=self.loop)
|
self.snapshots.do_snapshot_partial(**body), loop=self.loop)
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
async def restore_full(self, request):
|
def restore_full(self, request):
|
||||||
"""Full-Restore a snapshot."""
|
"""Full-Restore a snapshot."""
|
||||||
snapshot = self._extract_snapshot(request)
|
snapshot = self._extract_snapshot(request)
|
||||||
return await asyncio.shield(
|
return asyncio.shield(
|
||||||
self.snapshots.do_restore_full(snapshot), loop=self.loop)
|
self.snapshots.do_restore_full(snapshot), loop=self.loop)
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
@@ -125,7 +125,8 @@ class APISnapshots(object):
|
|||||||
|
|
||||||
return await asyncio.shield(
|
return await asyncio.shield(
|
||||||
self.snapshots.do_restore_partial(snapshot, **body),
|
self.snapshots.do_restore_partial(snapshot, **body),
|
||||||
loop=self.loop)
|
loop=self.loop
|
||||||
|
)
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
async def remove(self, request):
|
async def remove(self, request):
|
||||||
|
@@ -10,7 +10,7 @@ from ..const import (
|
|||||||
HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_LOGO, ATTR_REPOSITORY,
|
HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_LOGO, ATTR_REPOSITORY,
|
||||||
ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_TIMEZONE,
|
ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_TIMEZONE,
|
||||||
ATTR_STATE, CONTENT_TYPE_BINARY)
|
ATTR_STATE, CONTENT_TYPE_BINARY)
|
||||||
from ..tools import validate_timezone
|
from ..validate import validate_timezone
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -30,7 +30,7 @@ class APISupervisor(object):
|
|||||||
"""Handle rest api for supervisor functions."""
|
"""Handle rest api for supervisor functions."""
|
||||||
|
|
||||||
def __init__(self, config, loop, supervisor, snapshots, addons,
|
def __init__(self, config, loop, supervisor, snapshots, addons,
|
||||||
host_control, websession):
|
host_control, updater):
|
||||||
"""Initialize supervisor rest api part."""
|
"""Initialize supervisor rest api part."""
|
||||||
self.config = config
|
self.config = config
|
||||||
self.loop = loop
|
self.loop = loop
|
||||||
@@ -38,7 +38,7 @@ class APISupervisor(object):
|
|||||||
self.addons = addons
|
self.addons = addons
|
||||||
self.snapshots = snapshots
|
self.snapshots = snapshots
|
||||||
self.host_control = host_control
|
self.host_control = host_control
|
||||||
self.websession = websession
|
self.updater = updater
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
async def ping(self, request):
|
async def ping(self, request):
|
||||||
@@ -64,8 +64,8 @@ class APISupervisor(object):
|
|||||||
|
|
||||||
return {
|
return {
|
||||||
ATTR_VERSION: HASSIO_VERSION,
|
ATTR_VERSION: HASSIO_VERSION,
|
||||||
ATTR_LAST_VERSION: self.config.last_hassio,
|
ATTR_LAST_VERSION: self.updater.version_hassio,
|
||||||
ATTR_BETA_CHANNEL: self.config.upstream_beta,
|
ATTR_BETA_CHANNEL: self.updater.beta_channel,
|
||||||
ATTR_ARCH: self.config.arch,
|
ATTR_ARCH: self.config.arch,
|
||||||
ATTR_TIMEZONE: self.config.timezone,
|
ATTR_TIMEZONE: self.config.timezone,
|
||||||
ATTR_ADDONS: list_addons,
|
ATTR_ADDONS: list_addons,
|
||||||
@@ -78,7 +78,7 @@ class APISupervisor(object):
|
|||||||
body = await api_validate(SCHEMA_OPTIONS, request)
|
body = await api_validate(SCHEMA_OPTIONS, request)
|
||||||
|
|
||||||
if ATTR_BETA_CHANNEL in body:
|
if ATTR_BETA_CHANNEL in body:
|
||||||
self.config.upstream_beta = body[ATTR_BETA_CHANNEL]
|
self.updater.beta_channel = body[ATTR_BETA_CHANNEL]
|
||||||
|
|
||||||
if ATTR_TIMEZONE in body:
|
if ATTR_TIMEZONE in body:
|
||||||
self.config.timezone = body[ATTR_TIMEZONE]
|
self.config.timezone = body[ATTR_TIMEZONE]
|
||||||
@@ -93,10 +93,10 @@ class APISupervisor(object):
|
|||||||
async def update(self, request):
|
async def update(self, request):
|
||||||
"""Update supervisor OS."""
|
"""Update supervisor OS."""
|
||||||
body = await api_validate(SCHEMA_VERSION, request)
|
body = await api_validate(SCHEMA_VERSION, request)
|
||||||
version = body.get(ATTR_VERSION, self.config.last_hassio)
|
version = body.get(ATTR_VERSION, self.updater.version_hassio)
|
||||||
|
|
||||||
if version == self.supervisor.version:
|
if version == self.supervisor.version:
|
||||||
raise RuntimeError("Version is already in use")
|
raise RuntimeError("Version {} is already in use".format(version))
|
||||||
|
|
||||||
return await asyncio.shield(
|
return await asyncio.shield(
|
||||||
self.supervisor.update(version), loop=self.loop)
|
self.supervisor.update(version), loop=self.loop)
|
||||||
@@ -107,7 +107,7 @@ class APISupervisor(object):
|
|||||||
tasks = [
|
tasks = [
|
||||||
self.addons.reload(),
|
self.addons.reload(),
|
||||||
self.snapshots.reload(),
|
self.snapshots.reload(),
|
||||||
self.config.fetch_update_infos(self.websession),
|
self.updater.fetch_data(),
|
||||||
self.host_control.load()
|
self.host_control.load()
|
||||||
]
|
]
|
||||||
results, _ = await asyncio.shield(
|
results, _ = await asyncio.shield(
|
||||||
@@ -121,8 +121,5 @@ class APISupervisor(object):
|
|||||||
|
|
||||||
@api_process_raw(CONTENT_TYPE_BINARY)
|
@api_process_raw(CONTENT_TYPE_BINARY)
|
||||||
def logs(self, request):
|
def logs(self, request):
|
||||||
"""Return supervisor docker logs.
|
"""Return supervisor docker logs."""
|
||||||
|
|
||||||
Return a coroutine.
|
|
||||||
"""
|
|
||||||
return self.supervisor.logs()
|
return self.supervisor.logs()
|
||||||
|
@@ -87,9 +87,6 @@ def api_process_raw(content):
|
|||||||
|
|
||||||
def api_return_error(message=None):
|
def api_return_error(message=None):
|
||||||
"""Return a API error message."""
|
"""Return a API error message."""
|
||||||
if message:
|
|
||||||
_LOGGER.error(message)
|
|
||||||
|
|
||||||
return web.json_response({
|
return web.json_response({
|
||||||
JSON_RESULT: RESULT_ERROR,
|
JSON_RESULT: RESULT_ERROR,
|
||||||
JSON_MESSAGE: message,
|
JSON_MESSAGE: message,
|
||||||
|
@@ -2,6 +2,7 @@
|
|||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import signal
|
import signal
|
||||||
|
import shutil
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
from colorlog import ColoredFormatter
|
from colorlog import ColoredFormatter
|
||||||
@@ -100,6 +101,7 @@ def initialize_logging():
|
|||||||
|
|
||||||
def check_environment():
|
def check_environment():
|
||||||
"""Check if all environment are exists."""
|
"""Check if all environment are exists."""
|
||||||
|
# check environment variables
|
||||||
for key in ('SUPERVISOR_SHARE', 'SUPERVISOR_NAME',
|
for key in ('SUPERVISOR_SHARE', 'SUPERVISOR_NAME',
|
||||||
'HOMEASSISTANT_REPOSITORY'):
|
'HOMEASSISTANT_REPOSITORY'):
|
||||||
try:
|
try:
|
||||||
@@ -108,10 +110,16 @@ def check_environment():
|
|||||||
_LOGGER.fatal("Can't find %s in env!", key)
|
_LOGGER.fatal("Can't find %s in env!", key)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
# check docker socket
|
||||||
if not SOCKET_DOCKER.is_socket():
|
if not SOCKET_DOCKER.is_socket():
|
||||||
_LOGGER.fatal("Can't find docker socket!")
|
_LOGGER.fatal("Can't find docker socket!")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
# check socat exec
|
||||||
|
if not shutil.which('socat'):
|
||||||
|
_LOGGER.fatal("Can0t find socat program!")
|
||||||
|
return False
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
173
hassio/config.py
173
hassio/config.py
@@ -4,55 +4,29 @@ import logging
|
|||||||
import os
|
import os
|
||||||
from pathlib import Path, PurePath
|
from pathlib import Path, PurePath
|
||||||
|
|
||||||
import voluptuous as vol
|
from .const import (
|
||||||
|
FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_SECURITY, ATTR_SESSIONS,
|
||||||
from .const import FILE_HASSIO_CONFIG, HASSIO_DATA
|
ATTR_PASSWORD, ATTR_TOTP, ATTR_TIMEZONE, ATTR_ADDONS_CUSTOM_LIST,
|
||||||
from .tools import fetch_last_versions, JsonConfig, validate_timezone
|
ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_LAST_BOOT)
|
||||||
|
from .tools import JsonConfig, parse_datetime
|
||||||
|
from .validate import SCHEMA_HASSIO_CONFIG
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
DATETIME_FORMAT = "%Y%m%d %H:%M:%S"
|
|
||||||
|
|
||||||
HOMEASSISTANT_CONFIG = PurePath("homeassistant")
|
HOMEASSISTANT_CONFIG = PurePath("homeassistant")
|
||||||
HOMEASSISTANT_LAST = 'homeassistant_last'
|
|
||||||
|
|
||||||
HASSIO_SSL = PurePath("ssl")
|
HASSIO_SSL = PurePath("ssl")
|
||||||
HASSIO_LAST = 'hassio_last'
|
|
||||||
|
|
||||||
ADDONS_CORE = PurePath("addons/core")
|
ADDONS_CORE = PurePath("addons/core")
|
||||||
ADDONS_LOCAL = PurePath("addons/local")
|
ADDONS_LOCAL = PurePath("addons/local")
|
||||||
ADDONS_GIT = PurePath("addons/git")
|
ADDONS_GIT = PurePath("addons/git")
|
||||||
ADDONS_DATA = PurePath("addons/data")
|
ADDONS_DATA = PurePath("addons/data")
|
||||||
ADDONS_CUSTOM_LIST = 'addons_custom_list'
|
|
||||||
|
|
||||||
BACKUP_DATA = PurePath("backup")
|
BACKUP_DATA = PurePath("backup")
|
||||||
SHARE_DATA = PurePath("share")
|
SHARE_DATA = PurePath("share")
|
||||||
TMP_DATA = PurePath("tmp")
|
TMP_DATA = PurePath("tmp")
|
||||||
|
|
||||||
UPSTREAM_BETA = 'upstream_beta'
|
DEFAULT_BOOT_TIME = datetime.utcfromtimestamp(0).isoformat()
|
||||||
API_ENDPOINT = 'api_endpoint'
|
|
||||||
TIMEZONE = 'timezone'
|
|
||||||
|
|
||||||
SECURITY_INITIALIZE = 'security_initialize'
|
|
||||||
SECURITY_TOTP = 'security_totp'
|
|
||||||
SECURITY_PASSWORD = 'security_password'
|
|
||||||
SECURITY_SESSIONS = 'security_sessions'
|
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=no-value-for-parameter
|
|
||||||
SCHEMA_CONFIG = vol.Schema({
|
|
||||||
vol.Optional(UPSTREAM_BETA, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(API_ENDPOINT): vol.Coerce(str),
|
|
||||||
vol.Optional(TIMEZONE, default='UTC'): validate_timezone,
|
|
||||||
vol.Optional(HOMEASSISTANT_LAST): vol.Coerce(str),
|
|
||||||
vol.Optional(HASSIO_LAST): vol.Coerce(str),
|
|
||||||
vol.Optional(ADDONS_CUSTOM_LIST, default=[]): [vol.Url()],
|
|
||||||
vol.Optional(SECURITY_INITIALIZE, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(SECURITY_TOTP): vol.Coerce(str),
|
|
||||||
vol.Optional(SECURITY_PASSWORD): vol.Coerce(str),
|
|
||||||
vol.Optional(SECURITY_SESSIONS, default={}):
|
|
||||||
{vol.Coerce(str): vol.Coerce(str)},
|
|
||||||
}, extra=vol.REMOVE_EXTRA)
|
|
||||||
|
|
||||||
|
|
||||||
class CoreConfig(JsonConfig):
|
class CoreConfig(JsonConfig):
|
||||||
@@ -60,64 +34,35 @@ class CoreConfig(JsonConfig):
|
|||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
"""Initialize config object."""
|
"""Initialize config object."""
|
||||||
super().__init__(FILE_HASSIO_CONFIG, SCHEMA_CONFIG)
|
super().__init__(FILE_HASSIO_CONFIG, SCHEMA_HASSIO_CONFIG)
|
||||||
self.arch = None
|
self.arch = None
|
||||||
|
|
||||||
async def fetch_update_infos(self, websession):
|
|
||||||
"""Read current versions from web."""
|
|
||||||
last = await fetch_last_versions(websession, beta=self.upstream_beta)
|
|
||||||
|
|
||||||
if last:
|
|
||||||
self._data.update({
|
|
||||||
HOMEASSISTANT_LAST: last.get('homeassistant'),
|
|
||||||
HASSIO_LAST: last.get('hassio'),
|
|
||||||
})
|
|
||||||
self.save()
|
|
||||||
return True
|
|
||||||
|
|
||||||
return False
|
|
||||||
|
|
||||||
@property
|
|
||||||
def api_endpoint(self):
|
|
||||||
"""Return IP address of api endpoint."""
|
|
||||||
return self._data[API_ENDPOINT]
|
|
||||||
|
|
||||||
@api_endpoint.setter
|
|
||||||
def api_endpoint(self, value):
|
|
||||||
"""Store IP address of api endpoint."""
|
|
||||||
self._data[API_ENDPOINT] = value
|
|
||||||
|
|
||||||
@property
|
|
||||||
def upstream_beta(self):
|
|
||||||
"""Return True if we run in beta upstream."""
|
|
||||||
return self._data[UPSTREAM_BETA]
|
|
||||||
|
|
||||||
@upstream_beta.setter
|
|
||||||
def upstream_beta(self, value):
|
|
||||||
"""Set beta upstream mode."""
|
|
||||||
self._data[UPSTREAM_BETA] = bool(value)
|
|
||||||
self.save()
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def timezone(self):
|
def timezone(self):
|
||||||
"""Return system timezone."""
|
"""Return system timezone."""
|
||||||
return self._data[TIMEZONE]
|
return self._data[ATTR_TIMEZONE]
|
||||||
|
|
||||||
@timezone.setter
|
@timezone.setter
|
||||||
def timezone(self, value):
|
def timezone(self, value):
|
||||||
"""Set system timezone."""
|
"""Set system timezone."""
|
||||||
self._data[TIMEZONE] = value
|
self._data[ATTR_TIMEZONE] = value
|
||||||
self.save()
|
self.save()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def last_homeassistant(self):
|
def last_boot(self):
|
||||||
"""Actual version of homeassistant."""
|
"""Return last boot datetime."""
|
||||||
return self._data.get(HOMEASSISTANT_LAST)
|
boot_str = self._data.get(ATTR_LAST_BOOT, DEFAULT_BOOT_TIME)
|
||||||
|
|
||||||
@property
|
boot_time = parse_datetime(boot_str)
|
||||||
def last_hassio(self):
|
if not boot_time:
|
||||||
"""Actual version of hassio."""
|
return datetime.utcfromtimestamp(1)
|
||||||
return self._data.get(HASSIO_LAST)
|
return boot_time
|
||||||
|
|
||||||
|
@last_boot.setter
|
||||||
|
def last_boot(self, value):
|
||||||
|
"""Set last boot datetime."""
|
||||||
|
self._data[ATTR_LAST_BOOT] = value.isoformat()
|
||||||
|
self.save()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def path_hassio(self):
|
def path_hassio(self):
|
||||||
@@ -207,73 +152,95 @@ class CoreConfig(JsonConfig):
|
|||||||
@property
|
@property
|
||||||
def addons_repositories(self):
|
def addons_repositories(self):
|
||||||
"""Return list of addons custom repositories."""
|
"""Return list of addons custom repositories."""
|
||||||
return self._data[ADDONS_CUSTOM_LIST]
|
return self._data[ATTR_ADDONS_CUSTOM_LIST]
|
||||||
|
|
||||||
@addons_repositories.setter
|
def add_addon_repository(self, repo):
|
||||||
def addons_repositories(self, repo):
|
|
||||||
"""Add a custom repository to list."""
|
"""Add a custom repository to list."""
|
||||||
if repo in self._data[ADDONS_CUSTOM_LIST]:
|
if repo in self._data[ATTR_ADDONS_CUSTOM_LIST]:
|
||||||
return
|
return
|
||||||
|
|
||||||
self._data[ADDONS_CUSTOM_LIST].append(repo)
|
self._data[ATTR_ADDONS_CUSTOM_LIST].append(repo)
|
||||||
self.save()
|
self.save()
|
||||||
|
|
||||||
def drop_addon_repository(self, repo):
|
def drop_addon_repository(self, repo):
|
||||||
"""Remove a custom repository from list."""
|
"""Remove a custom repository from list."""
|
||||||
if repo not in self._data[ADDONS_CUSTOM_LIST]:
|
if repo not in self._data[ATTR_ADDONS_CUSTOM_LIST]:
|
||||||
return
|
return
|
||||||
|
|
||||||
self._data[ADDONS_CUSTOM_LIST].remove(repo)
|
self._data[ATTR_ADDONS_CUSTOM_LIST].remove(repo)
|
||||||
self.save()
|
self.save()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def security_initialize(self):
|
def security_initialize(self):
|
||||||
"""Return is security was initialize."""
|
"""Return is security was initialize."""
|
||||||
return self._data[SECURITY_INITIALIZE]
|
return self._data[ATTR_SECURITY]
|
||||||
|
|
||||||
@security_initialize.setter
|
@security_initialize.setter
|
||||||
def security_initialize(self, value):
|
def security_initialize(self, value):
|
||||||
"""Set is security initialize."""
|
"""Set is security initialize."""
|
||||||
self._data[SECURITY_INITIALIZE] = value
|
self._data[ATTR_SECURITY] = value
|
||||||
self.save()
|
self.save()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def security_totp(self):
|
def security_totp(self):
|
||||||
"""Return the TOTP key."""
|
"""Return the TOTP key."""
|
||||||
return self._data.get(SECURITY_TOTP)
|
return self._data.get(ATTR_TOTP)
|
||||||
|
|
||||||
@security_totp.setter
|
@security_totp.setter
|
||||||
def security_totp(self, value):
|
def security_totp(self, value):
|
||||||
"""Set the TOTP key."""
|
"""Set the TOTP key."""
|
||||||
self._data[SECURITY_TOTP] = value
|
self._data[ATTR_TOTP] = value
|
||||||
self.save()
|
self.save()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def security_password(self):
|
def security_password(self):
|
||||||
"""Return the password key."""
|
"""Return the password key."""
|
||||||
return self._data.get(SECURITY_PASSWORD)
|
return self._data.get(ATTR_PASSWORD)
|
||||||
|
|
||||||
@security_password.setter
|
@security_password.setter
|
||||||
def security_password(self, value):
|
def security_password(self, value):
|
||||||
"""Set the password key."""
|
"""Set the password key."""
|
||||||
self._data[SECURITY_PASSWORD] = value
|
self._data[ATTR_PASSWORD] = value
|
||||||
self.save()
|
self.save()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def security_sessions(self):
|
def security_sessions(self):
|
||||||
"""Return api sessions."""
|
"""Return api sessions."""
|
||||||
return {session: datetime.strptime(until, DATETIME_FORMAT) for
|
return {
|
||||||
session, until in self._data[SECURITY_SESSIONS].items()}
|
session: parse_datetime(until) for
|
||||||
|
session, until in self._data[ATTR_SESSIONS].items()
|
||||||
|
}
|
||||||
|
|
||||||
@security_sessions.setter
|
def add_security_session(self, session, valid):
|
||||||
def security_sessions(self, value):
|
|
||||||
"""Set the a new session."""
|
"""Set the a new session."""
|
||||||
session, valid = value
|
self._data[ATTR_SESSIONS].update(
|
||||||
if valid is None:
|
{session: valid.isoformat()}
|
||||||
self._data[SECURITY_SESSIONS].pop(session, None)
|
)
|
||||||
else:
|
self.save()
|
||||||
self._data[SECURITY_SESSIONS].update(
|
|
||||||
{session: valid.strftime(DATETIME_FORMAT)}
|
def drop_security_session(self, session):
|
||||||
)
|
"""Delete the a session."""
|
||||||
|
self._data[ATTR_SESSIONS].pop(session, None)
|
||||||
|
self.save()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def audio_output(self):
|
||||||
|
"""Return ALSA audio output card,dev."""
|
||||||
|
return self._data.get(ATTR_AUDIO_OUTPUT)
|
||||||
|
|
||||||
|
@audio_output.setter
|
||||||
|
def audio_output(self, value):
|
||||||
|
"""Set ALSA audio output card,dev."""
|
||||||
|
self._data[ATTR_AUDIO_OUTPUT] = value
|
||||||
|
self.save()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def audio_input(self):
|
||||||
|
"""Return ALSA audio input card,dev."""
|
||||||
|
return self._data.get(ATTR_AUDIO_INPUT)
|
||||||
|
|
||||||
|
@audio_input.setter
|
||||||
|
def audio_input(self, value):
|
||||||
|
"""Set ALSA audio input card,dev."""
|
||||||
|
self._data[ATTR_AUDIO_INPUT] = value
|
||||||
self.save()
|
self.save()
|
||||||
|
@@ -1,12 +1,11 @@
|
|||||||
"""Const file for HassIO."""
|
"""Const file for HassIO."""
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
from ipaddress import ip_network
|
||||||
|
|
||||||
HASSIO_VERSION = '0.46'
|
HASSIO_VERSION = '0.62'
|
||||||
|
|
||||||
URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
|
URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
|
||||||
'hassio/master/version.json')
|
'hassio/{}/version.json')
|
||||||
URL_HASSIO_VERSION_BETA = ('https://raw.githubusercontent.com/home-assistant/'
|
|
||||||
'hassio/dev/version.json')
|
|
||||||
|
|
||||||
URL_HASSIO_ADDONS = 'https://github.com/home-assistant/hassio-addons'
|
URL_HASSIO_ADDONS = 'https://github.com/home-assistant/hassio-addons'
|
||||||
|
|
||||||
@@ -25,10 +24,15 @@ RESTART_EXIT_CODE = 100
|
|||||||
FILE_HASSIO_ADDONS = Path(HASSIO_DATA, "addons.json")
|
FILE_HASSIO_ADDONS = Path(HASSIO_DATA, "addons.json")
|
||||||
FILE_HASSIO_CONFIG = Path(HASSIO_DATA, "config.json")
|
FILE_HASSIO_CONFIG = Path(HASSIO_DATA, "config.json")
|
||||||
FILE_HASSIO_HOMEASSISTANT = Path(HASSIO_DATA, "homeassistant.json")
|
FILE_HASSIO_HOMEASSISTANT = Path(HASSIO_DATA, "homeassistant.json")
|
||||||
|
FILE_HASSIO_UPDATER = Path(HASSIO_DATA, "updater.json")
|
||||||
|
|
||||||
SOCKET_DOCKER = Path("/var/run/docker.sock")
|
SOCKET_DOCKER = Path("/var/run/docker.sock")
|
||||||
SOCKET_HC = Path("/var/run/hassio-hc.sock")
|
SOCKET_HC = Path("/var/run/hassio-hc.sock")
|
||||||
|
|
||||||
|
DOCKER_NETWORK = 'hassio'
|
||||||
|
DOCKER_NETWORK_MASK = ip_network('172.30.32.0/23')
|
||||||
|
DOCKER_NETWORK_RANGE = ip_network('172.30.33.0/24')
|
||||||
|
|
||||||
LABEL_VERSION = 'io.hass.version'
|
LABEL_VERSION = 'io.hass.version'
|
||||||
LABEL_ARCH = 'io.hass.arch'
|
LABEL_ARCH = 'io.hass.arch'
|
||||||
LABEL_TYPE = 'io.hass.type'
|
LABEL_TYPE = 'io.hass.type'
|
||||||
@@ -57,6 +61,7 @@ ATTR_SOURCE = 'source'
|
|||||||
ATTR_FEATURES = 'features'
|
ATTR_FEATURES = 'features'
|
||||||
ATTR_ADDONS = 'addons'
|
ATTR_ADDONS = 'addons'
|
||||||
ATTR_VERSION = 'version'
|
ATTR_VERSION = 'version'
|
||||||
|
ATTR_LAST_BOOT = 'last_boot'
|
||||||
ATTR_LAST_VERSION = 'last_version'
|
ATTR_LAST_VERSION = 'last_version'
|
||||||
ATTR_BETA_CHANNEL = 'beta_channel'
|
ATTR_BETA_CHANNEL = 'beta_channel'
|
||||||
ATTR_NAME = 'name'
|
ATTR_NAME = 'name'
|
||||||
@@ -83,6 +88,7 @@ ATTR_PASSWORD = 'password'
|
|||||||
ATTR_TOTP = 'totp'
|
ATTR_TOTP = 'totp'
|
||||||
ATTR_INITIALIZE = 'initialize'
|
ATTR_INITIALIZE = 'initialize'
|
||||||
ATTR_SESSION = 'session'
|
ATTR_SESSION = 'session'
|
||||||
|
ATTR_SESSIONS = 'sessions'
|
||||||
ATTR_LOCATON = 'location'
|
ATTR_LOCATON = 'location'
|
||||||
ATTR_BUILD = 'build'
|
ATTR_BUILD = 'build'
|
||||||
ATTR_DEVICES = 'devices'
|
ATTR_DEVICES = 'devices'
|
||||||
@@ -95,12 +101,23 @@ ATTR_USER = 'user'
|
|||||||
ATTR_SYSTEM = 'system'
|
ATTR_SYSTEM = 'system'
|
||||||
ATTR_SNAPSHOTS = 'snapshots'
|
ATTR_SNAPSHOTS = 'snapshots'
|
||||||
ATTR_HOMEASSISTANT = 'homeassistant'
|
ATTR_HOMEASSISTANT = 'homeassistant'
|
||||||
|
ATTR_HASSIO = 'hassio'
|
||||||
|
ATTR_HASSIO_API = 'hassio_api'
|
||||||
ATTR_FOLDERS = 'folders'
|
ATTR_FOLDERS = 'folders'
|
||||||
ATTR_SIZE = 'size'
|
ATTR_SIZE = 'size'
|
||||||
ATTR_TYPE = 'type'
|
ATTR_TYPE = 'type'
|
||||||
ATTR_TIMEOUT = 'timeout'
|
ATTR_TIMEOUT = 'timeout'
|
||||||
ATTR_AUTO_UPDATE = 'auto_update'
|
ATTR_AUTO_UPDATE = 'auto_update'
|
||||||
ATTR_CUSTOM = 'custom'
|
ATTR_CUSTOM = 'custom'
|
||||||
|
ATTR_AUDIO = 'audio'
|
||||||
|
ATTR_AUDIO_INPUT = 'audio_input'
|
||||||
|
ATTR_AUDIO_OUTPUT = 'audio_output'
|
||||||
|
ATTR_INPUT = 'input'
|
||||||
|
ATTR_OUTPUT = 'output'
|
||||||
|
ATTR_DISK = 'disk'
|
||||||
|
ATTR_SERIAL = 'serial'
|
||||||
|
ATTR_SECURITY = 'security'
|
||||||
|
ATTR_ADDONS_CUSTOM_LIST = 'addons_custom_list'
|
||||||
|
|
||||||
STARTUP_INITIALIZE = 'initialize'
|
STARTUP_INITIALIZE = 'initialize'
|
||||||
STARTUP_SYSTEM = 'system'
|
STARTUP_SYSTEM = 'system'
|
||||||
|
@@ -3,24 +3,27 @@ import asyncio
|
|||||||
import logging
|
import logging
|
||||||
|
|
||||||
import aiohttp
|
import aiohttp
|
||||||
import docker
|
|
||||||
|
|
||||||
from .addons import AddonManager
|
from .addons import AddonManager
|
||||||
from .api import RestAPI
|
from .api import RestAPI
|
||||||
from .host_control import HostControl
|
from .host_control import HostControl
|
||||||
from .const import (
|
from .const import (
|
||||||
SOCKET_DOCKER, RUN_UPDATE_INFO_TASKS, RUN_RELOAD_ADDONS_TASKS,
|
RUN_UPDATE_INFO_TASKS, RUN_RELOAD_ADDONS_TASKS,
|
||||||
RUN_UPDATE_SUPERVISOR_TASKS, RUN_WATCHDOG_HOMEASSISTANT,
|
RUN_UPDATE_SUPERVISOR_TASKS, RUN_WATCHDOG_HOMEASSISTANT,
|
||||||
RUN_CLEANUP_API_SESSIONS, STARTUP_SYSTEM, STARTUP_SERVICES,
|
RUN_CLEANUP_API_SESSIONS, STARTUP_SYSTEM, STARTUP_SERVICES,
|
||||||
STARTUP_APPLICATION, STARTUP_INITIALIZE, RUN_RELOAD_SNAPSHOTS_TASKS,
|
STARTUP_APPLICATION, STARTUP_INITIALIZE, RUN_RELOAD_SNAPSHOTS_TASKS,
|
||||||
RUN_UPDATE_ADDONS_TASKS)
|
RUN_UPDATE_ADDONS_TASKS)
|
||||||
|
from .hardware import Hardware
|
||||||
from .homeassistant import HomeAssistant
|
from .homeassistant import HomeAssistant
|
||||||
from .scheduler import Scheduler
|
from .scheduler import Scheduler
|
||||||
|
from .dock import DockerAPI
|
||||||
from .dock.supervisor import DockerSupervisor
|
from .dock.supervisor import DockerSupervisor
|
||||||
|
from .dns import DNSForward
|
||||||
from .snapshots import SnapshotsManager
|
from .snapshots import SnapshotsManager
|
||||||
|
from .updater import Updater
|
||||||
from .tasks import (
|
from .tasks import (
|
||||||
hassio_update, homeassistant_watchdog, api_sessions_cleanup, addons_update)
|
hassio_update, homeassistant_watchdog, api_sessions_cleanup, addons_update)
|
||||||
from .tools import get_local_ip, fetch_timezone
|
from .tools import fetch_timezone
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -34,23 +37,26 @@ class HassIO(object):
|
|||||||
self.loop = loop
|
self.loop = loop
|
||||||
self.config = config
|
self.config = config
|
||||||
self.websession = aiohttp.ClientSession(loop=loop)
|
self.websession = aiohttp.ClientSession(loop=loop)
|
||||||
|
self.updater = Updater(config, loop, self.websession)
|
||||||
self.scheduler = Scheduler(loop)
|
self.scheduler = Scheduler(loop)
|
||||||
self.api = RestAPI(config, loop)
|
self.api = RestAPI(config, loop)
|
||||||
self.dock = docker.DockerClient(
|
self.hardware = Hardware()
|
||||||
base_url="unix:/{}".format(str(SOCKET_DOCKER)), version='auto')
|
self.docker = DockerAPI()
|
||||||
|
self.dns = DNSForward()
|
||||||
|
|
||||||
# init basic docker container
|
# init basic docker container
|
||||||
self.supervisor = DockerSupervisor(config, loop, self.dock, self.stop)
|
self.supervisor = DockerSupervisor(
|
||||||
|
config, loop, self.docker, self.stop)
|
||||||
|
|
||||||
# init homeassistant
|
# init homeassistant
|
||||||
self.homeassistant = HomeAssistant(
|
self.homeassistant = HomeAssistant(
|
||||||
config, loop, self.dock, self.websession)
|
config, loop, self.docker, self.updater)
|
||||||
|
|
||||||
# init HostControl
|
# init HostControl
|
||||||
self.host_control = HostControl(loop)
|
self.host_control = HostControl(loop)
|
||||||
|
|
||||||
# init addon system
|
# init addon system
|
||||||
self.addons = AddonManager(config, loop, self.dock)
|
self.addons = AddonManager(config, loop, self.docker)
|
||||||
|
|
||||||
# init snapshot system
|
# init snapshot system
|
||||||
self.snapshots = SnapshotsManager(
|
self.snapshots = SnapshotsManager(
|
||||||
@@ -60,15 +66,12 @@ class HassIO(object):
|
|||||||
"""Setup HassIO orchestration."""
|
"""Setup HassIO orchestration."""
|
||||||
# supervisor
|
# supervisor
|
||||||
if not await self.supervisor.attach():
|
if not await self.supervisor.attach():
|
||||||
_LOGGER.fatal("Can't attach to supervisor docker container!")
|
_LOGGER.fatal("Can't setup supervisor docker container!")
|
||||||
await self.supervisor.cleanup()
|
await self.supervisor.cleanup()
|
||||||
|
|
||||||
# set running arch
|
# set running arch
|
||||||
self.config.arch = self.supervisor.arch
|
self.config.arch = self.supervisor.arch
|
||||||
|
|
||||||
# set api endpoint
|
|
||||||
self.config.api_endpoint = await get_local_ip(self.loop)
|
|
||||||
|
|
||||||
# update timezone
|
# update timezone
|
||||||
if self.config.timezone == 'UTC':
|
if self.config.timezone == 'UTC':
|
||||||
self.config.timezone = await fetch_timezone(self.websession)
|
self.config.timezone = await fetch_timezone(self.websession)
|
||||||
@@ -81,11 +84,11 @@ class HassIO(object):
|
|||||||
self.host_control.load, RUN_UPDATE_INFO_TASKS)
|
self.host_control.load, RUN_UPDATE_INFO_TASKS)
|
||||||
|
|
||||||
# rest api views
|
# rest api views
|
||||||
self.api.register_host(self.host_control)
|
self.api.register_host(self.host_control, self.hardware)
|
||||||
self.api.register_network(self.host_control)
|
self.api.register_network(self.host_control)
|
||||||
self.api.register_supervisor(
|
self.api.register_supervisor(
|
||||||
self.supervisor, self.snapshots, self.addons, self.host_control,
|
self.supervisor, self.snapshots, self.addons, self.host_control,
|
||||||
self.websession)
|
self.updater)
|
||||||
self.api.register_homeassistant(self.homeassistant)
|
self.api.register_homeassistant(self.homeassistant)
|
||||||
self.api.register_addons(self.addons)
|
self.api.register_addons(self.addons)
|
||||||
self.api.register_security()
|
self.api.register_security()
|
||||||
@@ -111,13 +114,16 @@ class HassIO(object):
|
|||||||
|
|
||||||
# schedule self update task
|
# schedule self update task
|
||||||
self.scheduler.register_task(
|
self.scheduler.register_task(
|
||||||
hassio_update(self.config, self.supervisor, self.websession),
|
hassio_update(self.supervisor, self.updater),
|
||||||
RUN_UPDATE_SUPERVISOR_TASKS)
|
RUN_UPDATE_SUPERVISOR_TASKS)
|
||||||
|
|
||||||
# schedule snapshot update tasks
|
# schedule snapshot update tasks
|
||||||
self.scheduler.register_task(
|
self.scheduler.register_task(
|
||||||
self.snapshots.reload, RUN_RELOAD_SNAPSHOTS_TASKS, now=True)
|
self.snapshots.reload, RUN_RELOAD_SNAPSHOTS_TASKS, now=True)
|
||||||
|
|
||||||
|
# start dns forwarding
|
||||||
|
self.loop.create_task(self.dns.start())
|
||||||
|
|
||||||
# start addon mark as initialize
|
# start addon mark as initialize
|
||||||
await self.addons.auto_boot(STARTUP_INITIALIZE)
|
await self.addons.auto_boot(STARTUP_INITIALIZE)
|
||||||
|
|
||||||
@@ -126,32 +132,36 @@ class HassIO(object):
|
|||||||
# on release channel, try update itself
|
# on release channel, try update itself
|
||||||
# on beta channel, only read new versions
|
# on beta channel, only read new versions
|
||||||
await asyncio.wait(
|
await asyncio.wait(
|
||||||
[hassio_update(self.config, self.supervisor, self.websession)()],
|
[hassio_update(self.supervisor, self.updater)()],
|
||||||
loop=self.loop
|
loop=self.loop
|
||||||
)
|
)
|
||||||
|
|
||||||
# start api
|
# start api
|
||||||
await self.api.start()
|
await self.api.start()
|
||||||
_LOGGER.info("Start hassio api on %s", self.config.api_endpoint)
|
_LOGGER.info("Start hassio api on %s", self.docker.network.supervisor)
|
||||||
|
|
||||||
# start addon mark as system
|
|
||||||
await self.addons.auto_boot(STARTUP_SYSTEM)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# HomeAssistant is already running / supervisor have only reboot
|
# HomeAssistant is already running / supervisor have only reboot
|
||||||
if await self.homeassistant.is_running():
|
if self.hardware.last_boot == self.config.last_boot:
|
||||||
_LOGGER.info("HassIO reboot detected")
|
_LOGGER.info("HassIO reboot detected")
|
||||||
return
|
return
|
||||||
|
|
||||||
|
# start addon mark as system
|
||||||
|
await self.addons.auto_boot(STARTUP_SYSTEM)
|
||||||
|
|
||||||
# start addon mark as services
|
# start addon mark as services
|
||||||
await self.addons.auto_boot(STARTUP_SERVICES)
|
await self.addons.auto_boot(STARTUP_SERVICES)
|
||||||
|
|
||||||
# run HomeAssistant
|
# run HomeAssistant
|
||||||
await self.homeassistant.run()
|
if self.homeassistant.boot:
|
||||||
|
await self.homeassistant.run()
|
||||||
|
|
||||||
# start addon mark as application
|
# start addon mark as application
|
||||||
await self.addons.auto_boot(STARTUP_APPLICATION)
|
await self.addons.auto_boot(STARTUP_APPLICATION)
|
||||||
|
|
||||||
|
# store new last boot
|
||||||
|
self.config.last_boot = self.hardware.last_boot
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
# schedule homeassistant watchdog
|
# schedule homeassistant watchdog
|
||||||
self.scheduler.register_task(
|
self.scheduler.register_task(
|
||||||
@@ -169,7 +179,7 @@ class HassIO(object):
|
|||||||
|
|
||||||
# process stop tasks
|
# process stop tasks
|
||||||
self.websession.close()
|
self.websession.close()
|
||||||
await self.api.stop()
|
await asyncio.wait([self.api.stop(), self.dns.stop()], loop=self.loop)
|
||||||
|
|
||||||
self.exit_code = exit_code
|
self.exit_code = exit_code
|
||||||
self.loop.stop()
|
self.loop.stop()
|
||||||
|
40
hassio/dns.py
Normal file
40
hassio/dns.py
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
"""Setup the internal DNS service for host applications."""
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
import shlex
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
COMMAND = "socat UDP-RECVFROM:53,fork UDP-SENDTO:127.0.0.11:53"
|
||||||
|
|
||||||
|
|
||||||
|
class DNSForward(object):
|
||||||
|
"""Manage DNS forwarding to internal DNS."""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
"""Initialize DNS forwarding."""
|
||||||
|
self.proc = None
|
||||||
|
|
||||||
|
async def start(self):
|
||||||
|
"""Start DNS forwarding."""
|
||||||
|
try:
|
||||||
|
self.proc = await asyncio.create_subprocess_exec(
|
||||||
|
*shlex.split(COMMAND),
|
||||||
|
stdin=asyncio.subprocess.DEVNULL,
|
||||||
|
stdout=asyncio.subprocess.DEVNULL,
|
||||||
|
stderr=asyncio.subprocess.DEVNULL,
|
||||||
|
)
|
||||||
|
except OSError as err:
|
||||||
|
_LOGGER.error("Can't start DNS forwarding -> %s", err)
|
||||||
|
else:
|
||||||
|
_LOGGER.info("Start DNS port forwarding for host add-ons")
|
||||||
|
|
||||||
|
async def stop(self):
|
||||||
|
"""Stop DNS forwarding."""
|
||||||
|
if not self.proc:
|
||||||
|
_LOGGER.warning("DNS forwarding is not running!")
|
||||||
|
return
|
||||||
|
|
||||||
|
self.proc.kill()
|
||||||
|
await self.proc.wait()
|
||||||
|
_LOGGER.info("Stop DNS forwarding")
|
@@ -1,353 +1,108 @@
|
|||||||
"""Init file for HassIO docker object."""
|
"""Init file for HassIO docker object."""
|
||||||
import asyncio
|
|
||||||
from contextlib import suppress
|
from contextlib import suppress
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
import docker
|
import docker
|
||||||
|
|
||||||
from ..const import LABEL_VERSION, LABEL_ARCH
|
from .network import DockerNetwork
|
||||||
|
from ..const import SOCKET_DOCKER
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class DockerBase(object):
|
class DockerAPI(object):
|
||||||
"""Docker hassio wrapper."""
|
"""Docker hassio wrapper.
|
||||||
|
|
||||||
def __init__(self, config, loop, dock, image=None, timeout=30):
|
This class is not AsyncIO safe!
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
"""Initialize docker base wrapper."""
|
"""Initialize docker base wrapper."""
|
||||||
self.config = config
|
self.docker = docker.DockerClient(
|
||||||
self.loop = loop
|
base_url="unix:/{}".format(str(SOCKET_DOCKER)), version='auto')
|
||||||
self.dock = dock
|
self.network = DockerNetwork(self.docker)
|
||||||
self.image = image
|
|
||||||
self.timeout = timeout
|
|
||||||
self.version = None
|
|
||||||
self.arch = None
|
|
||||||
self._lock = asyncio.Lock(loop=loop)
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def name(self):
|
def images(self):
|
||||||
"""Return name of docker container."""
|
"""Return api images."""
|
||||||
return None
|
return self.docker.images
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def in_progress(self):
|
def containers(self):
|
||||||
"""Return True if a task is in progress."""
|
"""Return api containers."""
|
||||||
return self._lock.locked()
|
return self.docker.containers
|
||||||
|
|
||||||
def process_metadata(self, metadata, force=False):
|
@property
|
||||||
"""Read metadata and set it to object."""
|
def api(self):
|
||||||
# read image
|
"""Return api containers."""
|
||||||
if not self.image:
|
return self.docker.api
|
||||||
self.image = metadata['Config']['Image']
|
|
||||||
|
|
||||||
# read version
|
def run(self, image, **kwargs):
|
||||||
need_version = force or not self.version
|
""""Create a docker and run it.
|
||||||
if need_version and LABEL_VERSION in metadata['Config']['Labels']:
|
|
||||||
self.version = metadata['Config']['Labels'][LABEL_VERSION]
|
|
||||||
elif need_version:
|
|
||||||
_LOGGER.warning("Can't read version from %s", self.name)
|
|
||||||
|
|
||||||
# read arch
|
|
||||||
need_arch = force or not self.arch
|
|
||||||
if need_arch and LABEL_ARCH in metadata['Config']['Labels']:
|
|
||||||
self.arch = metadata['Config']['Labels'][LABEL_ARCH]
|
|
||||||
|
|
||||||
async def install(self, tag):
|
|
||||||
"""Pull docker image."""
|
|
||||||
if self._lock.locked():
|
|
||||||
_LOGGER.error("Can't excute install while a task is in progress")
|
|
||||||
return False
|
|
||||||
|
|
||||||
async with self._lock:
|
|
||||||
return await self.loop.run_in_executor(None, self._install, tag)
|
|
||||||
|
|
||||||
def _install(self, tag):
|
|
||||||
"""Pull docker image.
|
|
||||||
|
|
||||||
Need run inside executor.
|
Need run inside executor.
|
||||||
"""
|
"""
|
||||||
|
name = kwargs.get('name', image)
|
||||||
|
network_mode = kwargs.get('network_mode')
|
||||||
|
hostname = kwargs.get('hostname')
|
||||||
|
|
||||||
|
# setup network
|
||||||
|
if network_mode:
|
||||||
|
kwargs['dns'] = [str(self.network.supervisor)]
|
||||||
|
else:
|
||||||
|
kwargs['network'] = None
|
||||||
|
|
||||||
|
# create container
|
||||||
try:
|
try:
|
||||||
_LOGGER.info("Pull image %s tag %s.", self.image, tag)
|
container = self.docker.containers.create(image, **kwargs)
|
||||||
image = self.dock.images.pull("{}:{}".format(self.image, tag))
|
except docker.errors.DockerException as err:
|
||||||
|
_LOGGER.error("Can't create container from %s -> %s", name, err)
|
||||||
image.tag(self.image, tag='latest')
|
|
||||||
self.process_metadata(image.attrs, force=True)
|
|
||||||
except docker.errors.APIError as err:
|
|
||||||
_LOGGER.error("Can't install %s:%s -> %s.", self.image, tag, err)
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
_LOGGER.info("Tag image %s with version %s as latest", self.image, tag)
|
# attach network
|
||||||
return True
|
if not network_mode:
|
||||||
|
alias = [hostname] if hostname else None
|
||||||
def exists(self):
|
if self.network.attach_container(container, alias=alias):
|
||||||
"""Return True if docker image exists in local repo.
|
self.network.detach_default_bridge(container)
|
||||||
|
|
||||||
Return a Future.
|
|
||||||
"""
|
|
||||||
return self.loop.run_in_executor(None, self._exists)
|
|
||||||
|
|
||||||
def _exists(self):
|
|
||||||
"""Return True if docker image exists in local repo.
|
|
||||||
|
|
||||||
Need run inside executor.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
self.dock.images.get(self.image)
|
|
||||||
except docker.errors.DockerException:
|
|
||||||
return False
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
def is_running(self):
|
|
||||||
"""Return True if docker is Running.
|
|
||||||
|
|
||||||
Return a Future.
|
|
||||||
"""
|
|
||||||
return self.loop.run_in_executor(None, self._is_running)
|
|
||||||
|
|
||||||
def _is_running(self):
|
|
||||||
"""Return True if docker is Running.
|
|
||||||
|
|
||||||
Need run inside executor.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
container = self.dock.containers.get(self.name)
|
|
||||||
image = self.dock.images.get(self.image)
|
|
||||||
except docker.errors.DockerException:
|
|
||||||
return False
|
|
||||||
|
|
||||||
# container is not running
|
|
||||||
if container.status != 'running':
|
|
||||||
return False
|
|
||||||
|
|
||||||
# we run on a old image, stop and start it
|
|
||||||
if container.image.id != image.id:
|
|
||||||
return False
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
async def attach(self):
|
|
||||||
"""Attach to running docker container."""
|
|
||||||
if self._lock.locked():
|
|
||||||
_LOGGER.error("Can't excute attach while a task is in progress")
|
|
||||||
return False
|
|
||||||
|
|
||||||
async with self._lock:
|
|
||||||
return await self.loop.run_in_executor(None, self._attach)
|
|
||||||
|
|
||||||
def _attach(self):
|
|
||||||
"""Attach to running docker container.
|
|
||||||
|
|
||||||
Need run inside executor.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
if self.image:
|
|
||||||
obj_data = self.dock.images.get(self.image).attrs
|
|
||||||
else:
|
else:
|
||||||
obj_data = self.dock.containers.get(self.name).attrs
|
_LOGGER.warning("Can't attach %s to hassio-net!", name)
|
||||||
except docker.errors.DockerException:
|
|
||||||
return False
|
|
||||||
|
|
||||||
self.process_metadata(obj_data)
|
# run container
|
||||||
_LOGGER.info(
|
try:
|
||||||
"Attach to image %s with version %s", self.image, self.version)
|
container.start()
|
||||||
|
except docker.errors.DockerException as err:
|
||||||
|
_LOGGER.error("Can't start %s -> %s", name, err)
|
||||||
|
return False
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
async def run(self):
|
def run_command(self, image, command=None, **kwargs):
|
||||||
"""Run docker image."""
|
"""Create a temporary container and run command.
|
||||||
if self._lock.locked():
|
|
||||||
_LOGGER.error("Can't excute run while a task is in progress")
|
|
||||||
return False
|
|
||||||
|
|
||||||
async with self._lock:
|
|
||||||
return await self.loop.run_in_executor(None, self._run)
|
|
||||||
|
|
||||||
def _run(self):
|
|
||||||
"""Run docker image.
|
|
||||||
|
|
||||||
Need run inside executor.
|
Need run inside executor.
|
||||||
"""
|
"""
|
||||||
raise NotImplementedError()
|
stdout = kwargs.get('stdout', True)
|
||||||
|
stderr = kwargs.get('stderr', True)
|
||||||
|
|
||||||
async def stop(self):
|
_LOGGER.info("Run command '%s' on %s", command, image)
|
||||||
"""Stop/remove docker container."""
|
|
||||||
if self._lock.locked():
|
|
||||||
_LOGGER.error("Can't excute stop while a task is in progress")
|
|
||||||
return False
|
|
||||||
|
|
||||||
async with self._lock:
|
|
||||||
await self.loop.run_in_executor(None, self._stop)
|
|
||||||
return True
|
|
||||||
|
|
||||||
def _stop(self):
|
|
||||||
"""Stop/remove and remove docker container.
|
|
||||||
|
|
||||||
Need run inside executor.
|
|
||||||
"""
|
|
||||||
try:
|
try:
|
||||||
container = self.dock.containers.get(self.name)
|
container = self.docker.containers.run(
|
||||||
except docker.errors.DockerException:
|
image,
|
||||||
return
|
command=command,
|
||||||
|
network=self.network.name,
|
||||||
|
**kwargs
|
||||||
|
)
|
||||||
|
|
||||||
if container.status == 'running':
|
# wait until command is done
|
||||||
_LOGGER.info("Stop %s docker application", self.image)
|
exit_code = container.wait()
|
||||||
with suppress(docker.errors.DockerException):
|
output = container.logs(stdout=stdout, stderr=stderr)
|
||||||
container.stop(timeout=self.timeout)
|
|
||||||
|
|
||||||
|
except docker.errors.DockerException as err:
|
||||||
|
_LOGGER.error("Can't execute command -> %s", err)
|
||||||
|
return (None, b"")
|
||||||
|
|
||||||
|
# cleanup container
|
||||||
with suppress(docker.errors.DockerException):
|
with suppress(docker.errors.DockerException):
|
||||||
_LOGGER.info("Clean %s docker application", self.image)
|
|
||||||
container.remove(force=True)
|
container.remove(force=True)
|
||||||
|
|
||||||
async def remove(self):
|
return (exit_code, output)
|
||||||
"""Remove docker images."""
|
|
||||||
if self._lock.locked():
|
|
||||||
_LOGGER.error("Can't excute remove while a task is in progress")
|
|
||||||
return False
|
|
||||||
|
|
||||||
async with self._lock:
|
|
||||||
return await self.loop.run_in_executor(None, self._remove)
|
|
||||||
|
|
||||||
def _remove(self):
|
|
||||||
"""remove docker images.
|
|
||||||
|
|
||||||
Need run inside executor.
|
|
||||||
"""
|
|
||||||
# cleanup container
|
|
||||||
self._stop()
|
|
||||||
|
|
||||||
_LOGGER.info(
|
|
||||||
"Remove docker %s with latest and %s", self.image, self.version)
|
|
||||||
|
|
||||||
try:
|
|
||||||
with suppress(docker.errors.ImageNotFound):
|
|
||||||
self.dock.images.remove(
|
|
||||||
image="{}:latest".format(self.image), force=True)
|
|
||||||
|
|
||||||
with suppress(docker.errors.ImageNotFound):
|
|
||||||
self.dock.images.remove(
|
|
||||||
image="{}:{}".format(self.image, self.version), force=True)
|
|
||||||
|
|
||||||
except docker.errors.DockerException as err:
|
|
||||||
_LOGGER.warning("Can't remove image %s -> %s", self.image, err)
|
|
||||||
return False
|
|
||||||
|
|
||||||
# clean metadata
|
|
||||||
self.version = None
|
|
||||||
self.arch = None
|
|
||||||
return True
|
|
||||||
|
|
||||||
async def update(self, tag):
|
|
||||||
"""Update a docker image."""
|
|
||||||
if self._lock.locked():
|
|
||||||
_LOGGER.error("Can't excute update while a task is in progress")
|
|
||||||
return False
|
|
||||||
|
|
||||||
async with self._lock:
|
|
||||||
return await self.loop.run_in_executor(None, self._update, tag)
|
|
||||||
|
|
||||||
def _update(self, tag):
|
|
||||||
"""Update a docker image.
|
|
||||||
|
|
||||||
Need run inside executor.
|
|
||||||
"""
|
|
||||||
was_running = self._is_running()
|
|
||||||
|
|
||||||
_LOGGER.info(
|
|
||||||
"Update docker %s with %s:%s", self.version, self.image, tag)
|
|
||||||
|
|
||||||
# update docker image
|
|
||||||
if not self._install(tag):
|
|
||||||
return False
|
|
||||||
|
|
||||||
# run or cleanup container
|
|
||||||
if was_running:
|
|
||||||
self._run()
|
|
||||||
else:
|
|
||||||
self._stop()
|
|
||||||
|
|
||||||
# cleanup images
|
|
||||||
self._cleanup()
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
async def logs(self):
|
|
||||||
"""Return docker logs of container."""
|
|
||||||
if self._lock.locked():
|
|
||||||
_LOGGER.error("Can't excute logs while a task is in progress")
|
|
||||||
return b""
|
|
||||||
|
|
||||||
async with self._lock:
|
|
||||||
return await self.loop.run_in_executor(None, self._logs)
|
|
||||||
|
|
||||||
def _logs(self):
|
|
||||||
"""Return docker logs of container.
|
|
||||||
|
|
||||||
Need run inside executor.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
container = self.dock.containers.get(self.name)
|
|
||||||
except docker.errors.DockerException:
|
|
||||||
return b""
|
|
||||||
|
|
||||||
try:
|
|
||||||
return container.logs(tail=100, stdout=True, stderr=True)
|
|
||||||
except docker.errors.DockerException as err:
|
|
||||||
_LOGGER.warning("Can't grap logs from %s -> %s", self.image, err)
|
|
||||||
|
|
||||||
async def restart(self):
|
|
||||||
"""Restart docker container."""
|
|
||||||
if self._lock.locked():
|
|
||||||
_LOGGER.error("Can't excute restart while a task is in progress")
|
|
||||||
return False
|
|
||||||
|
|
||||||
async with self._lock:
|
|
||||||
return await self.loop.run_in_executor(None, self._restart)
|
|
||||||
|
|
||||||
def _restart(self):
|
|
||||||
"""Restart docker container.
|
|
||||||
|
|
||||||
Need run inside executor.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
container = self.dock.containers.get(self.name)
|
|
||||||
except docker.errors.DockerException:
|
|
||||||
return False
|
|
||||||
|
|
||||||
_LOGGER.info("Restart %s", self.image)
|
|
||||||
|
|
||||||
try:
|
|
||||||
container.restart(timeout=self.timeout)
|
|
||||||
except docker.errors.DockerException as err:
|
|
||||||
_LOGGER.warning("Can't restart %s -> %s", self.image, err)
|
|
||||||
return False
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
async def cleanup(self):
|
|
||||||
"""Check if old version exists and cleanup."""
|
|
||||||
if self._lock.locked():
|
|
||||||
_LOGGER.error("Can't excute cleanup while a task is in progress")
|
|
||||||
return False
|
|
||||||
|
|
||||||
async with self._lock:
|
|
||||||
await self.loop.run_in_executor(None, self._cleanup)
|
|
||||||
|
|
||||||
def _cleanup(self):
|
|
||||||
"""Check if old version exists and cleanup.
|
|
||||||
|
|
||||||
Need run inside executor.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
latest = self.dock.images.get(self.image)
|
|
||||||
except docker.errors.DockerException:
|
|
||||||
_LOGGER.warning("Can't find %s for cleanup", self.image)
|
|
||||||
return
|
|
||||||
|
|
||||||
for image in self.dock.images.list(name=self.image):
|
|
||||||
if latest.id == image.id:
|
|
||||||
continue
|
|
||||||
|
|
||||||
with suppress(docker.errors.DockerException):
|
|
||||||
_LOGGER.info("Cleanup docker images: %s", image.tags)
|
|
||||||
self.dock.images.remove(image.id, force=True)
|
|
||||||
|
@@ -6,21 +6,23 @@ import shutil
|
|||||||
import docker
|
import docker
|
||||||
import requests
|
import requests
|
||||||
|
|
||||||
from . import DockerBase
|
from .interface import DockerInterface
|
||||||
from .util import dockerfile_template
|
from .util import dockerfile_template, docker_process
|
||||||
from ..const import (
|
from ..const import (
|
||||||
META_ADDON, MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE)
|
META_ADDON, MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE)
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
AUDIO_DEVICE = "/dev/snd:/dev/snd:rwm"
|
||||||
|
|
||||||
class DockerAddon(DockerBase):
|
|
||||||
|
class DockerAddon(DockerInterface):
|
||||||
"""Docker hassio wrapper for HomeAssistant."""
|
"""Docker hassio wrapper for HomeAssistant."""
|
||||||
|
|
||||||
def __init__(self, config, loop, dock, addon):
|
def __init__(self, config, loop, api, addon):
|
||||||
"""Initialize docker homeassistant wrapper."""
|
"""Initialize docker homeassistant wrapper."""
|
||||||
super().__init__(
|
super().__init__(
|
||||||
config, loop, dock, image=addon.image, timeout=addon.timeout)
|
config, loop, api, image=addon.image, timeout=addon.timeout)
|
||||||
self.addon = addon
|
self.addon = addon
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -28,16 +30,52 @@ class DockerAddon(DockerBase):
|
|||||||
"""Return name of docker container."""
|
"""Return name of docker container."""
|
||||||
return "addon_{}".format(self.addon.slug)
|
return "addon_{}".format(self.addon.slug)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def hostname(self):
|
||||||
|
"""Return slug/id of addon."""
|
||||||
|
return self.addon.slug.replace('_', '-')
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def environment(self):
|
def environment(self):
|
||||||
"""Return environment for docker add-on."""
|
"""Return environment for docker add-on."""
|
||||||
addon_env = self.addon.environment or {}
|
addon_env = self.addon.environment or {}
|
||||||
|
if self.addon.with_audio:
|
||||||
|
addon_env.update({
|
||||||
|
'ALSA_OUTPUT': self.addon.audio_output,
|
||||||
|
'ALSA_INPUT': self.addon.audio_input,
|
||||||
|
})
|
||||||
|
|
||||||
return {
|
return {
|
||||||
**addon_env,
|
**addon_env,
|
||||||
'TZ': self.config.timezone,
|
'TZ': self.config.timezone,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@property
|
||||||
|
def devices(self):
|
||||||
|
"""Return needed devices."""
|
||||||
|
devices = self.addon.devices or []
|
||||||
|
|
||||||
|
# use audio devices
|
||||||
|
if self.addon.with_audio and AUDIO_DEVICE not in devices:
|
||||||
|
devices.append(AUDIO_DEVICE)
|
||||||
|
|
||||||
|
# Return None if no devices is present
|
||||||
|
if devices:
|
||||||
|
return devices
|
||||||
|
return None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def ports(self):
|
||||||
|
"""Filter None from addon ports."""
|
||||||
|
if not self.addon.ports:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return {
|
||||||
|
container_port: host_port
|
||||||
|
for container_port, host_port in self.addon.ports.items()
|
||||||
|
if host_port
|
||||||
|
}
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def tmpfs(self):
|
def tmpfs(self):
|
||||||
"""Return tmpfs for docker add-on."""
|
"""Return tmpfs for docker add-on."""
|
||||||
@@ -46,6 +84,20 @@ class DockerAddon(DockerBase):
|
|||||||
return {"/tmpfs": "{}".format(options)}
|
return {"/tmpfs": "{}".format(options)}
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def network_mapping(self):
|
||||||
|
"""Return hosts mapping."""
|
||||||
|
return {
|
||||||
|
'homeassistant': self.docker.network.gateway,
|
||||||
|
}
|
||||||
|
|
||||||
|
@property
|
||||||
|
def network_mode(self):
|
||||||
|
"""Return network mode for addon."""
|
||||||
|
if self.addon.host_network:
|
||||||
|
return 'host'
|
||||||
|
return None
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def volumes(self):
|
def volumes(self):
|
||||||
"""Generate volumes for mappings."""
|
"""Generate volumes for mappings."""
|
||||||
@@ -103,27 +155,26 @@ class DockerAddon(DockerBase):
|
|||||||
if not self.addon.write_options():
|
if not self.addon.write_options():
|
||||||
return False
|
return False
|
||||||
|
|
||||||
try:
|
ret = self.docker.run(
|
||||||
self.dock.containers.run(
|
self.image,
|
||||||
self.image,
|
name=self.name,
|
||||||
name=self.name,
|
hostname=self.hostname,
|
||||||
detach=True,
|
detach=True,
|
||||||
network_mode=self.addon.network_mode,
|
network_mode=self.network_mode,
|
||||||
ports=self.addon.ports,
|
ports=self.ports,
|
||||||
devices=self.addon.devices,
|
extra_hosts=self.network_mapping,
|
||||||
cap_add=self.addon.privileged,
|
devices=self.devices,
|
||||||
environment=self.environment,
|
cap_add=self.addon.privileged,
|
||||||
volumes=self.volumes,
|
environment=self.environment,
|
||||||
tmpfs=self.tmpfs
|
volumes=self.volumes,
|
||||||
)
|
tmpfs=self.tmpfs
|
||||||
|
)
|
||||||
|
|
||||||
except docker.errors.DockerException as err:
|
if ret:
|
||||||
_LOGGER.error("Can't run %s -> %s", self.image, err)
|
_LOGGER.info("Start docker addon %s with version %s",
|
||||||
return False
|
self.image, self.version)
|
||||||
|
|
||||||
_LOGGER.info(
|
return ret
|
||||||
"Start docker addon %s with version %s", self.image, self.version)
|
|
||||||
return True
|
|
||||||
|
|
||||||
def _install(self, tag):
|
def _install(self, tag):
|
||||||
"""Pull docker image or build it.
|
"""Pull docker image or build it.
|
||||||
@@ -164,8 +215,10 @@ class DockerAddon(DockerBase):
|
|||||||
build_tag = "{}:{}".format(self.image, tag)
|
build_tag = "{}:{}".format(self.image, tag)
|
||||||
|
|
||||||
_LOGGER.info("Start build %s on %s", build_tag, build_dir)
|
_LOGGER.info("Start build %s on %s", build_tag, build_dir)
|
||||||
image = self.dock.images.build(
|
image = self.docker.images.build(
|
||||||
path=str(build_dir), tag=build_tag, pull=True)
|
path=str(build_dir), tag=build_tag, pull=True,
|
||||||
|
forcerm=True
|
||||||
|
)
|
||||||
|
|
||||||
image.tag(self.image, tag='latest')
|
image.tag(self.image, tag='latest')
|
||||||
self.process_metadata(image.attrs, force=True)
|
self.process_metadata(image.attrs, force=True)
|
||||||
@@ -180,15 +233,10 @@ class DockerAddon(DockerBase):
|
|||||||
finally:
|
finally:
|
||||||
shutil.rmtree(str(build_dir), ignore_errors=True)
|
shutil.rmtree(str(build_dir), ignore_errors=True)
|
||||||
|
|
||||||
async def export_image(self, path):
|
@docker_process
|
||||||
|
def export_image(self, path):
|
||||||
"""Export current images into a tar file."""
|
"""Export current images into a tar file."""
|
||||||
if self._lock.locked():
|
return self.loop.run_in_executor(None, self._export_image, path)
|
||||||
_LOGGER.error("Can't excute export while a task is in progress")
|
|
||||||
return False
|
|
||||||
|
|
||||||
async with self._lock:
|
|
||||||
return await self.loop.run_in_executor(
|
|
||||||
None, self._export_image, path)
|
|
||||||
|
|
||||||
def _export_image(self, tar_file):
|
def _export_image(self, tar_file):
|
||||||
"""Export current images into a tar file.
|
"""Export current images into a tar file.
|
||||||
@@ -196,7 +244,7 @@ class DockerAddon(DockerBase):
|
|||||||
Need run inside executor.
|
Need run inside executor.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
image = self.dock.api.get_image(self.image)
|
image = self.docker.api.get_image(self.image)
|
||||||
except docker.errors.DockerException as err:
|
except docker.errors.DockerException as err:
|
||||||
_LOGGER.error("Can't fetch image %s -> %s", self.image, err)
|
_LOGGER.error("Can't fetch image %s -> %s", self.image, err)
|
||||||
return False
|
return False
|
||||||
@@ -212,15 +260,10 @@ class DockerAddon(DockerBase):
|
|||||||
_LOGGER.info("Export image %s to %s", self.image, tar_file)
|
_LOGGER.info("Export image %s to %s", self.image, tar_file)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
async def import_image(self, path, tag):
|
@docker_process
|
||||||
|
def import_image(self, path, tag):
|
||||||
"""Import a tar file as image."""
|
"""Import a tar file as image."""
|
||||||
if self._lock.locked():
|
return self.loop.run_in_executor(None, self._import_image, path, tag)
|
||||||
_LOGGER.error("Can't excute import while a task is in progress")
|
|
||||||
return False
|
|
||||||
|
|
||||||
async with self._lock:
|
|
||||||
return await self.loop.run_in_executor(
|
|
||||||
None, self._import_image, path, tag)
|
|
||||||
|
|
||||||
def _import_image(self, tar_file, tag):
|
def _import_image(self, tar_file, tag):
|
||||||
"""Import a tar file as image.
|
"""Import a tar file as image.
|
||||||
@@ -229,9 +272,9 @@ class DockerAddon(DockerBase):
|
|||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
with tar_file.open("rb") as read_tar:
|
with tar_file.open("rb") as read_tar:
|
||||||
self.dock.api.load_image(read_tar)
|
self.docker.api.load_image(read_tar)
|
||||||
|
|
||||||
image = self.dock.images.get(self.image)
|
image = self.docker.images.get(self.image)
|
||||||
image.tag(self.image, tag=tag)
|
image.tag(self.image, tag=tag)
|
||||||
except (docker.errors.DockerException, OSError) as err:
|
except (docker.errors.DockerException, OSError) as err:
|
||||||
_LOGGER.error("Can't import image %s -> %s", self.image, err)
|
_LOGGER.error("Can't import image %s -> %s", self.image, err)
|
||||||
|
@@ -3,19 +3,19 @@ import logging
|
|||||||
|
|
||||||
import docker
|
import docker
|
||||||
|
|
||||||
from . import DockerBase
|
from .interface import DockerInterface
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
HASS_DOCKER_NAME = 'homeassistant'
|
HASS_DOCKER_NAME = 'homeassistant'
|
||||||
|
|
||||||
|
|
||||||
class DockerHomeAssistant(DockerBase):
|
class DockerHomeAssistant(DockerInterface):
|
||||||
"""Docker hassio wrapper for HomeAssistant."""
|
"""Docker hassio wrapper for HomeAssistant."""
|
||||||
|
|
||||||
def __init__(self, config, loop, dock, data):
|
def __init__(self, config, loop, api, data):
|
||||||
"""Initialize docker homeassistant wrapper."""
|
"""Initialize docker homeassistant wrapper."""
|
||||||
super().__init__(config, loop, dock, image=data.image)
|
super().__init__(config, loop, api, image=data.image)
|
||||||
self.data = data
|
self.data = data
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -46,31 +46,68 @@ class DockerHomeAssistant(DockerBase):
|
|||||||
# cleanup
|
# cleanup
|
||||||
self._stop()
|
self._stop()
|
||||||
|
|
||||||
try:
|
ret = self.docker.run(
|
||||||
self.dock.containers.run(
|
self.image,
|
||||||
self.image,
|
name=self.name,
|
||||||
name=self.name,
|
hostname=self.name,
|
||||||
detach=True,
|
detach=True,
|
||||||
privileged=True,
|
privileged=True,
|
||||||
devices=self.devices,
|
devices=self.devices,
|
||||||
network_mode='host',
|
network_mode='host',
|
||||||
environment={
|
environment={
|
||||||
'HASSIO': self.config.api_endpoint,
|
'HASSIO': self.docker.network.supervisor,
|
||||||
'TZ': self.config.timezone,
|
'TZ': self.config.timezone,
|
||||||
},
|
},
|
||||||
volumes={
|
volumes={
|
||||||
str(self.config.path_extern_config):
|
str(self.config.path_extern_config):
|
||||||
{'bind': '/config', 'mode': 'rw'},
|
{'bind': '/config', 'mode': 'rw'},
|
||||||
str(self.config.path_extern_ssl):
|
str(self.config.path_extern_ssl):
|
||||||
{'bind': '/ssl', 'mode': 'ro'},
|
{'bind': '/ssl', 'mode': 'ro'},
|
||||||
str(self.config.path_extern_share):
|
str(self.config.path_extern_share):
|
||||||
{'bind': '/share', 'mode': 'rw'},
|
{'bind': '/share', 'mode': 'rw'},
|
||||||
})
|
}
|
||||||
|
)
|
||||||
|
|
||||||
except docker.errors.DockerException as err:
|
if ret:
|
||||||
_LOGGER.error("Can't run %s -> %s", self.image, err)
|
_LOGGER.info("Start homeassistant %s with version %s",
|
||||||
|
self.image, self.version)
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
def _execute_command(self, command):
|
||||||
|
"""Create a temporary container and run command.
|
||||||
|
|
||||||
|
Need run inside executor.
|
||||||
|
"""
|
||||||
|
return self.docker.run_command(
|
||||||
|
self.image,
|
||||||
|
command,
|
||||||
|
detach=True,
|
||||||
|
stdout=True,
|
||||||
|
stderr=True,
|
||||||
|
environment={
|
||||||
|
'TZ': self.config.timezone,
|
||||||
|
},
|
||||||
|
volumes={
|
||||||
|
str(self.config.path_extern_config):
|
||||||
|
{'bind': '/config', 'mode': 'ro'},
|
||||||
|
str(self.config.path_extern_ssl):
|
||||||
|
{'bind': '/ssl', 'mode': 'ro'},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
def is_initialize(self):
|
||||||
|
"""Return True if docker container exists."""
|
||||||
|
return self.loop.run_in_executor(None, self._is_initialize)
|
||||||
|
|
||||||
|
def _is_initialize(self):
|
||||||
|
"""Return True if docker container exists.
|
||||||
|
|
||||||
|
Need run inside executor.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
self.docker.containers.get(self.name)
|
||||||
|
except docker.errors.DockerException:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
_LOGGER.info(
|
|
||||||
"Start homeassistant %s with version %s", self.image, self.version)
|
|
||||||
return True
|
return True
|
||||||
|
327
hassio/dock/interface.py
Normal file
327
hassio/dock/interface.py
Normal file
@@ -0,0 +1,327 @@
|
|||||||
|
"""Interface class for HassIO docker object."""
|
||||||
|
import asyncio
|
||||||
|
from contextlib import suppress
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import docker
|
||||||
|
|
||||||
|
from .util import docker_process
|
||||||
|
from ..const import LABEL_VERSION, LABEL_ARCH
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class DockerInterface(object):
|
||||||
|
"""Docker hassio interface."""
|
||||||
|
|
||||||
|
def __init__(self, config, loop, api, image=None, timeout=30):
|
||||||
|
"""Initialize docker base wrapper."""
|
||||||
|
self.config = config
|
||||||
|
self.loop = loop
|
||||||
|
self.docker = api
|
||||||
|
|
||||||
|
self.image = image
|
||||||
|
self.timeout = timeout
|
||||||
|
self.version = None
|
||||||
|
self.arch = None
|
||||||
|
self._lock = asyncio.Lock(loop=loop)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def name(self):
|
||||||
|
"""Return name of docker container."""
|
||||||
|
return None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def in_progress(self):
|
||||||
|
"""Return True if a task is in progress."""
|
||||||
|
return self._lock.locked()
|
||||||
|
|
||||||
|
def process_metadata(self, metadata, force=False):
|
||||||
|
"""Read metadata and set it to object."""
|
||||||
|
# read image
|
||||||
|
if not self.image:
|
||||||
|
self.image = metadata['Config']['Image']
|
||||||
|
|
||||||
|
# read version
|
||||||
|
need_version = force or not self.version
|
||||||
|
if need_version and LABEL_VERSION in metadata['Config']['Labels']:
|
||||||
|
self.version = metadata['Config']['Labels'][LABEL_VERSION]
|
||||||
|
elif need_version:
|
||||||
|
_LOGGER.warning("Can't read version from %s", self.name)
|
||||||
|
|
||||||
|
# read arch
|
||||||
|
need_arch = force or not self.arch
|
||||||
|
if need_arch and LABEL_ARCH in metadata['Config']['Labels']:
|
||||||
|
self.arch = metadata['Config']['Labels'][LABEL_ARCH]
|
||||||
|
|
||||||
|
@docker_process
|
||||||
|
def install(self, tag):
|
||||||
|
"""Pull docker image."""
|
||||||
|
return self.loop.run_in_executor(None, self._install, tag)
|
||||||
|
|
||||||
|
def _install(self, tag):
|
||||||
|
"""Pull docker image.
|
||||||
|
|
||||||
|
Need run inside executor.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
_LOGGER.info("Pull image %s tag %s.", self.image, tag)
|
||||||
|
image = self.docker.images.pull("{}:{}".format(self.image, tag))
|
||||||
|
|
||||||
|
image.tag(self.image, tag='latest')
|
||||||
|
self.process_metadata(image.attrs, force=True)
|
||||||
|
except docker.errors.APIError as err:
|
||||||
|
_LOGGER.error("Can't install %s:%s -> %s.", self.image, tag, err)
|
||||||
|
return False
|
||||||
|
|
||||||
|
_LOGGER.info("Tag image %s with version %s as latest", self.image, tag)
|
||||||
|
return True
|
||||||
|
|
||||||
|
def exists(self):
|
||||||
|
"""Return True if docker image exists in local repo."""
|
||||||
|
return self.loop.run_in_executor(None, self._exists)
|
||||||
|
|
||||||
|
def _exists(self):
|
||||||
|
"""Return True if docker image exists in local repo.
|
||||||
|
|
||||||
|
Need run inside executor.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
self.docker.images.get(self.image)
|
||||||
|
except docker.errors.DockerException:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def is_running(self):
|
||||||
|
"""Return True if docker is Running.
|
||||||
|
|
||||||
|
Return a Future.
|
||||||
|
"""
|
||||||
|
return self.loop.run_in_executor(None, self._is_running)
|
||||||
|
|
||||||
|
def _is_running(self):
|
||||||
|
"""Return True if docker is Running.
|
||||||
|
|
||||||
|
Need run inside executor.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
container = self.docker.containers.get(self.name)
|
||||||
|
image = self.docker.images.get(self.image)
|
||||||
|
except docker.errors.DockerException:
|
||||||
|
return False
|
||||||
|
|
||||||
|
# container is not running
|
||||||
|
if container.status != 'running':
|
||||||
|
return False
|
||||||
|
|
||||||
|
# we run on a old image, stop and start it
|
||||||
|
if container.image.id != image.id:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
@docker_process
|
||||||
|
def attach(self):
|
||||||
|
"""Attach to running docker container."""
|
||||||
|
return self.loop.run_in_executor(None, self._attach)
|
||||||
|
|
||||||
|
def _attach(self):
|
||||||
|
"""Attach to running docker container.
|
||||||
|
|
||||||
|
Need run inside executor.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
if self.image:
|
||||||
|
obj_data = self.docker.images.get(self.image).attrs
|
||||||
|
else:
|
||||||
|
obj_data = self.docker.containers.get(self.name).attrs
|
||||||
|
except docker.errors.DockerException:
|
||||||
|
return False
|
||||||
|
|
||||||
|
self.process_metadata(obj_data)
|
||||||
|
_LOGGER.info(
|
||||||
|
"Attach to image %s with version %s", self.image, self.version)
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
@docker_process
|
||||||
|
def run(self):
|
||||||
|
"""Run docker image."""
|
||||||
|
return self.loop.run_in_executor(None, self._run)
|
||||||
|
|
||||||
|
def _run(self):
|
||||||
|
"""Run docker image.
|
||||||
|
|
||||||
|
Need run inside executor.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
@docker_process
|
||||||
|
def stop(self):
|
||||||
|
"""Stop/remove docker container."""
|
||||||
|
return self.loop.run_in_executor(None, self._stop)
|
||||||
|
|
||||||
|
def _stop(self):
|
||||||
|
"""Stop/remove and remove docker container.
|
||||||
|
|
||||||
|
Need run inside executor.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
container = self.docker.containers.get(self.name)
|
||||||
|
except docker.errors.DockerException:
|
||||||
|
return False
|
||||||
|
|
||||||
|
if container.status == 'running':
|
||||||
|
_LOGGER.info("Stop %s docker application", self.image)
|
||||||
|
with suppress(docker.errors.DockerException):
|
||||||
|
container.stop(timeout=self.timeout)
|
||||||
|
|
||||||
|
with suppress(docker.errors.DockerException):
|
||||||
|
_LOGGER.info("Clean %s docker application", self.image)
|
||||||
|
container.remove(force=True)
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
@docker_process
|
||||||
|
def remove(self):
|
||||||
|
"""Remove docker images."""
|
||||||
|
return self.loop.run_in_executor(None, self._remove)
|
||||||
|
|
||||||
|
def _remove(self):
|
||||||
|
"""remove docker images.
|
||||||
|
|
||||||
|
Need run inside executor.
|
||||||
|
"""
|
||||||
|
# cleanup container
|
||||||
|
self._stop()
|
||||||
|
|
||||||
|
_LOGGER.info(
|
||||||
|
"Remove docker %s with latest and %s", self.image, self.version)
|
||||||
|
|
||||||
|
try:
|
||||||
|
with suppress(docker.errors.ImageNotFound):
|
||||||
|
self.docker.images.remove(
|
||||||
|
image="{}:latest".format(self.image), force=True)
|
||||||
|
|
||||||
|
with suppress(docker.errors.ImageNotFound):
|
||||||
|
self.docker.images.remove(
|
||||||
|
image="{}:{}".format(self.image, self.version), force=True)
|
||||||
|
|
||||||
|
except docker.errors.DockerException as err:
|
||||||
|
_LOGGER.warning("Can't remove image %s -> %s", self.image, err)
|
||||||
|
return False
|
||||||
|
|
||||||
|
# clean metadata
|
||||||
|
self.version = None
|
||||||
|
self.arch = None
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
@docker_process
|
||||||
|
def update(self, tag):
|
||||||
|
"""Update a docker image."""
|
||||||
|
return self.loop.run_in_executor(None, self._update, tag)
|
||||||
|
|
||||||
|
def _update(self, tag):
|
||||||
|
"""Update a docker image.
|
||||||
|
|
||||||
|
Need run inside executor.
|
||||||
|
"""
|
||||||
|
_LOGGER.info(
|
||||||
|
"Update docker %s with %s:%s", self.version, self.image, tag)
|
||||||
|
|
||||||
|
# update docker image
|
||||||
|
if not self._install(tag):
|
||||||
|
return False
|
||||||
|
|
||||||
|
# stop container & cleanup
|
||||||
|
self._stop()
|
||||||
|
self._cleanup()
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def logs(self):
|
||||||
|
"""Return docker logs of container.
|
||||||
|
|
||||||
|
Return a Future.
|
||||||
|
"""
|
||||||
|
return self.loop.run_in_executor(None, self._logs)
|
||||||
|
|
||||||
|
def _logs(self):
|
||||||
|
"""Return docker logs of container.
|
||||||
|
|
||||||
|
Need run inside executor.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
container = self.docker.containers.get(self.name)
|
||||||
|
except docker.errors.DockerException:
|
||||||
|
return b""
|
||||||
|
|
||||||
|
try:
|
||||||
|
return container.logs(tail=100, stdout=True, stderr=True)
|
||||||
|
except docker.errors.DockerException as err:
|
||||||
|
_LOGGER.warning("Can't grap logs from %s -> %s", self.image, err)
|
||||||
|
|
||||||
|
@docker_process
|
||||||
|
def restart(self):
|
||||||
|
"""Restart docker container."""
|
||||||
|
return self.loop.run_in_executor(None, self._restart)
|
||||||
|
|
||||||
|
def _restart(self):
|
||||||
|
"""Restart docker container.
|
||||||
|
|
||||||
|
Need run inside executor.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
container = self.docker.containers.get(self.name)
|
||||||
|
except docker.errors.DockerException:
|
||||||
|
return False
|
||||||
|
|
||||||
|
_LOGGER.info("Restart %s", self.image)
|
||||||
|
|
||||||
|
try:
|
||||||
|
container.restart(timeout=self.timeout)
|
||||||
|
except docker.errors.DockerException as err:
|
||||||
|
_LOGGER.warning("Can't restart %s -> %s", self.image, err)
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
@docker_process
|
||||||
|
def cleanup(self):
|
||||||
|
"""Check if old version exists and cleanup."""
|
||||||
|
return self.loop.run_in_executor(None, self._cleanup)
|
||||||
|
|
||||||
|
def _cleanup(self):
|
||||||
|
"""Check if old version exists and cleanup.
|
||||||
|
|
||||||
|
Need run inside executor.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
latest = self.docker.images.get(self.image)
|
||||||
|
except docker.errors.DockerException:
|
||||||
|
_LOGGER.warning("Can't find %s for cleanup", self.image)
|
||||||
|
return False
|
||||||
|
|
||||||
|
for image in self.docker.images.list(name=self.image):
|
||||||
|
if latest.id == image.id:
|
||||||
|
continue
|
||||||
|
|
||||||
|
with suppress(docker.errors.DockerException):
|
||||||
|
_LOGGER.info("Cleanup docker images: %s", image.tags)
|
||||||
|
self.docker.images.remove(image.id, force=True)
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
@docker_process
|
||||||
|
def execute_command(self, command):
|
||||||
|
"""Create a temporary container and run command."""
|
||||||
|
return self.loop.run_in_executor(None, self._execute_command, command)
|
||||||
|
|
||||||
|
def _execute_command(self, command):
|
||||||
|
"""Create a temporary container and run command.
|
||||||
|
|
||||||
|
Need run inside executor.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
89
hassio/dock/network.py
Normal file
89
hassio/dock/network.py
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
"""Internal network manager for HassIO."""
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import docker
|
||||||
|
|
||||||
|
from ..const import DOCKER_NETWORK_MASK, DOCKER_NETWORK, DOCKER_NETWORK_RANGE
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class DockerNetwork(object):
|
||||||
|
"""Internal HassIO Network."""
|
||||||
|
|
||||||
|
def __init__(self, dock):
|
||||||
|
"""Initialize internal hassio network."""
|
||||||
|
self.docker = dock
|
||||||
|
self.network = self._get_network()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def name(self):
|
||||||
|
"""Return name of network."""
|
||||||
|
return DOCKER_NETWORK
|
||||||
|
|
||||||
|
@property
|
||||||
|
def containers(self):
|
||||||
|
"""Return of connected containers from network."""
|
||||||
|
return self.network.containers
|
||||||
|
|
||||||
|
@property
|
||||||
|
def gateway(self):
|
||||||
|
"""Return gateway of the network."""
|
||||||
|
return DOCKER_NETWORK_MASK[1]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def supervisor(self):
|
||||||
|
"""Return supervisor of the network."""
|
||||||
|
return DOCKER_NETWORK_MASK[2]
|
||||||
|
|
||||||
|
def _get_network(self):
|
||||||
|
"""Get HassIO network."""
|
||||||
|
try:
|
||||||
|
return self.docker.networks.get(DOCKER_NETWORK)
|
||||||
|
except docker.errors.NotFound:
|
||||||
|
_LOGGER.info("Can't find HassIO network, create new network")
|
||||||
|
|
||||||
|
ipam_pool = docker.types.IPAMPool(
|
||||||
|
subnet=str(DOCKER_NETWORK_MASK),
|
||||||
|
gateway=str(self.gateway),
|
||||||
|
iprange=str(DOCKER_NETWORK_RANGE)
|
||||||
|
)
|
||||||
|
|
||||||
|
ipam_config = docker.types.IPAMConfig(pool_configs=[ipam_pool])
|
||||||
|
|
||||||
|
return self.docker.networks.create(
|
||||||
|
DOCKER_NETWORK, driver='bridge', ipam=ipam_config, options={
|
||||||
|
"com.docker.network.bridge.name": DOCKER_NETWORK,
|
||||||
|
})
|
||||||
|
|
||||||
|
def attach_container(self, container, alias=None, ipv4=None):
|
||||||
|
"""Attach container to hassio network.
|
||||||
|
|
||||||
|
Need run inside executor.
|
||||||
|
"""
|
||||||
|
ipv4 = str(ipv4) if ipv4 else None
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.network.connect(container, aliases=alias, ipv4_address=ipv4)
|
||||||
|
except docker.errors.APIError as err:
|
||||||
|
_LOGGER.error("Can't link container to hassio-net -> %s", err)
|
||||||
|
return False
|
||||||
|
|
||||||
|
self.network.reload()
|
||||||
|
return True
|
||||||
|
|
||||||
|
def detach_default_bridge(self, container):
|
||||||
|
"""Detach default docker bridge.
|
||||||
|
|
||||||
|
Need run inside executor.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
default_network = self.docker.networks.get('bridge')
|
||||||
|
default_network.disconnect(container)
|
||||||
|
|
||||||
|
except docker.errors.NotFound:
|
||||||
|
return
|
||||||
|
|
||||||
|
except docker.errors.APIError as err:
|
||||||
|
_LOGGER.warning(
|
||||||
|
"Can't disconnect container from default -> %s", err)
|
@@ -2,18 +2,21 @@
|
|||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
|
|
||||||
from . import DockerBase
|
import docker
|
||||||
|
|
||||||
|
from .interface import DockerInterface
|
||||||
|
from .util import docker_process
|
||||||
from ..const import RESTART_EXIT_CODE
|
from ..const import RESTART_EXIT_CODE
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class DockerSupervisor(DockerBase):
|
class DockerSupervisor(DockerInterface):
|
||||||
"""Docker hassio wrapper for HomeAssistant."""
|
"""Docker hassio wrapper for HomeAssistant."""
|
||||||
|
|
||||||
def __init__(self, config, loop, dock, stop_callback, image=None):
|
def __init__(self, config, loop, api, stop_callback, image=None):
|
||||||
"""Initialize docker base wrapper."""
|
"""Initialize docker base wrapper."""
|
||||||
super().__init__(config, loop, dock, image=image)
|
super().__init__(config, loop, api, image=image)
|
||||||
self.stop_callback = stop_callback
|
self.stop_callback = stop_callback
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -21,20 +24,38 @@ class DockerSupervisor(DockerBase):
|
|||||||
"""Return name of docker container."""
|
"""Return name of docker container."""
|
||||||
return os.environ['SUPERVISOR_NAME']
|
return os.environ['SUPERVISOR_NAME']
|
||||||
|
|
||||||
|
def _attach(self):
|
||||||
|
"""Attach to running docker container.
|
||||||
|
|
||||||
|
Need run inside executor.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
container = self.docker.containers.get(self.name)
|
||||||
|
except docker.errors.DockerException:
|
||||||
|
return False
|
||||||
|
|
||||||
|
self.process_metadata(container.attrs)
|
||||||
|
_LOGGER.info("Attach to supervisor %s with version %s",
|
||||||
|
self.image, self.version)
|
||||||
|
|
||||||
|
# if already attach
|
||||||
|
if container in self.docker.network.containers:
|
||||||
|
return True
|
||||||
|
|
||||||
|
# attach to network
|
||||||
|
return self.docker.network.attach_container(
|
||||||
|
container, alias=['hassio'], ipv4=self.docker.network.supervisor)
|
||||||
|
|
||||||
|
@docker_process
|
||||||
async def update(self, tag):
|
async def update(self, tag):
|
||||||
"""Update a supervisor docker image."""
|
"""Update a supervisor docker image."""
|
||||||
if self._lock.locked():
|
|
||||||
_LOGGER.error("Can't excute update while a task is in progress")
|
|
||||||
return False
|
|
||||||
|
|
||||||
_LOGGER.info("Update supervisor docker to %s:%s", self.image, tag)
|
_LOGGER.info("Update supervisor docker to %s:%s", self.image, tag)
|
||||||
|
|
||||||
async with self._lock:
|
if await self.loop.run_in_executor(None, self._install, tag):
|
||||||
if await self.loop.run_in_executor(None, self._install, tag):
|
self.loop.create_task(self.stop_callback(RESTART_EXIT_CODE))
|
||||||
self.loop.create_task(self.stop_callback(RESTART_EXIT_CODE))
|
return True
|
||||||
return True
|
|
||||||
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
async def run(self):
|
async def run(self):
|
||||||
"""Run docker image."""
|
"""Run docker image."""
|
||||||
|
@@ -1,8 +1,10 @@
|
|||||||
"""HassIO docker utilitys."""
|
"""HassIO docker utilitys."""
|
||||||
|
import logging
|
||||||
import re
|
import re
|
||||||
|
|
||||||
from ..const import ARCH_AARCH64, ARCH_ARMHF, ARCH_I386, ARCH_AMD64
|
from ..const import ARCH_AARCH64, ARCH_ARMHF, ARCH_I386, ARCH_AMD64
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
HASSIO_BASE_IMAGE = {
|
HASSIO_BASE_IMAGE = {
|
||||||
ARCH_ARMHF: "homeassistant/armhf-base:latest",
|
ARCH_ARMHF: "homeassistant/armhf-base:latest",
|
||||||
@@ -40,3 +42,19 @@ def create_metadata(version, arch, meta_type):
|
|||||||
return ('LABEL io.hass.version="{}" '
|
return ('LABEL io.hass.version="{}" '
|
||||||
'io.hass.arch="{}" '
|
'io.hass.arch="{}" '
|
||||||
'io.hass.type="{}"').format(version, arch, meta_type)
|
'io.hass.type="{}"').format(version, arch, meta_type)
|
||||||
|
|
||||||
|
|
||||||
|
# pylint: disable=protected-access
|
||||||
|
def docker_process(method):
|
||||||
|
"""Wrap function with only run once."""
|
||||||
|
async def wrap_api(api, *args, **kwargs):
|
||||||
|
"""Return api wrapper."""
|
||||||
|
if api._lock.locked():
|
||||||
|
_LOGGER.error(
|
||||||
|
"Can't excute %s while a task is in progress", method.__name__)
|
||||||
|
return False
|
||||||
|
|
||||||
|
async with api._lock:
|
||||||
|
return await method(api, *args, **kwargs)
|
||||||
|
|
||||||
|
return wrap_api
|
||||||
|
109
hassio/hardware.py
Normal file
109
hassio/hardware.py
Normal file
@@ -0,0 +1,109 @@
|
|||||||
|
"""Read hardware info from system."""
|
||||||
|
from datetime import datetime
|
||||||
|
import logging
|
||||||
|
from pathlib import Path
|
||||||
|
import re
|
||||||
|
|
||||||
|
import pyudev
|
||||||
|
|
||||||
|
from .const import ATTR_NAME, ATTR_TYPE, ATTR_DEVICES
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
ASOUND_CARDS = Path("/proc/asound/cards")
|
||||||
|
RE_CARDS = re.compile(r"(\d+) \[(\w*) *\]: (.*\w)")
|
||||||
|
|
||||||
|
ASOUND_DEVICES = Path("/proc/asound/devices")
|
||||||
|
RE_DEVICES = re.compile(r"\[.*(\d+)- (\d+).*\]: ([\w ]*)")
|
||||||
|
|
||||||
|
PROC_STAT = Path("/proc/stat")
|
||||||
|
RE_BOOT_TIME = re.compile(r"btime (\d+)")
|
||||||
|
|
||||||
|
|
||||||
|
class Hardware(object):
    """Represent a interface to procfs, sysfs and udev."""

    def __init__(self):
        """Init hardware object."""
        # udev context used by the device-listing properties below
        self.context = pyudev.Context()

    @property
    def serial_devices(self):
        """Return all serial and connected devices.

        Only tty devices exposing an ID_VENDOR udev property are
        reported, i.e. real (usually USB) serial adapters.
        """
        dev_list = set()
        for device in self.context.list_devices(subsystem='tty'):
            if 'ID_VENDOR' in device:
                dev_list.add(device.device_node)

        return list(dev_list)

    @property
    def input_devices(self):
        """Return all input devices.

        Returns the udev NAME values with quotes stripped, not the
        device nodes.
        """
        dev_list = set()
        for device in self.context.list_devices(subsystem='input'):
            if 'NAME' in device:
                dev_list.add(device['NAME'].replace('"', ''))

        return list(dev_list)

    @property
    def disk_devices(self):
        """Return all disk devices.

        NOTE(review): only /dev/sd* block devices are matched; NVMe and
        mmc disks (/dev/nvme*, /dev/mmcblk*) are not reported -- confirm
        whether that is intended.
        """
        dev_list = set()
        for device in self.context.list_devices(subsystem='block'):
            if device.device_node.startswith('/dev/sd'):
                dev_list.add(device.device_node)

        return list(dev_list)

    @property
    def audio_devices(self):
        """Return all available audio interfaces.

        Parses /proc/asound/cards and /proc/asound/devices into a dict
        of card-id -> {name, type, devices}. Returns None when the
        asound proc files can't be read.
        """
        try:
            with ASOUND_CARDS.open('r') as cards_file:
                cards = cards_file.read()
            with ASOUND_DEVICES.open('r') as devices_file:
                devices = devices_file.read()
        except OSError as err:
            _LOGGER.error("Can't read asound data -> %s", err)
            return

        audio_list = {}

        # parse cards
        for match in RE_CARDS.finditer(cards):
            audio_list[match.group(1)] = {
                ATTR_NAME: match.group(3),
                ATTR_TYPE: match.group(2),
                ATTR_DEVICES: {},
            }

        # parse devices
        for match in RE_DEVICES.finditer(devices):
            try:
                audio_list[match.group(1)][ATTR_DEVICES][match.group(2)] = \
                    match.group(3)
            except KeyError:
                # device line references a card id that was not parsed
                # from the cards file above
                _LOGGER.warning("Wrong audio device found %s", match.group(0))
                continue

        return audio_list

    @property
    def last_boot(self):
        """Return last boot time.

        Reads the 'btime' line from /proc/stat and returns it as a
        naive UTC datetime (utcfromtimestamp). Returns None if the file
        can't be read or the value is missing.
        """
        try:
            with PROC_STAT.open("r") as stat_file:
                stats = stat_file.read()
        except OSError as err:
            _LOGGER.error("Can't read stat data -> %s", err)
            return

        # parse stat file
        found = RE_BOOT_TIME.search(stats)
        if not found:
            _LOGGER.error("Can't found last boot time!")
            return

        return datetime.utcfromtimestamp(int(found.group(1)))
|
@@ -2,27 +2,30 @@
|
|||||||
import asyncio
|
import asyncio
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
|
import re
|
||||||
|
|
||||||
from .const import (
|
from .const import (
|
||||||
FILE_HASSIO_HOMEASSISTANT, ATTR_DEVICES, ATTR_IMAGE, ATTR_LAST_VERSION,
|
FILE_HASSIO_HOMEASSISTANT, ATTR_DEVICES, ATTR_IMAGE, ATTR_LAST_VERSION,
|
||||||
ATTR_VERSION)
|
ATTR_VERSION, ATTR_BOOT)
|
||||||
from .dock.homeassistant import DockerHomeAssistant
|
from .dock.homeassistant import DockerHomeAssistant
|
||||||
from .tools import JsonConfig
|
from .tools import JsonConfig, convert_to_ascii
|
||||||
from .validate import SCHEMA_HASS_CONFIG
|
from .validate import SCHEMA_HASS_CONFIG
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml")
|
||||||
|
|
||||||
|
|
||||||
class HomeAssistant(JsonConfig):
|
class HomeAssistant(JsonConfig):
|
||||||
"""Hass core object for handle it."""
|
"""Hass core object for handle it."""
|
||||||
|
|
||||||
def __init__(self, config, loop, dock, websession):
|
def __init__(self, config, loop, docker, updater):
|
||||||
"""Initialize hass object."""
|
"""Initialize hass object."""
|
||||||
super().__init__(FILE_HASSIO_HOMEASSISTANT, SCHEMA_HASS_CONFIG)
|
super().__init__(FILE_HASSIO_HOMEASSISTANT, SCHEMA_HASS_CONFIG)
|
||||||
self.config = config
|
self.config = config
|
||||||
self.loop = loop
|
self.loop = loop
|
||||||
self.websession = websession
|
self.updater = updater
|
||||||
self.docker = DockerHomeAssistant(config, loop, dock, self)
|
self.docker = DockerHomeAssistant(config, loop, docker, self)
|
||||||
|
|
||||||
async def prepare(self):
|
async def prepare(self):
|
||||||
"""Prepare HomeAssistant object."""
|
"""Prepare HomeAssistant object."""
|
||||||
@@ -45,7 +48,7 @@ class HomeAssistant(JsonConfig):
|
|||||||
"""Return last available version of homeassistant."""
|
"""Return last available version of homeassistant."""
|
||||||
if self.is_custom_image:
|
if self.is_custom_image:
|
||||||
return self._data.get(ATTR_LAST_VERSION)
|
return self._data.get(ATTR_LAST_VERSION)
|
||||||
return self.config.last_homeassistant
|
return self.updater.version_homeassistant
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def image(self):
|
def image(self):
|
||||||
@@ -70,6 +73,17 @@ class HomeAssistant(JsonConfig):
|
|||||||
self._data[ATTR_DEVICES] = value
|
self._data[ATTR_DEVICES] = value
|
||||||
self.save()
|
self.save()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def boot(self):
|
||||||
|
"""Return True if home-assistant boot is enabled."""
|
||||||
|
return self._data[ATTR_BOOT]
|
||||||
|
|
||||||
|
@boot.setter
|
||||||
|
def boot(self, value):
|
||||||
|
"""Set home-assistant boot options."""
|
||||||
|
self._data[ATTR_BOOT] = value
|
||||||
|
self.save()
|
||||||
|
|
||||||
def set_custom(self, image, version):
|
def set_custom(self, image, version):
|
||||||
"""Set a custom image for homeassistant."""
|
"""Set a custom image for homeassistant."""
|
||||||
# reset
|
# reset
|
||||||
@@ -101,7 +115,7 @@ class HomeAssistant(JsonConfig):
|
|||||||
while True:
|
while True:
|
||||||
# read homeassistant tag and install it
|
# read homeassistant tag and install it
|
||||||
if not self.last_version:
|
if not self.last_version:
|
||||||
await self.config.fetch_update_infos(self.websession)
|
await self.updater.fetch_data()
|
||||||
|
|
||||||
tag = self.last_version
|
tag = self.last_version
|
||||||
if tag and await self.docker.install(tag):
|
if tag and await self.docker.install(tag):
|
||||||
@@ -113,13 +127,20 @@ class HomeAssistant(JsonConfig):
|
|||||||
_LOGGER.info("HomeAssistant docker now installed")
|
_LOGGER.info("HomeAssistant docker now installed")
|
||||||
await self.docker.cleanup()
|
await self.docker.cleanup()
|
||||||
|
|
||||||
def update(self, version=None):
|
async def update(self, version=None):
|
||||||
"""Update HomeAssistant version.
|
"""Update HomeAssistant version."""
|
||||||
|
|
||||||
Return a coroutine.
|
|
||||||
"""
|
|
||||||
version = version or self.last_version
|
version = version or self.last_version
|
||||||
return self.docker.update(version)
|
running = await self.docker.is_running()
|
||||||
|
|
||||||
|
if version == self.docker.version:
|
||||||
|
_LOGGER.warning("Version %s is already installed", version)
|
||||||
|
return False
|
||||||
|
|
||||||
|
try:
|
||||||
|
return await self.docker.update(version)
|
||||||
|
finally:
|
||||||
|
if running:
|
||||||
|
await self.docker.run()
|
||||||
|
|
||||||
def run(self):
|
def run(self):
|
||||||
"""Run HomeAssistant docker.
|
"""Run HomeAssistant docker.
|
||||||
@@ -156,7 +177,30 @@ class HomeAssistant(JsonConfig):
|
|||||||
"""
|
"""
|
||||||
return self.docker.is_running()
|
return self.docker.is_running()
|
||||||
|
|
||||||
|
def is_initialize(self):
|
||||||
|
"""Return True if a docker container is exists.
|
||||||
|
|
||||||
|
Return a coroutine.
|
||||||
|
"""
|
||||||
|
return self.docker.is_initialize()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def in_progress(self):
|
def in_progress(self):
|
||||||
"""Return True if a task is in progress."""
|
"""Return True if a task is in progress."""
|
||||||
return self.docker.in_progress
|
return self.docker.in_progress
|
||||||
|
|
||||||
|
async def check_config(self):
|
||||||
|
"""Run homeassistant config check."""
|
||||||
|
exit_code, log = await self.docker.execute_command(
|
||||||
|
"python3 -m homeassistant -c /config --script check_config"
|
||||||
|
)
|
||||||
|
|
||||||
|
# if not valid
|
||||||
|
if exit_code is None:
|
||||||
|
return (False, "")
|
||||||
|
|
||||||
|
# parse output
|
||||||
|
log = convert_to_ascii(log)
|
||||||
|
if exit_code != 0 or RE_YAML_ERROR.search(log):
|
||||||
|
return (False, log)
|
||||||
|
return (True, log)
|
||||||
|
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -13,7 +13,7 @@ def api_sessions_cleanup(config):
|
|||||||
now = datetime.now()
|
now = datetime.now()
|
||||||
for session, until_valid in config.security_sessions.items():
|
for session, until_valid in config.security_sessions.items():
|
||||||
if now >= until_valid:
|
if now >= until_valid:
|
||||||
config.security_sessions = (session, None)
|
config.drop_security_session(session)
|
||||||
|
|
||||||
return _api_sessions_cleanup
|
return _api_sessions_cleanup
|
||||||
|
|
||||||
@@ -27,8 +27,14 @@ def addons_update(loop, addons):
|
|||||||
if not addon.is_installed or not addon.auto_update:
|
if not addon.is_installed or not addon.auto_update:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if addon.version_installed != addon.version:
|
if addon.version_installed == addon.last_version:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if addon.test_udpate_schema():
|
||||||
tasks.append(addon.update())
|
tasks.append(addon.update())
|
||||||
|
else:
|
||||||
|
_LOGGER.warning(
|
||||||
|
"Addon %s will be ignore, schema tests fails", addon.slug)
|
||||||
|
|
||||||
if tasks:
|
if tasks:
|
||||||
_LOGGER.info("Addon auto update process %d tasks", len(tasks))
|
_LOGGER.info("Addon auto update process %d tasks", len(tasks))
|
||||||
@@ -37,21 +43,21 @@ def addons_update(loop, addons):
|
|||||||
return _addons_update
|
return _addons_update
|
||||||
|
|
||||||
|
|
||||||
def hassio_update(config, supervisor, websession):
|
def hassio_update(supervisor, updater):
|
||||||
"""Create scheduler task for update of supervisor hassio."""
|
"""Create scheduler task for update of supervisor hassio."""
|
||||||
async def _hassio_update():
|
async def _hassio_update():
|
||||||
"""Check and run update of supervisor hassio."""
|
"""Check and run update of supervisor hassio."""
|
||||||
await config.fetch_update_infos(websession)
|
await updater.fetch_data()
|
||||||
if config.last_hassio == supervisor.version:
|
if updater.version_hassio == supervisor.version:
|
||||||
return
|
return
|
||||||
|
|
||||||
# don't perform a update on beta/dev channel
|
# don't perform a update on beta/dev channel
|
||||||
if config.upstream_beta:
|
if updater.beta_channel:
|
||||||
_LOGGER.warning("Ignore Hass.IO update on beta upstream!")
|
_LOGGER.warning("Ignore Hass.IO update on beta upstream!")
|
||||||
return
|
return
|
||||||
|
|
||||||
_LOGGER.info("Found new HassIO version %s.", config.last_hassio)
|
_LOGGER.info("Found new HassIO version %s.", updater.version_hassio)
|
||||||
await supervisor.update(config.last_hassio)
|
await supervisor.update(updater.version_hassio)
|
||||||
|
|
||||||
return _hassio_update
|
return _hassio_update
|
||||||
|
|
||||||
@@ -60,6 +66,11 @@ def homeassistant_watchdog(loop, homeassistant):
|
|||||||
"""Create scheduler task for montoring running state."""
|
"""Create scheduler task for montoring running state."""
|
||||||
async def _homeassistant_watchdog():
|
async def _homeassistant_watchdog():
|
||||||
"""Check running state and start if they is close."""
|
"""Check running state and start if they is close."""
|
||||||
|
# if Home-Assistant is active
|
||||||
|
if not await homeassistant.is_initialize():
|
||||||
|
return
|
||||||
|
|
||||||
|
# If Home-Assistant is running
|
||||||
if homeassistant.in_progress or await homeassistant.is_running():
|
if homeassistant.in_progress or await homeassistant.is_running():
|
||||||
return
|
return
|
||||||
|
|
||||||
|
131
hassio/tools.py
131
hassio/tools.py
@@ -1,9 +1,10 @@
|
|||||||
"""Tools file for HassIO."""
|
"""Tools file for HassIO."""
|
||||||
import asyncio
|
import asyncio
|
||||||
from contextlib import suppress
|
from contextlib import suppress
|
||||||
|
from datetime import datetime, timedelta, timezone
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
import socket
|
import re
|
||||||
|
|
||||||
import aiohttp
|
import aiohttp
|
||||||
import async_timeout
|
import async_timeout
|
||||||
@@ -11,51 +12,21 @@ import pytz
|
|||||||
import voluptuous as vol
|
import voluptuous as vol
|
||||||
from voluptuous.humanize import humanize_error
|
from voluptuous.humanize import humanize_error
|
||||||
|
|
||||||
from .const import URL_HASSIO_VERSION, URL_HASSIO_VERSION_BETA
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
FREEGEOIP_URL = "https://freegeoip.io/json/"
|
FREEGEOIP_URL = "https://freegeoip.io/json/"
|
||||||
|
|
||||||
|
RE_STRING = re.compile(r"\x1b(\[.*?[@-~]|\].*?(\x07|\x1b\\))")
|
||||||
|
|
||||||
async def fetch_last_versions(websession, beta=False):
|
# Copyright (c) Django Software Foundation and individual contributors.
|
||||||
"""Fetch current versions from github.
|
# All rights reserved.
|
||||||
|
# https://github.com/django/django/blob/master/LICENSE
|
||||||
Is a coroutine.
|
DATETIME_RE = re.compile(
|
||||||
"""
|
r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})'
|
||||||
url = URL_HASSIO_VERSION_BETA if beta else URL_HASSIO_VERSION
|
r'[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})'
|
||||||
try:
|
r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?'
|
||||||
with async_timeout.timeout(10, loop=websession.loop):
|
r'(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$'
|
||||||
async with websession.get(url) as request:
|
)
|
||||||
return await request.json(content_type=None)
|
|
||||||
|
|
||||||
except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err:
|
|
||||||
_LOGGER.warning("Can't fetch versions from %s! %s", url, err)
|
|
||||||
|
|
||||||
except json.JSONDecodeError as err:
|
|
||||||
_LOGGER.warning("Can't parse versions from %s! %s", url, err)
|
|
||||||
|
|
||||||
|
|
||||||
def get_local_ip(loop):
|
|
||||||
"""Retrieve local IP address.
|
|
||||||
|
|
||||||
Return a future.
|
|
||||||
"""
|
|
||||||
def local_ip():
|
|
||||||
"""Return local ip."""
|
|
||||||
try:
|
|
||||||
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
|
||||||
|
|
||||||
# Use Google Public DNS server to determine own IP
|
|
||||||
sock.connect(('8.8.8.8', 80))
|
|
||||||
|
|
||||||
return sock.getsockname()[0]
|
|
||||||
except socket.error:
|
|
||||||
return socket.gethostbyname(socket.gethostname())
|
|
||||||
finally:
|
|
||||||
sock.close()
|
|
||||||
|
|
||||||
return loop.run_in_executor(None, local_ip)
|
|
||||||
|
|
||||||
|
|
||||||
def write_json_file(jsonfile, data):
|
def write_json_file(jsonfile, data):
|
||||||
@@ -76,19 +47,6 @@ def read_json_file(jsonfile):
|
|||||||
return json.loads(cfile.read())
|
return json.loads(cfile.read())
|
||||||
|
|
||||||
|
|
||||||
def validate_timezone(timezone):
|
|
||||||
"""Validate voluptuous timezone."""
|
|
||||||
try:
|
|
||||||
pytz.timezone(timezone)
|
|
||||||
except pytz.exceptions.UnknownTimeZoneError:
|
|
||||||
raise vol.Invalid(
|
|
||||||
"Invalid time zone passed in. Valid options can be found here: "
|
|
||||||
"http://en.wikipedia.org/wiki/List_of_tz_database_time_zones") \
|
|
||||||
from None
|
|
||||||
|
|
||||||
return timezone
|
|
||||||
|
|
||||||
|
|
||||||
async def fetch_timezone(websession):
|
async def fetch_timezone(websession):
|
||||||
"""Read timezone from freegeoip."""
|
"""Read timezone from freegeoip."""
|
||||||
data = {}
|
data = {}
|
||||||
@@ -101,6 +59,47 @@ async def fetch_timezone(websession):
|
|||||||
return data.get('time_zone', 'UTC')
|
return data.get('time_zone', 'UTC')
|
||||||
|
|
||||||
|
|
||||||
|
def convert_to_ascii(raw):
    """Convert binary to ascii and remove colors.

    Decodes *raw* bytes with the default codec and strips ANSI escape
    sequences (colors, cursor control) via RE_STRING.
    """
    return RE_STRING.sub("", raw.decode())
|
||||||
|
|
||||||
|
|
||||||
|
# Copyright (c) Django Software Foundation and individual contributors.
# All rights reserved.
# https://github.com/django/django/blob/master/LICENSE
def parse_datetime(dt_str):
    """Parse a string and return a datetime.datetime.

    This function supports time zone offsets. When the input contains one,
    the output uses a timezone with a fixed offset from UTC.
    Raises ValueError if the input is well formatted but not a valid datetime.
    Returns None if the input isn't well formatted.
    """
    match = DATETIME_RE.match(dt_str)
    if not match:
        return None
    kws = match.groupdict()  # type: Dict[str, Any]
    if kws['microsecond']:
        # right-pad so e.g. ".5" is read as 500000 microseconds
        kws['microsecond'] = kws['microsecond'].ljust(6, '0')
    # tzinfo must be popped before the int() conversion below
    tzinfo_str = kws.pop('tzinfo')

    tzinfo = None  # type: Optional[dt.tzinfo]
    if tzinfo_str == 'Z':
        tzinfo = pytz.utc
    elif tzinfo_str is not None:
        # offsets look like "+HH", "+HHMM" or "+HH:MM"
        offset_mins = int(tzinfo_str[-2:]) if len(tzinfo_str) > 3 else 0
        offset_hours = int(tzinfo_str[1:3])
        offset = timedelta(hours=offset_hours, minutes=offset_mins)
        if tzinfo_str[0] == '-':
            offset = -offset
        tzinfo = timezone(offset)
    else:
        tzinfo = None
    # drop unmatched optional groups, convert the rest to ints
    kws = {k: int(v) for k, v in kws.items() if v is not None}
    kws['tzinfo'] = tzinfo
    return datetime(**kws)
|
||||||
|
|
||||||
|
|
||||||
class JsonConfig(object):
|
class JsonConfig(object):
|
||||||
"""Hass core object for handle it."""
|
"""Hass core object for handle it."""
|
||||||
|
|
||||||
@@ -140,3 +139,27 @@ class JsonConfig(object):
|
|||||||
_LOGGER.error("Can't store config in %s", self._file)
|
_LOGGER.error("Can't store config in %s", self._file)
|
||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
class AsyncThrottle(object):
    """Coroutine decorator enforcing a minimum interval between calls.

    A wrapped coroutine runs only if at least ``delta`` has elapsed
    since the last executed call; throttled calls return None.
    """
    def __init__(self, delta):
        """Store the minimum interval between executed calls."""
        self.throttle_period = delta
        self.time_of_last_call = datetime.min

    def __call__(self, method):
        """Wrap *method* with the throttle check."""
        async def wrapper(*args, **kwargs):
            """Run *method* unless still inside the throttle window."""
            call_time = datetime.now()
            elapsed = call_time - self.time_of_last_call
            if elapsed <= self.throttle_period:
                # too soon since the last executed call -> skip
                return None
            self.time_of_last_call = call_time
            return await method(*args, **kwargs)

        return wrapper
|
||||||
|
86
hassio/updater.py
Normal file
86
hassio/updater.py
Normal file
@@ -0,0 +1,86 @@
|
|||||||
|
"""Fetch last versions from webserver."""
|
||||||
|
import asyncio
|
||||||
|
from datetime import timedelta
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
|
import async_timeout
|
||||||
|
|
||||||
|
from .const import (
|
||||||
|
URL_HASSIO_VERSION, FILE_HASSIO_UPDATER, ATTR_HOMEASSISTANT, ATTR_HASSIO,
|
||||||
|
ATTR_BETA_CHANNEL)
|
||||||
|
from .tools import AsyncThrottle, JsonConfig
|
||||||
|
from .validate import SCHEMA_UPDATER_CONFIG
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Updater(JsonConfig):
    """Fetch last versions from version.json."""

    def __init__(self, config, loop, websession):
        """Initialize updater.

        Fetched version data is persisted in FILE_HASSIO_UPDATER and
        validated against SCHEMA_UPDATER_CONFIG.
        """
        super().__init__(FILE_HASSIO_UPDATER, SCHEMA_UPDATER_CONFIG)
        self.config = config
        self.loop = loop
        self.websession = websession

    @property
    def version_homeassistant(self):
        """Return last version of homeassistant (None if never fetched)."""
        return self._data.get(ATTR_HOMEASSISTANT)

    @property
    def version_hassio(self):
        """Return last version of hassio (None if never fetched)."""
        return self._data.get(ATTR_HASSIO)

    @property
    def upstream(self):
        """Return Upstream branch for version ('dev' on beta, else 'master')."""
        if self.beta_channel:
            return 'dev'
        return 'master'

    @property
    def beta_channel(self):
        """Return True if we run in beta upstream."""
        return self._data[ATTR_BETA_CHANNEL]

    @beta_channel.setter
    def beta_channel(self, value):
        """Set beta upstream mode and persist the change."""
        self._data[ATTR_BETA_CHANNEL] = bool(value)
        self.save()

    @AsyncThrottle(timedelta(seconds=60))
    async def fetch_data(self):
        """Fetch current versions from github.

        Is a coroutine. Throttled to at most one real fetch per minute;
        throttled calls return None without touching the network.
        On success, stores and persists the homeassistant/hassio
        versions; on network or parse errors it logs and returns None.
        """
        url = URL_HASSIO_VERSION.format(self.upstream)
        try:
            _LOGGER.info("Fetch update data from %s", url)
            with async_timeout.timeout(10, loop=self.loop):
                async with self.websession.get(url) as request:
                    data = await request.json(content_type=None)

        except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err:
            _LOGGER.warning("Can't fetch versions from %s -> %s", url, err)
            return

        except json.JSONDecodeError as err:
            _LOGGER.warning("Can't parse versions from %s -> %s", url, err)
            return

        # data valid?
        if not data:
            _LOGGER.warning("Invalid data from %s", url)
            return

        # update versions
        self._data[ATTR_HOMEASSISTANT] = data.get('homeassistant')
        self._data[ATTR_HASSIO] = data.get('hassio')
        self.save()
|
@@ -1,11 +1,31 @@
|
|||||||
"""Validate functions."""
|
"""Validate functions."""
|
||||||
import voluptuous as vol
|
import voluptuous as vol
|
||||||
|
|
||||||
from .const import ATTR_DEVICES, ATTR_IMAGE, ATTR_LAST_VERSION
|
import pytz
|
||||||
|
|
||||||
|
from .const import (
|
||||||
|
ATTR_DEVICES, ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_SESSIONS, ATTR_PASSWORD,
|
||||||
|
ATTR_TOTP, ATTR_SECURITY, ATTR_BETA_CHANNEL, ATTR_TIMEZONE,
|
||||||
|
ATTR_ADDONS_CUSTOM_LIST, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT,
|
||||||
|
ATTR_HOMEASSISTANT, ATTR_HASSIO, ATTR_BOOT, ATTR_LAST_BOOT)
|
||||||
|
|
||||||
|
|
||||||
NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))
|
NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))
|
||||||
HASS_DEVICES = [vol.Match(r"^[^/]*$")]
|
HASS_DEVICES = [vol.Match(r"^[^/]*$")]
|
||||||
|
ALSA_CHANNEL = vol.Match(r"\d+,\d+")
|
||||||
|
|
||||||
|
|
||||||
|
def validate_timezone(timezone):
    """Validate voluptuous timezone.

    Returns the timezone string unchanged when pytz recognizes it;
    otherwise raises vol.Invalid (original pytz context suppressed
    with ``from None``).
    """
    try:
        pytz.timezone(timezone)
    except pytz.exceptions.UnknownTimeZoneError:
        raise vol.Invalid(
            "Invalid time zone passed in. Valid options can be found here: "
            "http://en.wikipedia.org/wiki/List_of_tz_database_time_zones") \
            from None

    return timezone
|
||||||
|
|
||||||
|
|
||||||
def convert_to_docker_ports(data):
|
def convert_to_docker_ports(data):
|
||||||
@@ -35,8 +55,33 @@ DOCKER_PORTS = vol.Schema({
|
|||||||
})
|
})
|
||||||
|
|
||||||
|
|
||||||
|
# pylint: disable=no-value-for-parameter
|
||||||
SCHEMA_HASS_CONFIG = vol.Schema({
|
SCHEMA_HASS_CONFIG = vol.Schema({
|
||||||
vol.Optional(ATTR_DEVICES, default=[]): HASS_DEVICES,
|
vol.Optional(ATTR_DEVICES, default=[]): HASS_DEVICES,
|
||||||
|
vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
|
||||||
vol.Inclusive(ATTR_IMAGE, 'custom_hass'): vol.Coerce(str),
|
vol.Inclusive(ATTR_IMAGE, 'custom_hass'): vol.Coerce(str),
|
||||||
vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'): vol.Coerce(str),
|
vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'): vol.Coerce(str),
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
||||||
|
# pylint: disable=no-value-for-parameter
|
||||||
|
SCHEMA_UPDATER_CONFIG = vol.Schema({
|
||||||
|
vol.Optional(ATTR_BETA_CHANNEL, default=False): vol.Boolean(),
|
||||||
|
vol.Optional(ATTR_HOMEASSISTANT): vol.Coerce(str),
|
||||||
|
vol.Optional(ATTR_HASSIO): vol.Coerce(str),
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
# pylint: disable=no-value-for-parameter
|
||||||
|
SCHEMA_HASSIO_CONFIG = vol.Schema({
|
||||||
|
vol.Optional(ATTR_TIMEZONE, default='UTC'): validate_timezone,
|
||||||
|
vol.Optional(ATTR_LAST_BOOT): vol.Coerce(str),
|
||||||
|
vol.Optional(ATTR_ADDONS_CUSTOM_LIST, default=[]): [vol.Url()],
|
||||||
|
vol.Optional(ATTR_SECURITY, default=False): vol.Boolean(),
|
||||||
|
vol.Optional(ATTR_TOTP): vol.Coerce(str),
|
||||||
|
vol.Optional(ATTR_PASSWORD): vol.Coerce(str),
|
||||||
|
vol.Optional(ATTR_SESSIONS, default={}):
|
||||||
|
vol.Schema({vol.Coerce(str): vol.Coerce(str)}),
|
||||||
|
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
|
||||||
|
vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
|
||||||
|
}, extra=vol.REMOVE_EXTRA)
|
||||||
|
Submodule home-assistant-polymer updated: 5cdba73bac...9b9cba86c2
4
setup.py
4
setup.py
@@ -46,6 +46,8 @@ setup(
|
|||||||
'gitpython',
|
'gitpython',
|
||||||
'pyotp',
|
'pyotp',
|
||||||
'pyqrcode',
|
'pyqrcode',
|
||||||
'pytz'
|
'pytz',
|
||||||
|
'pyudev',
|
||||||
|
'deepmerge'
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
@@ -1,8 +1,8 @@
|
|||||||
{
|
{
|
||||||
"hassio": "0.46",
|
"hassio": "0.62",
|
||||||
"homeassistant": "0.49",
|
"homeassistant": "0.53",
|
||||||
"resinos": "1.0",
|
"resinos": "1.0",
|
||||||
"resinhup": "0.2",
|
"resinhup": "0.3",
|
||||||
"generic": "0.3",
|
"generic": "0.3",
|
||||||
"cluster": "0.1"
|
"cluster": "0.1"
|
||||||
}
|
}
|
||||||
|
Reference in New Issue
Block a user