Mirror of https://github.com/home-assistant/supervisor.git
Commit 164c403d05
@@ -1,6 +1,7 @@
 # https://dev.azure.com/home-assistant
 
 trigger:
+  batch: true
   branches:
     include:
     - master
@@ -18,7 +19,7 @@ variables:
   - name: versionBuilder
     value: '3.2'
   - name: versionWheels
-    value: '0.3'
+    value: '0.6'
  - group: docker
  - group: wheels
 
@@ -49,7 +50,7 @@ jobs:
      versionSpec: '3.7'
  - script: pip install black
    displayName: 'Install black'
-  - script: black --check hassio
+  - script: black --check hassio tests
    displayName: 'Run Black'
 
 
@@ -101,7 +102,8 @@ jobs:
      sudo apt-get update
      sudo apt-get install -y --no-install-recommends \
        qemu-user-static \
-        binfmt-support
+        binfmt-support \
+        curl
 
      sudo mount binfmt_misc -t binfmt_misc /proc/sys/fs/binfmt_misc
      sudo update-binfmts --enable qemu-arm
@@ -116,11 +118,13 @@ jobs:
  - script: sudo docker pull homeassistant/$(buildArch)-wheels:$(versionWheels)
    displayName: 'Install wheels builder'
  - script: |
+      curl -s -o requirements_diff.txt https://raw.githubusercontent.com/home-assistant/hassio/master/requirements.txt
      sudo docker run --rm -v $(pwd):/data:ro -v $(pwd)/.ssh:/root/.ssh:rw \
        homeassistant/$(buildArch)-wheels:$(versionWheels) \
        --apk "build-base;libffi-dev;openssl-dev" \
-        --index https://wheels.hass.io \
+        --index $(wheelsIndex) \
        --requirement requirements.txt \
+        --requirement-diff requirements_diff.txt \
        --upload rsync \
        --remote wheels@$(wheelsHost):/opt/wheels
    displayName: 'Run wheels build'
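Reviewer note: the new curl step above downloads the current master requirements.txt as requirements_diff.txt and hands it to the builder via --requirement-diff, presumably so only packages whose pins changed on this branch get rebuilt. A rough, hypothetical illustration of that kind of comparison (not the wheels builder's actual code; file names follow the pipeline step):

    # Hypothetical sketch: list pins that are new or changed compared to the
    # reference requirements file fetched from master.
    def parse_pins(path):
        """Return {package: version} for simple 'name==version' lines."""
        pins = {}
        with open(path) as handle:
            for line in handle:
                line = line.strip()
                if line and not line.startswith("#") and "==" in line:
                    name, version = line.split("==", 1)
                    pins[name.lower()] = version
        return pins

    current = parse_pins("requirements.txt")
    reference = parse_pins("requirements_diff.txt")
    changed = {name: ver for name, ver in current.items() if reference.get(name) != ver}
    print(changed)  # pins that would need freshly built wheels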
@@ -158,7 +158,12 @@ class APIIngress(CoreSysAttributes):
         source_header = _init_header(request, addon)
 
         async with self.sys_websession.request(
-            request.method, url, headers=source_header, params=request.query, data=data
+            request.method,
+            url,
+            headers=source_header,
+            params=request.query,
+            allow_redirects=False,
+            data=data,
         ) as result:
             headers = _response_header(result)
 
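Reviewer note: aside from the black-style argument layout, the behavioral change here is allow_redirects=False, so redirect responses from the add-on are passed back to the ingress client instead of being followed by the Supervisor. A minimal aiohttp sketch of that difference (the URL is only a placeholder):

    # Sketch only: aiohttp follows redirects by default; with allow_redirects=False
    # the 3xx response itself is returned to the caller.
    import asyncio

    import aiohttp


    async def main():
        async with aiohttp.ClientSession() as session:
            async with session.request(
                "GET", "http://example.com/some-redirecting-path", allow_redirects=False
            ) as resp:
                print(resp.status)                   # e.g. 302 instead of the final page
                print(resp.headers.get("Location"))  # where the add-on wanted to send us


    asyncio.run(main())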
@@ -1,14 +1,24 @@
 """Handle Arch for underlay maschine/platforms."""
 import logging
-from typing import List
 from pathlib import Path
+import platform
+from typing import List
 
-from .coresys import CoreSysAttributes, CoreSys
+from .coresys import CoreSys, CoreSysAttributes
 from .exceptions import HassioArchNotFound, JsonFileError
 from .utils.json import read_json_file
 
 _LOGGER = logging.getLogger(__name__)
 
+MAP_CPU = {
+    "armv7": "armv7",
+    "armv6": "armhf",
+    "armv8": "aarch64",
+    "aarch64": "aarch64",
+    "i686": "i386",
+    "x86_64": "amd64",
+}
+
 
 class CpuArch(CoreSysAttributes):
     """Manage available architectures."""
@@ -42,10 +52,12 @@ class CpuArch(CoreSysAttributes):
             _LOGGER.warning("Can't read arch json")
             return
 
+        native_support = self.detect_cpu()
+
         # Evaluate current CPU/Platform
         if not self.sys_machine or self.sys_machine not in arch_data:
             _LOGGER.warning("Can't detect underlay machine type!")
-            self._default_arch = self.sys_supervisor.arch
+            self._default_arch = native_support
             self._supported_arch.append(self.default)
             return
 
@@ -53,6 +65,10 @@ class CpuArch(CoreSysAttributes):
         self._supported_arch.extend(arch_data[self.sys_machine])
         self._default_arch = self.supported[0]
 
+        # Make sure native support is in supported list
+        if native_support not in self._supported_arch:
+            self._supported_arch.append(native_support)
+
     def is_supported(self, arch_list: List[str]) -> bool:
         """Return True if there is a supported arch by this platform."""
         return not set(self.supported).isdisjoint(set(arch_list))
@@ -63,3 +79,11 @@ class CpuArch(CoreSysAttributes):
             if self_arch in arch_list:
                 return self_arch
         raise HassioArchNotFound()
+
+    def detect_cpu(self) -> str:
+        """Return the arch type of local CPU."""
+        cpu = platform.machine()
+        for check, value in MAP_CPU.items():
+            if cpu.startswith(check):
+                return value
+        return self.sys_supervisor.arch
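Reviewer note: detect_cpu() normalizes the raw platform.machine() string (for example "armv7l" or "x86_64") to a Hass.io arch name by prefix matching against MAP_CPU, falling back to the Supervisor's own arch. A standalone sketch of the same lookup, outside the CpuArch class (the fallback default here is an assumption standing in for self.sys_supervisor.arch):

    # Standalone sketch of the prefix lookup added above.
    import platform

    MAP_CPU = {
        "armv7": "armv7",
        "armv6": "armhf",
        "armv8": "aarch64",
        "aarch64": "aarch64",
        "i686": "i386",
        "x86_64": "amd64",
    }


    def detect_cpu(fallback: str = "amd64") -> str:
        """Map platform.machine() output to a Hass.io arch name."""
        cpu = platform.machine()
        for check, value in MAP_CPU.items():
            if cpu.startswith(check):
                return value
        return fallback  # the Supervisor falls back to its own build arch


    print(detect_cpu())  # 'amd64' on a typical x86_64 host, 'armv7' on armv7l, ...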
@@ -3,7 +3,7 @@ from pathlib import Path
 from ipaddress import ip_network
 
 
-HASSIO_VERSION = "165"
+HASSIO_VERSION = "166"
 
 URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"
 URL_HASSIO_VERSION = "https://s3.amazonaws.com/hassio-version/{channel}.json"
hassio/discovery/services/adguard.py (new file)
@@ -0,0 +1,11 @@
+"""Discovery service for AdGuard."""
+import voluptuous as vol
+
+from hassio.validate import NETWORK_PORT
+
+from ..const import ATTR_HOST, ATTR_PORT
+
+
+SCHEMA = vol.Schema(
+    {vol.Required(ATTR_HOST): vol.Coerce(str), vol.Required(ATTR_PORT): NETWORK_PORT}
+)
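Reviewer note: the discovery payload only needs a host string and a valid network port. A standalone check of the same shape, with NETWORK_PORT approximated by a plain port-range validator since hassio.validate is not importable outside the Supervisor (assumption); the literal keys mirror ATTR_HOST/ATTR_PORT:

    # Standalone sketch of the AdGuard discovery payload validation.
    import voluptuous as vol

    NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))

    SCHEMA = vol.Schema(
        {vol.Required("host"): vol.Coerce(str), vol.Required("port"): NETWORK_PORT}
    )

    print(SCHEMA({"host": "172.30.32.1", "port": 3000}))  # accepted

    try:
        SCHEMA({"host": "172.30.32.1"})  # missing port
    except vol.Invalid as err:
        print("rejected:", err)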
@@ -8,7 +8,6 @@ import os
 from pathlib import Path
 import re
 import secrets
-import socket
 import time
 from typing import Any, AsyncContextManager, Awaitable, Dict, Optional
 from uuid import UUID
@@ -42,7 +41,7 @@ from .exceptions import (
     HomeAssistantError,
     HomeAssistantUpdateError,
 )
-from .utils import convert_to_ascii, process_lock
+from .utils import convert_to_ascii, process_lock, check_port
 from .utils.json import JsonConfig
 from .validate import SCHEMA_HASS_CONFIG
 
@@ -511,22 +510,14 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
     async def _block_till_run(self) -> None:
         """Block until Home-Assistant is booting up or startup timeout."""
         start_time = time.monotonic()
+
+        # Database migration
         migration_progress = False
         migration_file = Path(self.sys_config.path_homeassistant, ".migration_progress")
 
-        def check_port():
-            """Check if port is mapped."""
-            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-            try:
-                result = sock.connect_ex((str(self.ip_address), self.api_port))
-                sock.close()
-
-                # Check if the port is available
-                if result == 0:
-                    return True
-            except OSError:
-                pass
-            return False
+        # PIP installation
+        pip_progress = False
+        pip_file = Path(self.sys_config.path_homeassistant, ".pip_progress")
 
         while True:
             await asyncio.sleep(5)
@@ -537,7 +528,9 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
                 break
 
             # 2: Check if API response
-            if await self.sys_run_in_executor(check_port):
+            if await self.sys_run_in_executor(
+                check_port, self.ip_address, self.api_port
+            ):
                 _LOGGER.info("Detect a running Home Assistant instance")
                 self._error_state = False
                 return
@@ -553,7 +546,18 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
                     start_time = time.monotonic()
                     _LOGGER.info("Home Assistant record migration done")
 
-            # 4: Timeout
+            # 4: Running PIP installation
+            if pip_file.exists():
+                if not pip_progress:
+                    pip_progress = True
+                    _LOGGER.info("Home Assistant pip installation in progress")
+                continue
+            elif pip_progress:
+                pip_progress = False  # Reset start time
+                start_time = time.monotonic()
+                _LOGGER.info("Home Assistant pip installation done")
+
+            # 5: Timeout
             if time.monotonic() - start_time > self.wait_boot:
                 _LOGGER.warning("Don't wait anymore of Home Assistant startup!")
                 break
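Reviewer note: the new pip check mirrors the existing migration check: Home Assistant Core drops a sentinel file (.pip_progress, like .migration_progress) while the long step runs, and the Supervisor keeps resetting its startup timeout as long as that file exists. A condensed, hypothetical sketch of this wait pattern (names and timings are illustrative):

    # Condensed sketch of the sentinel-file wait loop; the real loop also
    # probes the container state and the API port on each pass.
    import asyncio
    import time
    from pathlib import Path


    async def wait_with_sentinel(sentinel: Path, wait_boot: float = 600.0) -> bool:
        start_time = time.monotonic()
        in_progress = False
        while True:
            await asyncio.sleep(5)

            if sentinel.exists():
                if not in_progress:
                    in_progress = True
                    print("long-running step in progress")
                continue  # progress file present: never hit the timeout
            elif in_progress:
                in_progress = False
                start_time = time.monotonic()  # reset the timeout window
                print("long-running step done")

            if time.monotonic() - start_time > wait_boot:
                return False  # gave up waiting for startup

In the real method the loop returns as soon as check_port() reports the API port open.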
@ -1,13 +1,15 @@
|
|||||||
"""Tools file for Hass.io."""
|
"""Tools file for Hass.io."""
|
||||||
|
from datetime import datetime
|
||||||
|
from ipaddress import IPv4Address
|
||||||
import logging
|
import logging
|
||||||
import re
|
import re
|
||||||
from datetime import datetime
|
import socket
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
RE_STRING = re.compile(r"\x1b(\[.*?[@-~]|\].*?(\x07|\x1b\\))")
|
RE_STRING = re.compile(r"\x1b(\[.*?[@-~]|\].*?(\x07|\x1b\\))")
|
||||||
|
|
||||||
|
|
||||||
def convert_to_ascii(raw) -> str:
|
def convert_to_ascii(raw: bytes) -> str:
|
||||||
"""Convert binary to ascii and remove colors."""
|
"""Convert binary to ascii and remove colors."""
|
||||||
return RE_STRING.sub("", raw.decode())
|
return RE_STRING.sub("", raw.decode())
|
||||||
|
|
||||||
@ -53,3 +55,18 @@ class AsyncThrottle:
|
|||||||
return await method(*args, **kwargs)
|
return await method(*args, **kwargs)
|
||||||
|
|
||||||
return wrapper
|
return wrapper
|
||||||
|
|
||||||
|
|
||||||
|
def check_port(address: IPv4Address, port: int) -> bool:
|
||||||
|
"""Check if port is mapped."""
|
||||||
|
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||||
|
try:
|
||||||
|
result = sock.connect_ex((str(address), port))
|
||||||
|
sock.close()
|
||||||
|
|
||||||
|
# Check if the port is available
|
||||||
|
if result == 0:
|
||||||
|
return True
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
return False
|
||||||
|
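Reviewer note: check_port() is now a plain synchronous helper keyed on an explicit address and port, which is what makes the new unit tests at the bottom of this diff possible. A standalone copy for quick experimentation (not imported from hassio here, so it runs without the Supervisor package; the closed-port probe targets localhost so it fails fast instead of waiting on an unroutable address):

    # Standalone copy of the helper added above, for quick experimentation.
    import socket
    from ipaddress import IPv4Address, ip_address


    def check_port(address: IPv4Address, port: int) -> bool:
        """Return True if a TCP connection to address:port succeeds."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            result = sock.connect_ex((str(address), port))
            sock.close()

            # connect_ex() returns 0 when the connect succeeded
            if result == 0:
                return True
        except OSError:
            pass
        return False


    print(check_port(ip_address("8.8.8.8"), 53))   # usually True (public DNS)
    print(check_port(ip_address("127.0.0.1"), 1))  # False: connection refused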
@@ -4,7 +4,7 @@ attrs==19.1.0
 cchardet==2.1.4
 colorlog==4.0.2
 cpe==1.2.1
-cryptography==2.6.1
+cryptography==2.7
 docker==4.0.1
 gitpython==2.1.11
 pytz==2019.1
@@ -1,5 +1,5 @@
 flake8==3.7.7
 pylint==2.3.1
-pytest==4.5.0
+pytest==4.6.2
 pytest-timeout==1.3.3
 pytest-aiohttp==0.3.0
setup.py
@@ -3,33 +3,40 @@ from setuptools import setup
 from hassio.const import HASSIO_VERSION
 
 setup(
-    name='HassIO',
+    name="HassIO",
     version=HASSIO_VERSION,
-    license='BSD License',
-    author='The Home Assistant Authors',
-    author_email='hello@home-assistant.io',
-    url='https://home-assistant.io/',
-    description=('Open-source private cloud os for Home-Assistant'
-                 ' based on HassOS'),
-    long_description=('A maintainless private cloud operator system that'
-                      'setup a Home-Assistant instance. Based on HassOS'),
+    license="BSD License",
+    author="The Home Assistant Authors",
+    author_email="hello@home-assistant.io",
+    url="https://home-assistant.io/",
+    description=("Open-source private cloud os for Home-Assistant" " based on HassOS"),
+    long_description=(
+        "A maintainless private cloud operator system that"
+        "setup a Home-Assistant instance. Based on HassOS"
+    ),
     classifiers=[
-        'Intended Audience :: End Users/Desktop',
-        'Intended Audience :: Developers',
-        'License :: OSI Approved :: Apache Software License',
-        'Operating System :: OS Independent',
-        'Topic :: Home Automation'
-        'Topic :: Software Development :: Libraries :: Python Modules',
-        'Topic :: Scientific/Engineering :: Atmospheric Science',
-        'Development Status :: 5 - Production/Stable',
-        'Intended Audience :: Developers',
-        'Programming Language :: Python :: 3.6',
+        "Intended Audience :: End Users/Desktop",
+        "Intended Audience :: Developers",
+        "License :: OSI Approved :: Apache Software License",
+        "Operating System :: OS Independent",
+        "Topic :: Home Automation"
+        "Topic :: Software Development :: Libraries :: Python Modules",
+        "Topic :: Scientific/Engineering :: Atmospheric Science",
+        "Development Status :: 5 - Production/Stable",
+        "Intended Audience :: Developers",
+        "Programming Language :: Python :: 3.6",
     ],
-    keywords=['docker', 'home-assistant', 'api'],
+    keywords=["docker", "home-assistant", "api"],
     zip_safe=False,
-    platforms='any',
+    platforms="any",
     packages=[
-        'hassio', 'hassio.docker', 'hassio.addons', 'hassio.api', 'hassio.misc',
-        'hassio.utils', 'hassio.snapshots'
+        "hassio",
+        "hassio.docker",
+        "hassio.addons",
+        "hassio.api",
+        "hassio.misc",
+        "hassio.utils",
+        "hassio.snapshots",
     ],
-    include_package_data=True)
+    include_package_data=True,
+)
@@ -1 +1 @@
 """Hass.io Testframework."""
@@ -1 +1 @@
 """Add-ons tests."""
@@ -14,34 +14,35 @@ def test_basic_config():
 
     valid_config = vd.SCHEMA_ADDON_CONFIG(config)
 
-    assert valid_config['name'] == "Test Add-on"
-    assert valid_config['image'] == "test/{arch}-my-custom-addon"
+    assert valid_config["name"] == "Test Add-on"
+    assert valid_config["image"] == "test/{arch}-my-custom-addon"
 
     # Check defaults
-    assert not valid_config['host_network']
-    assert not valid_config['host_ipc']
-    assert not valid_config['host_dbus']
-    assert not valid_config['host_pid']
+    assert not valid_config["host_network"]
+    assert not valid_config["host_ipc"]
+    assert not valid_config["host_dbus"]
+    assert not valid_config["host_pid"]
 
-    assert not valid_config['hassio_api']
-    assert not valid_config['homeassistant_api']
-    assert not valid_config['docker_api']
+    assert not valid_config["hassio_api"]
+    assert not valid_config["homeassistant_api"]
+    assert not valid_config["docker_api"]
 
 
 def test_invalid_repository():
     """Validate basic config with invalid repositories."""
     config = load_json_fixture("basic-addon-config.json")
 
-    config['image'] = "something"
+    config["image"] = "something"
     with pytest.raises(vol.Invalid):
         vd.SCHEMA_ADDON_CONFIG(config)
 
-    config['image'] = "homeassistant/no-valid-repo:no-tag-allow"
+    config["image"] = "homeassistant/no-valid-repo:no-tag-allow"
     with pytest.raises(vol.Invalid):
         vd.SCHEMA_ADDON_CONFIG(config)
 
     config[
-        'image'] = "registry.gitlab.com/company/add-ons/test-example/text-example:no-tag-allow"
+        "image"
+    ] = "registry.gitlab.com/company/add-ons/test-example/text-example:no-tag-allow"
     with pytest.raises(vol.Invalid):
         vd.SCHEMA_ADDON_CONFIG(config)
 
@@ -51,16 +52,16 @@ def test_valid_repository():
     config = load_json_fixture("basic-addon-config.json")
 
     custom_registry = "registry.gitlab.com/company/add-ons/core/test-example"
-    config['image'] = custom_registry
+    config["image"] = custom_registry
     valid_config = vd.SCHEMA_ADDON_CONFIG(config)
-    assert valid_config['image'] == custom_registry
+    assert valid_config["image"] == custom_registry
 
 
 def test_valid_map():
     """Validate basic config with different valid maps"""
     config = load_json_fixture("basic-addon-config.json")
 
-    config['map'] = ['backup:rw', 'ssl:ro', 'config']
+    config["map"] = ["backup:rw", "ssl:ro", "config"]
     vd.SCHEMA_ADDON_CONFIG(config)
 
 
tests/discovery/test_adguard.py (new file)
@@ -0,0 +1,19 @@
+"""Test adguard discovery."""
+
+import voluptuous as vol
+import pytest
+
+from hassio.discovery.validate import valid_discovery_config
+
+
+def test_good_config():
+    """Test good deconz config."""
+
+    valid_discovery_config("adguard", {"host": "test", "port": 3812})
+
+
+def test_bad_config():
+    """Test good adguard config."""
+
+    with pytest.raises(vol.Invalid):
+        valid_discovery_config("adguard", {"host": "test"})
@@ -1,4 +1,15 @@
 """Test arch object."""
+from unittest.mock import patch
+
+import pytest
+
+
+@pytest.fixture(autouse=True)
+def mock_detect_cpu():
+    """Mock cpu detection."""
+    with patch("platform.machine") as detect_mock:
+        detect_mock.return_value = "Unknown"
+        yield detect_mock
 
 
 async def test_machine_not_exits(coresys, sys_machine, sys_supervisor):
@@ -32,118 +43,144 @@ async def test_supervisor_arch(coresys, sys_machine, sys_supervisor):
     assert coresys.arch.supervisor == "amd64"
 
 
-async def test_raspberrypi_arch(coresys, sys_machine):
+async def test_raspberrypi_arch(coresys, sys_machine, sys_supervisor):
     """Test arch for raspberrypi."""
     sys_machine.return_value = "raspberrypi"
+    sys_supervisor.arch = "armhf"
     await coresys.arch.load()
 
     assert coresys.arch.default == "armhf"
     assert coresys.arch.supported == ["armhf"]
 
 
-async def test_raspberrypi2_arch(coresys, sys_machine):
+async def test_raspberrypi2_arch(coresys, sys_machine, sys_supervisor):
     """Test arch for raspberrypi2."""
     sys_machine.return_value = "raspberrypi2"
+    sys_supervisor.arch = "armv7"
     await coresys.arch.load()
 
     assert coresys.arch.default == "armv7"
     assert coresys.arch.supported == ["armv7", "armhf"]
 
 
-async def test_raspberrypi3_arch(coresys, sys_machine):
+async def test_raspberrypi3_arch(coresys, sys_machine, sys_supervisor):
     """Test arch for raspberrypi3."""
     sys_machine.return_value = "raspberrypi3"
+    sys_supervisor.arch = "armv7"
     await coresys.arch.load()
 
     assert coresys.arch.default == "armv7"
     assert coresys.arch.supported == ["armv7", "armhf"]
 
 
-async def test_raspberrypi3_64_arch(coresys, sys_machine):
+async def test_raspberrypi3_64_arch(coresys, sys_machine, sys_supervisor):
     """Test arch for raspberrypi3_64."""
     sys_machine.return_value = "raspberrypi3-64"
+    sys_supervisor.arch = "aarch64"
     await coresys.arch.load()
 
     assert coresys.arch.default == "aarch64"
     assert coresys.arch.supported == ["aarch64", "armv7", "armhf"]
 
 
-async def test_tinker_arch(coresys, sys_machine):
+async def test_tinker_arch(coresys, sys_machine, sys_supervisor):
     """Test arch for tinker."""
     sys_machine.return_value = "tinker"
+    sys_supervisor.arch = "armv7"
     await coresys.arch.load()
 
     assert coresys.arch.default == "armv7"
     assert coresys.arch.supported == ["armv7", "armhf"]
 
 
-async def test_odroid_c2_arch(coresys, sys_machine):
+async def test_odroid_c2_arch(coresys, sys_machine, sys_supervisor):
     """Test arch for odroid-c2."""
     sys_machine.return_value = "odroid-c2"
+    sys_supervisor.arch = "aarch64"
     await coresys.arch.load()
 
     assert coresys.arch.default == "aarch64"
     assert coresys.arch.supported == ["aarch64"]
 
 
-async def test_odroid_xu_arch(coresys, sys_machine):
+async def test_odroid_xu_arch(coresys, sys_machine, sys_supervisor):
     """Test arch for odroid-xu."""
     sys_machine.return_value = "odroid-xu"
+    sys_supervisor.arch = "armv7"
     await coresys.arch.load()
 
     assert coresys.arch.default == "armv7"
     assert coresys.arch.supported == ["armv7", "armhf"]
 
 
-async def test_orangepi_prime_arch(coresys, sys_machine):
+async def test_orangepi_prime_arch(coresys, sys_machine, sys_supervisor):
     """Test arch for orangepi_prime."""
     sys_machine.return_value = "orangepi-prime"
+    sys_supervisor.arch = "aarch64"
     await coresys.arch.load()
 
     assert coresys.arch.default == "aarch64"
     assert coresys.arch.supported == ["aarch64"]
 
 
-async def test_intel_nuc_arch(coresys, sys_machine):
+async def test_intel_nuc_arch(coresys, sys_machine, sys_supervisor):
     """Test arch for intel-nuc."""
     sys_machine.return_value = "intel-nuc"
+    sys_supervisor.arch = "amd64"
     await coresys.arch.load()
 
     assert coresys.arch.default == "amd64"
     assert coresys.arch.supported == ["amd64", "i386"]
 
 
-async def test_qemux86_arch(coresys, sys_machine):
+async def test_qemux86_arch(coresys, sys_machine, sys_supervisor):
     """Test arch for qemux86."""
     sys_machine.return_value = "qemux86"
+    sys_supervisor.arch = "i386"
     await coresys.arch.load()
 
     assert coresys.arch.default == "i386"
     assert coresys.arch.supported == ["i386"]
 
 
-async def test_qemux86_64_arch(coresys, sys_machine):
+async def test_qemux86_64_arch(coresys, sys_machine, sys_supervisor):
     """Test arch for qemux86-64."""
     sys_machine.return_value = "qemux86-64"
+    sys_supervisor.arch = "amd64"
     await coresys.arch.load()
 
     assert coresys.arch.default == "amd64"
     assert coresys.arch.supported == ["amd64", "i386"]
 
 
-async def test_qemuarm_arch(coresys, sys_machine):
+async def test_qemuarm_arch(coresys, sys_machine, sys_supervisor):
     """Test arch for qemuarm."""
     sys_machine.return_value = "qemuarm"
+    sys_supervisor.arch = "armhf"
     await coresys.arch.load()
 
     assert coresys.arch.default == "armhf"
     assert coresys.arch.supported == ["armhf"]
 
 
-async def test_qemuarm_64_arch(coresys, sys_machine):
+async def test_qemuarm_64_arch(coresys, sys_machine, sys_supervisor):
     """Test arch for qemuarm-64."""
     sys_machine.return_value = "qemuarm-64"
+    sys_supervisor.arch = "aarch64"
     await coresys.arch.load()
 
     assert coresys.arch.default == "aarch64"
     assert coresys.arch.supported == ["aarch64"]
+
+
+async def test_qemuarm_arch_native_armv7(
+    coresys, sys_machine, mock_detect_cpu, sys_supervisor
+):
+    """Test arch for qemuarm."""
+    sys_machine.return_value = "qemuarm"
+    sys_supervisor.arch = "armhf"
+    mock_detect_cpu.return_value = "armv7l"
+    await coresys.arch.load()
+
+    assert coresys.arch.default == "armhf"
+    assert coresys.arch.supported == ["armhf", "armv7"]
tests/utils/test_check_port.py (new file)
@@ -0,0 +1,14 @@
+"""Check ports."""
+from ipaddress import ip_address
+
+from hassio.utils import check_port
+
+
+def test_exists_open_port():
+    """Test a exists network port."""
+    assert check_port(ip_address("8.8.8.8"), 53)
+
+
+def test_not_exists_port():
+    """Test a not exists network service."""
+    assert not check_port(ip_address("192.0.2.1"), 53)