Merge pull request #1139 from home-assistant/dev

Release 167
This commit is contained in:
Pascal Vizeli 2019-06-25 17:34:42 +02:00 committed by GitHub
commit f374852801
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
19 changed files with 277 additions and 221 deletions

12
.devcontainer/Dockerfile Normal file
View File

@ -0,0 +1,12 @@
FROM python:3.7

WORKDIR /workspace

# Copy only the dependency manifests first so the install layer is cached
# independently of source-tree changes.
COPY requirements.txt requirements_tests.txt /workspace/

# Install runtime deps, test deps, and dev tooling in a single layer.
# Fix: the original mixed `pip` and `pip3` in the same RUN; both resolve to
# the same interpreter in this image, but one consistent spelling avoids
# confusion and accidental divergence if the base image changes.
RUN pip3 install -r requirements.txt \
    && pip3 install -r requirements_tests.txt \
    && pip3 install black tox

# Set the default shell to bash instead of sh
ENV SHELL /bin/bash

View File

@ -0,0 +1,18 @@
// See https://aka.ms/vscode-remote/devcontainer.json for format details.
{
    "name": "Hass.io dev",
    // Build the dev container from the sibling Dockerfile, using the
    // repository root (one level up) as the build context.
    "context": "..",
    "dockerFile": "Dockerfile",
    // VS Code extensions installed inside the container.
    "extensions": [
        "ms-python.python"
    ],
    // Workspace settings applied inside the container: pylint for linting,
    // black for formatting on save/type (but not on paste).
    "settings": {
        "python.pythonPath": "/usr/local/bin/python",
        "python.linting.pylintEnabled": true,
        "python.linting.enabled": true,
        "python.formatting.provider": "black",
        "editor.formatOnPaste": false,
        "editor.formatOnSave": true,
        "editor.formatOnType": true
    }
}

View File

@ -12,17 +12,23 @@ RUN apk add --no-cache \
socat \
glib \
libstdc++ \
eudev \
eudev-libs
# Install requirements
COPY requirements.txt /usr/src/
RUN export MAKEFLAGS="-j$(nproc)" \
&& pip3 install --no-cache-dir --find-links https://wheels.hass.io/alpine-3.9/${BUILD_ARCH}/ \
&& pip3 install --no-cache-dir --find-links "https://wheels.home-assistant.io/alpine-$(cut -d '.' -f 1-2 < /etc/alpine-release)/${BUILD_ARCH}/" \
-r /usr/src/requirements.txt \
&& rm -f /usr/src/requirements.txt
# Install HassIO
COPY . /usr/src/hassio
RUN pip3 install --no-cache-dir -e /usr/src/hassio
RUN pip3 install --no-cache-dir -e /usr/src/hassio \
&& python3 -m compileall /usr/src/hassio/hassio
# Initialize udev daemon, handle CMD
COPY entry.sh /bin/
ENTRYPOINT ["/bin/entry.sh"]
CMD [ "python3", "-m", "hassio" ]

View File

@ -14,183 +14,155 @@ trigger:
pr:
- dev
variables:
- name: basePythonTag
value: '3.7-alpine3.10'
- name: versionHadolint
value: 'v1.16.3'
- name: versionBuilder
value: '3.2'
value: '4.4'
- name: versionWheels
value: '0.6'
value: '0.11'
- group: docker
- group: wheels
jobs:
stages:
- job: 'Tox'
pool:
vmImage: 'ubuntu-latest'
steps:
- task: UsePythonVersion@0
displayName: 'Use Python 3.7'
inputs:
versionSpec: '3.7'
- script: pip install tox
displayName: 'Install Tox'
- script: tox
displayName: 'Run Tox'
- stage: 'Test'
jobs:
- job: 'Tox'
pool:
vmImage: 'ubuntu-latest'
steps:
- task: UsePythonVersion@0
displayName: 'Use Python 3.7'
inputs:
versionSpec: '3.7'
- script: pip install tox
displayName: 'Install Tox'
- script: tox
displayName: 'Run Tox'
- job: 'Black'
pool:
vmImage: 'ubuntu-latest'
steps:
- task: UsePythonVersion@0
displayName: 'Use Python $(python.version)'
inputs:
versionSpec: '3.7'
- script: pip install black
displayName: 'Install black'
- script: black --check hassio tests
displayName: 'Run Black'
- job: 'JQ'
pool:
vmImage: 'ubuntu-latest'
steps:
- script: sudo apt-get install -y jq
displayName: 'Install JQ'
- bash: |
shopt -s globstar
cat **/*.json | jq '.'
displayName: 'Run JQ'
- job: 'Hadolint'
pool:
vmImage: 'ubuntu-latest'
steps:
- script: sudo docker pull hadolint/hadolint:$(versionHadolint)
displayName: 'Install Hadolint'
- script: |
sudo docker run --rm -i \
-v $(pwd)/.hadolint.yaml:/.hadolint.yaml:ro \
hadolint/hadolint:$(versionHadolint) < Dockerfile
displayName: 'Run Hadolint'
- stage: 'Wheels'
jobs:
- job: 'Wheels'
condition: eq(variables['Build.SourceBranchName'], 'dev')
timeoutInMinutes: 360
pool:
vmImage: 'ubuntu-latest'
strategy:
maxParallel: 3
matrix:
amd64:
buildArch: 'amd64'
i386:
buildArch: 'i386'
armhf:
buildArch: 'armhf'
armv7:
buildArch: 'armv7'
aarch64:
buildArch: 'aarch64'
steps:
- script: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends \
qemu-user-static \
binfmt-support \
curl
- job: 'Black'
pool:
vmImage: 'ubuntu-latest'
steps:
- task: UsePythonVersion@0
displayName: 'Use Python $(python.version)'
inputs:
versionSpec: '3.7'
- script: pip install black
displayName: 'Install black'
- script: black --check hassio tests
displayName: 'Run Black'
sudo mount binfmt_misc -t binfmt_misc /proc/sys/fs/binfmt_misc
sudo update-binfmts --enable qemu-arm
sudo update-binfmts --enable qemu-aarch64
displayName: 'Initial cross build'
- script: |
mkdir -p .ssh
echo -e "-----BEGIN RSA PRIVATE KEY-----\n$(wheelsSSH)\n-----END RSA PRIVATE KEY-----" >> .ssh/id_rsa
ssh-keyscan -H $(wheelsHost) >> .ssh/known_hosts
chmod 600 .ssh/*
displayName: 'Install ssh key'
- script: sudo docker pull homeassistant/$(buildArch)-wheels:$(versionWheels)-$(basePythonTag)
displayName: 'Install wheels builder'
- script: |
sudo docker run --rm -v $(pwd):/data:ro -v $(pwd)/.ssh:/root/.ssh:rw \
homeassistant/$(buildArch)-wheels:$(versionWheels)-$(basePythonTag) \
--apk "build-base;libffi-dev;openssl-dev" \
--index $(wheelsIndex) \
--requirement requirements.txt \
--upload rsync \
--remote wheels@$(wheelsHost):/opt/wheels
displayName: 'Run wheels build'
- stage: 'Deploy'
jobs:
- job: 'VersionValidate'
condition: or(startsWith(variables['Build.SourceBranch'], 'refs/tags'), eq(variables['Build.SourceBranchName'], 'dev'))
pool:
vmImage: 'ubuntu-latest'
steps:
- task: UsePythonVersion@0
displayName: 'Use Python 3.7'
inputs:
versionSpec: '3.7'
- script: |
setup_version="$(python setup.py -V)"
branch_version="$(Build.SourceBranchName)"
- job: 'JQ'
pool:
vmImage: 'ubuntu-latest'
steps:
- script: sudo apt-get install -y jq
displayName: 'Install JQ'
- bash: |
shopt -s globstar
cat **/*.json | jq '.'
displayName: 'Run JQ'
- job: 'Hadolint'
pool:
vmImage: 'ubuntu-latest'
steps:
- script: sudo docker pull hadolint/hadolint:$(versionHadolint)
displayName: 'Install Hadolint'
- script: |
sudo docker run --rm -i \
-v $(pwd)/.hadolint.yaml:/.hadolint.yaml:ro \
hadolint/hadolint:$(versionHadolint) < Dockerfile
displayName: 'Run Hadolint'
- job: 'Wheels'
condition: eq(variables['Build.SourceBranchName'], 'dev')
timeoutInMinutes: 360
pool:
vmImage: 'ubuntu-latest'
strategy:
maxParallel: 3
matrix:
amd64:
buildArch: 'amd64'
i386:
buildArch: 'i386'
armhf:
buildArch: 'armhf'
armv7:
buildArch: 'armv7'
aarch64:
buildArch: 'aarch64'
steps:
- script: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends \
qemu-user-static \
binfmt-support \
curl
sudo mount binfmt_misc -t binfmt_misc /proc/sys/fs/binfmt_misc
sudo update-binfmts --enable qemu-arm
sudo update-binfmts --enable qemu-aarch64
displayName: 'Initial cross build'
- script: |
mkdir -p .ssh
echo -e "-----BEGIN RSA PRIVATE KEY-----\n$(wheelsSSH)\n-----END RSA PRIVATE KEY-----" >> .ssh/id_rsa
ssh-keyscan -H $(wheelsHost) >> .ssh/known_hosts
chmod 600 .ssh/*
displayName: 'Install ssh key'
- script: sudo docker pull homeassistant/$(buildArch)-wheels:$(versionWheels)
displayName: 'Install wheels builder'
- script: |
curl -s -o requirements_diff.txt https://raw.githubusercontent.com/home-assistant/hassio/master/requirements.txt
sudo docker run --rm -v $(pwd):/data:ro -v $(pwd)/.ssh:/root/.ssh:rw \
homeassistant/$(buildArch)-wheels:$(versionWheels) \
--apk "build-base;libffi-dev;openssl-dev" \
--index $(wheelsIndex) \
--requirement requirements.txt \
--requirement-diff requirements_diff.txt \
--upload rsync \
--remote wheels@$(wheelsHost):/opt/wheels
displayName: 'Run wheels build'
- job: 'ReleaseDEV'
condition: and(eq(variables['Build.SourceBranchName'], 'dev'), succeeded('JQ'), succeeded('Tox'), succeeded('Hadolint'), succeeded('Wheels'))
dependsOn:
- 'JQ'
- 'Tox'
- 'Hadolint'
- 'Wheels'
pool:
vmImage: 'ubuntu-latest'
steps:
- script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
displayName: 'Docker hub login'
- script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
displayName: 'Install Builder'
- script: |
sudo docker run --rm --privileged \
-v ~/.docker:/root/.docker \
-v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
homeassistant/amd64-builder:$(versionBuilder) \
--supervisor --all -t /data --version dev --docker-hub homeassistant
displayName: 'Build DEV'
- job: 'VersionValidate'
condition: startsWith(variables['Build.SourceBranch'], 'refs/tags')
pool:
vmImage: 'ubuntu-latest'
steps:
- task: UsePythonVersion@0
displayName: 'Use Python 3.7'
inputs:
versionSpec: '3.7'
- script: |
setup_version="$(python setup.py -V)"
branch_version="$(Build.SourceBranchName)"
if [ "${setup_version}" != "${branch_version}" ]; then
echo "Version of tag ${branch_version} don't match with ${setup_version}!"
exit 1
fi
displayName: 'Check version of branch/tag'
- job: 'Release'
condition: and(startsWith(variables['Build.SourceBranch'], 'refs/tags'), succeeded('JQ'), succeeded('Tox'), succeeded('Hadolint'), succeeded('VersionValidate'))
dependsOn:
- 'JQ'
- 'Tox'
- 'Hadolint'
- 'VersionValidate'
pool:
vmImage: 'ubuntu-latest'
steps:
- script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
displayName: 'Docker hub login'
- script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
displayName: 'Install Builder'
- script: |
sudo docker run --rm --privileged \
-v ~/.docker:/root/.docker \
-v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
homeassistant/amd64-builder:$(versionBuilder) \
--supervisor --all -t /data --docker-hub homeassistant
displayName: 'Build Release'
if [ "${branch_version}" == "dev" ]; then
exit 0
elif [ "${setup_version}" != "${branch_version}" ]; then
echo "Version of tag ${branch_version} don't match with ${setup_version}!"
exit 1
fi
displayName: 'Check version of branch/tag'
- job: 'Release'
dependsOn:
- 'VersionValidate'
pool:
vmImage: 'ubuntu-latest'
steps:
- script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
displayName: 'Docker hub login'
- script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
displayName: 'Install Builder'
- script: |
sudo docker run --rm --privileged \
-v ~/.docker:/root/.docker \
-v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
homeassistant/amd64-builder:$(versionBuilder) \
--supervisor $(basePythonTag) --version $(Build.SourceBranchName) \
--all -t /data --docker-hub homeassistant
displayName: 'Build Release'

13
entry.sh Executable file
View File

@ -0,0 +1,13 @@
#!/bin/bash
# Container entrypoint: start the udev daemon, replay device events so /dev
# is populated, then exec the requested command (the image CMD by default).
set -e

# Run udevd in the background and trigger events for devices that were
# already present, so their /dev nodes exist before the main process starts.
udevd --daemon
udevadm trigger

# Resolve $1 on PATH. If found, replace this shell with it via exec so the
# command inherits PID 1 (proper signal delivery); otherwise fail loudly.
if CMD="$(command -v "$1")"; then
    shift
    exec "$CMD" "$@"
else
    echo "Command not found: $1"
    exit 1
fi

View File

@ -39,12 +39,9 @@ from ..const import (
ATTR_IMAGE,
ATTR_INGRESS,
ATTR_INGRESS_ENTRY,
ATTR_INGRESS_PANEL,
ATTR_INGRESS_PORT,
ATTR_INGRESS_TOKEN,
ATTR_INGRESS_PANEL,
ATTR_PANEL_ADMIN,
ATTR_PANEL_ICON,
ATTR_PANEL_TITLE,
ATTR_KERNEL_MODULES,
ATTR_LEGACY,
ATTR_LOCATON,
@ -53,6 +50,9 @@ from ..const import (
ATTR_NAME,
ATTR_NETWORK,
ATTR_OPTIONS,
ATTR_PANEL_ADMIN,
ATTR_PANEL_ICON,
ATTR_PANEL_TITLE,
ATTR_PORTS,
ATTR_PORTS_DESCRIPTION,
ATTR_PRIVILEGED,

View File

@ -22,10 +22,11 @@ from .host import HostManager
from .ingress import Ingress
from .services import ServiceManager
from .snapshots import SnapshotManager
from .supervisor import Supervisor
from .store import StoreManager
from .supervisor import Supervisor
from .tasks import Tasks
from .updater import Updater
from .utils.dt import fetch_timezone
_LOGGER = logging.getLogger(__name__)
@ -66,6 +67,10 @@ async def initialize_coresys():
if MACHINE_ID.exists():
coresys.machine_id = MACHINE_ID.read_text().strip()
# Init TimeZone
if coresys.config.timezone == "UTC":
coresys.config.timezone = await fetch_timezone(coresys.websession)
return coresys

View File

@ -3,9 +3,6 @@ from datetime import datetime
import logging
import os
from pathlib import Path, PurePath
import re
import pytz
from .const import (
ATTR_ADDONS_CUSTOM_LIST,
@ -40,8 +37,6 @@ APPARMOR_DATA = PurePath("apparmor")
DEFAULT_BOOT_TIME = datetime.utcfromtimestamp(0).isoformat()
RE_TIMEZONE = re.compile(r"time_zone: (?P<timezone>[\w/\-+]+)")
class CoreConfig(JsonConfig):
"""Hold all core config data."""
@ -53,21 +48,7 @@ class CoreConfig(JsonConfig):
@property
def timezone(self):
"""Return system timezone."""
config_file = Path(self.path_homeassistant, "configuration.yaml")
try:
assert config_file.exists()
configuration = config_file.read_text()
data = RE_TIMEZONE.search(configuration)
assert data
timezone = data.group("timezone")
pytz.timezone(timezone)
except (pytz.exceptions.UnknownTimeZoneError, OSError, AssertionError):
_LOGGER.debug("Can't parse Home Assistant timezone")
return self._data[ATTR_TIMEZONE]
return timezone
return self._data[ATTR_TIMEZONE]
@timezone.setter
def timezone(self, value):

View File

@ -3,7 +3,7 @@ from pathlib import Path
from ipaddress import ip_network
HASSIO_VERSION = "166"
HASSIO_VERSION = "167"
URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"
URL_HASSIO_VERSION = "https://s3.amazonaws.com/hassio-version/{channel}.json"

View File

@ -12,7 +12,7 @@ from .const import (
STARTUP_APPLICATION,
STARTUP_INITIALIZE,
)
from .exceptions import HassioError, HomeAssistantError
from .exceptions import HassioError, HomeAssistantError, SupervisorUpdateError
_LOGGER = logging.getLogger(__name__)
@ -75,10 +75,16 @@ class HassIO(CoreSysAttributes):
"""Start Hass.io orchestration."""
# on release channel, try update itself
if self.sys_supervisor.need_update:
if self.sys_dev:
_LOGGER.warning("Ignore Hass.io updates on dev!")
elif await self.sys_supervisor.update():
return
try:
if self.sys_dev:
_LOGGER.warning("Ignore Hass.io updates on dev!")
else:
await self.sys_supervisor.update()
except SupervisorUpdateError:
_LOGGER.fatal(
"Can't update supervisor! This will break some Add-ons or affect "
"future version of Home Assistant!"
)
# start api
await self.sys_api.start()

View File

@ -85,11 +85,16 @@ class Discovery(CoreSysAttributes, JsonConfig):
message = Message(addon.slug, service, config)
# Already exists?
for old_message in self.list_messages:
if old_message != message:
for exists_msg in self.list_messages:
if exists_msg != message:
continue
_LOGGER.info("Duplicate discovery message from %s", addon.slug)
return old_message
if exists_msg.config != config:
message = exists_msg
message.config = config
else:
_LOGGER.debug("Duplicate discovery message from %s", addon.slug)
return exists_msg
break
_LOGGER.info("Send discovery to Home Assistant %s from %s", service, addon.slug)
self.message_obj[message.uuid] = message

View File

@ -40,14 +40,6 @@ class DockerHomeAssistant(DockerInterface):
"""Return timeout for Docker actions."""
return 60
@property
def devices(self):
"""Create list of special device to map into Docker."""
devices = []
for device in self.sys_hardware.serial_devices:
devices.append(f"{device}:{device}:rwm")
return devices or None
@property
def ip_address(self) -> IPv4Address:
"""Return IP address of this container."""
@ -73,7 +65,6 @@ class DockerHomeAssistant(DockerInterface):
detach=True,
privileged=True,
init=True,
devices=self.devices,
network_mode="host",
environment={
"HASSIO": self.sys_docker.network.supervisor,
@ -106,7 +97,6 @@ class DockerHomeAssistant(DockerInterface):
command,
privileged=True,
init=True,
devices=self.devices,
detach=True,
stdout=True,
stderr=True,

View File

@ -36,9 +36,15 @@ class Hardware:
"""Return all serial and connected devices."""
dev_list = set()
for device in self.context.list_devices(subsystem="tty"):
if "ID_VENDOR" in device or RE_TTY.search(device.device_node):
if "ID_VENDOR" in device.properties or RE_TTY.search(device.device_node):
dev_list.add(device.device_node)
# Add /dev/serial/by-id devlink for current device
for dev_link in device.device_links:
if not dev_link.startswith("/dev/serial/by-id"):
continue
dev_list.add(dev_link)
return dev_list
@property
@ -46,8 +52,8 @@ class Hardware:
"""Return all input devices."""
dev_list = set()
for device in self.context.list_devices(subsystem="input"):
if "NAME" in device:
dev_list.add(device["NAME"].replace('"', ""))
if "NAME" in device.properties:
dev_list.add(device.properties["NAME"].replace('"', ""))
return dev_list
@ -56,7 +62,7 @@ class Hardware:
"""Return all disk devices."""
dev_list = set()
for device in self.context.list_devices(subsystem="block"):
if device.device_node.startswith("/dev/sd"):
if "ID_NAME" in device.properties:
dev_list.add(device.device_node)
return dev_list

View File

@ -60,6 +60,7 @@ class AsyncThrottle:
def check_port(address: IPv4Address, port: int) -> bool:
"""Check if port is mapped."""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(0.5)
try:
result = sock.connect_ex((str(address), port))
sock.close()

View File

@ -1,13 +1,17 @@
"""Tools file for Hass.io."""
import asyncio
from datetime import datetime, timedelta, timezone, tzinfo
import logging
import re
from typing import Any, Dict, Optional
import aiohttp
import pytz
UTC = pytz.utc
GEOIP_URL = "http://ip-api.com/json/"
_LOGGER = logging.getLogger(__name__)
@ -22,6 +26,21 @@ DATETIME_RE = re.compile(
)
async def fetch_timezone(websession):
    """Look up this host's timezone via the ip-api.com GeoIP service.

    NOTE(review): the original docstring said "freegeoip", but GEOIP_URL
    points at ip-api.com; the log messages below still say freegeoip.

    :param websession: aiohttp ClientSession used for the HTTP request.
    :return: detected timezone name, or "UTC" when the lookup or JSON
             parsing fails (best-effort; failures are only logged).
    """
    data = {}
    try:
        # 10 second overall timeout on the GeoIP request.
        async with websession.get(GEOIP_URL, timeout=10) as request:
            data = await request.json()

    except (aiohttp.ClientError, asyncio.TimeoutError) as err:
        _LOGGER.warning("Can't fetch freegeoip data: %s", err)

    except ValueError as err:
        _LOGGER.warning("Error on parse freegeoip data: %s", err)

    # Missing/empty "timezone" key falls back to UTC.
    return data.get("timezone", "UTC")
# Copyright (c) Django Software Foundation and individual contributors.
# All rights reserved.
# https://github.com/django/django/blob/master/LICENSE

View File

@ -5,7 +5,7 @@ cchardet==2.1.4
colorlog==4.0.2
cpe==1.2.1
cryptography==2.7
docker==4.0.1
docker==4.0.2
gitpython==2.1.11
pytz==2019.1
pyudev==0.21.0

View File

@ -1,5 +1,5 @@
flake8==3.7.7
pylint==2.3.1
pytest==4.6.2
pytest==4.6.3
pytest-timeout==1.3.3
pytest-aiohttp==0.3.0

View File

@ -7,3 +7,20 @@ def load_json_fixture(filename):
"""Load a fixture."""
path = Path(Path(__file__).parent.joinpath("fixtures"), filename)
return json.loads(path.read_text())
def mock_coro(return_value=None, exception=None):
    """Create a single awaitable that resolves to a value or raises.

    Convenience wrapper around mock_coro_func: builds the coroutine
    function and immediately invokes it once.
    """
    factory = mock_coro_func(return_value, exception)
    return factory()
def mock_coro_func(return_value=None, exception=None):
    """Build a coroutine function usable as an async-callable test stub.

    The returned coroutine function accepts and ignores any arguments.
    Awaiting a call raises ``exception`` when one is given (truthy),
    otherwise it resolves to ``return_value``.
    """
    async def _stub(*_args, **_kwargs):
        """Stand-in coroutine for tests."""
        if not exception:
            return return_value
        raise exception

    return _stub

View File

@ -5,6 +5,8 @@ import pytest
from hassio.bootstrap import initialize_coresys
from tests.common import mock_coro
# pylint: disable=redefined-outer-name
@ -18,7 +20,10 @@ def docker():
@pytest.fixture
async def coresys(loop, docker):
"""Create a CoreSys Mock."""
with patch("hassio.bootstrap.initialize_system_data"):
with patch("hassio.bootstrap.initialize_system_data"), patch(
"hassio.bootstrap.fetch_timezone",
return_value=mock_coro(return_value="Europe/Zurich"),
):
coresys_obj = await initialize_coresys()
coresys_obj.ingress.save_data = MagicMock()