Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-07-25 18:16:32 +00:00)
Commit f374852801
.devcontainer/Dockerfile (new file, 12 lines)
@@ -0,0 +1,12 @@
+FROM python:3.7
+
+WORKDIR /workspace
+
+# Install Python dependencies from requirements.txt if it exists
+COPY requirements.txt requirements_tests.txt /workspace/
+RUN pip install -r requirements.txt \
+    && pip3 install -r requirements_tests.txt \
+    && pip install black tox
+
+# Set the default shell to bash instead of sh
+ENV SHELL /bin/bash
.devcontainer/devcontainer.json (new file, 18 lines)
@@ -0,0 +1,18 @@
+// See https://aka.ms/vscode-remote/devcontainer.json for format details.
+{
+    "name": "Hass.io dev",
+    "context": "..",
+    "dockerFile": "Dockerfile",
+    "extensions": [
+        "ms-python.python"
+    ],
+    "settings": {
+        "python.pythonPath": "/usr/local/bin/python",
+        "python.linting.pylintEnabled": true,
+        "python.linting.enabled": true,
+        "python.formatting.provider": "black",
+        "editor.formatOnPaste": false,
+        "editor.formatOnSave": true,
+        "editor.formatOnType": true
+    }
+}
Dockerfile
@@ -12,17 +12,23 @@ RUN apk add --no-cache \
     socat \
     glib \
     libstdc++ \
+    eudev \
     eudev-libs

 # Install requirements
 COPY requirements.txt /usr/src/
 RUN export MAKEFLAGS="-j$(nproc)" \
-    && pip3 install --no-cache-dir --find-links https://wheels.hass.io/alpine-3.9/${BUILD_ARCH}/ \
+    && pip3 install --no-cache-dir --find-links "https://wheels.home-assistant.io/alpine-$(cut -d '.' -f 1-2 < /etc/alpine-release)/${BUILD_ARCH}/" \
     -r /usr/src/requirements.txt \
     && rm -f /usr/src/requirements.txt

 # Install HassIO
 COPY . /usr/src/hassio
-RUN pip3 install --no-cache-dir -e /usr/src/hassio
+RUN pip3 install --no-cache-dir -e /usr/src/hassio \
+    && python3 -m compileall /usr/src/hassio/hassio
+
+# Initialize udev daemon, handle CMD
+COPY entry.sh /bin/
+ENTRYPOINT ["/bin/entry.sh"]

 CMD [ "python3", "-m", "hassio" ]
azure-pipelines.yml
@@ -14,183 +14,155 @@ trigger:
 pr:
 - dev
 variables:
+  - name: basePythonTag
+    value: '3.7-alpine3.10'
   - name: versionHadolint
     value: 'v1.16.3'
   - name: versionBuilder
-    value: '3.2'
+    value: '4.4'
   - name: versionWheels
-    value: '0.6'
+    value: '0.11'
   - group: docker
   - group: wheels


-jobs:
-
-- job: 'Tox'
-  pool:
-    vmImage: 'ubuntu-latest'
-  steps:
-  - task: UsePythonVersion@0
-    displayName: 'Use Python 3.7'
-    inputs:
-      versionSpec: '3.7'
-  - script: pip install tox
-    displayName: 'Install Tox'
-  - script: tox
-    displayName: 'Run Tox'
-
-
-- job: 'Black'
-  pool:
-    vmImage: 'ubuntu-latest'
-  steps:
-  - task: UsePythonVersion@0
-    displayName: 'Use Python $(python.version)'
-    inputs:
-      versionSpec: '3.7'
-  - script: pip install black
-    displayName: 'Install black'
-  - script: black --check hassio tests
-    displayName: 'Run Black'
-
-
-- job: 'JQ'
-  pool:
-    vmImage: 'ubuntu-latest'
-  steps:
-  - script: sudo apt-get install -y jq
-    displayName: 'Install JQ'
-  - bash: |
-      shopt -s globstar
-      cat **/*.json | jq '.'
-    displayName: 'Run JQ'
-
-
-- job: 'Hadolint'
-  pool:
-    vmImage: 'ubuntu-latest'
-  steps:
-  - script: sudo docker pull hadolint/hadolint:$(versionHadolint)
-    displayName: 'Install Hadolint'
-  - script: |
-      sudo docker run --rm -i \
-        -v $(pwd)/.hadolint.yaml:/.hadolint.yaml:ro \
-        hadolint/hadolint:$(versionHadolint) < Dockerfile
-    displayName: 'Run Hadolint'
-
-
-- job: 'Wheels'
-  condition: eq(variables['Build.SourceBranchName'], 'dev')
-  timeoutInMinutes: 360
-  pool:
-    vmImage: 'ubuntu-latest'
-  strategy:
-    maxParallel: 3
-    matrix:
-      amd64:
-        buildArch: 'amd64'
-      i386:
-        buildArch: 'i386'
-      armhf:
-        buildArch: 'armhf'
-      armv7:
-        buildArch: 'armv7'
-      aarch64:
-        buildArch: 'aarch64'
-  steps:
-  - script: |
-      sudo apt-get update
-      sudo apt-get install -y --no-install-recommends \
-        qemu-user-static \
-        binfmt-support \
-        curl
-
-      sudo mount binfmt_misc -t binfmt_misc /proc/sys/fs/binfmt_misc
-      sudo update-binfmts --enable qemu-arm
-      sudo update-binfmts --enable qemu-aarch64
-    displayName: 'Initial cross build'
-  - script: |
-      mkdir -p .ssh
-      echo -e "-----BEGIN RSA PRIVATE KEY-----\n$(wheelsSSH)\n-----END RSA PRIVATE KEY-----" >> .ssh/id_rsa
-      ssh-keyscan -H $(wheelsHost) >> .ssh/known_hosts
-      chmod 600 .ssh/*
-    displayName: 'Install ssh key'
-  - script: sudo docker pull homeassistant/$(buildArch)-wheels:$(versionWheels)
-    displayName: 'Install wheels builder'
-  - script: |
-      curl -s -o requirements_diff.txt https://raw.githubusercontent.com/home-assistant/hassio/master/requirements.txt
-      sudo docker run --rm -v $(pwd):/data:ro -v $(pwd)/.ssh:/root/.ssh:rw \
-        homeassistant/$(buildArch)-wheels:$(versionWheels) \
-        --apk "build-base;libffi-dev;openssl-dev" \
-        --index $(wheelsIndex) \
-        --requirement requirements.txt \
-        --requirement-diff requirements_diff.txt \
-        --upload rsync \
-        --remote wheels@$(wheelsHost):/opt/wheels
-    displayName: 'Run wheels build'
-
-
-- job: 'ReleaseDEV'
-  condition: and(eq(variables['Build.SourceBranchName'], 'dev'), succeeded('JQ'), succeeded('Tox'), succeeded('Hadolint'), succeeded('Wheels'))
-  dependsOn:
-  - 'JQ'
-  - 'Tox'
-  - 'Hadolint'
-  - 'Wheels'
-  pool:
-    vmImage: 'ubuntu-latest'
-  steps:
-  - script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
-    displayName: 'Docker hub login'
-  - script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
-    displayName: 'Install Builder'
-  - script: |
-      sudo docker run --rm --privileged \
-        -v ~/.docker:/root/.docker \
-        -v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
-        homeassistant/amd64-builder:$(versionBuilder) \
-        --supervisor --all -t /data --version dev --docker-hub homeassistant
-    displayName: 'Build DEV'
-
-
-- job: 'VersionValidate'
-  condition: startsWith(variables['Build.SourceBranch'], 'refs/tags')
-  pool:
-    vmImage: 'ubuntu-latest'
-  steps:
-  - task: UsePythonVersion@0
-    displayName: 'Use Python 3.7'
-    inputs:
-      versionSpec: '3.7'
-  - script: |
-      setup_version="$(python setup.py -V)"
-      branch_version="$(Build.SourceBranchName)"
-
-      if [ "${setup_version}" != "${branch_version}" ]; then
-        echo "Version of tag ${branch_version} don't match with ${setup_version}!"
-        exit 1
-      fi
-    displayName: 'Check version of branch/tag'
-
-
-- job: 'Release'
-  condition: and(startsWith(variables['Build.SourceBranch'], 'refs/tags'), succeeded('JQ'), succeeded('Tox'), succeeded('Hadolint'), succeeded('VersionValidate'))
-  dependsOn:
-  - 'JQ'
-  - 'Tox'
-  - 'Hadolint'
-  - 'VersionValidate'
-  pool:
-    vmImage: 'ubuntu-latest'
-  steps:
-  - script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
-    displayName: 'Docker hub login'
-  - script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
-    displayName: 'Install Builder'
-  - script: |
-      sudo docker run --rm --privileged \
-        -v ~/.docker:/root/.docker \
-        -v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
-        homeassistant/amd64-builder:$(versionBuilder) \
-        --supervisor --all -t /data --docker-hub homeassistant
-    displayName: 'Build Release'
+stages:
+
+- stage: 'Test'
+  jobs:
+  - job: 'Tox'
+    pool:
+      vmImage: 'ubuntu-latest'
+    steps:
+    - task: UsePythonVersion@0
+      displayName: 'Use Python 3.7'
+      inputs:
+        versionSpec: '3.7'
+    - script: pip install tox
+      displayName: 'Install Tox'
+    - script: tox
+      displayName: 'Run Tox'
+  - job: 'Black'
+    pool:
+      vmImage: 'ubuntu-latest'
+    steps:
+    - task: UsePythonVersion@0
+      displayName: 'Use Python $(python.version)'
+      inputs:
+        versionSpec: '3.7'
+    - script: pip install black
+      displayName: 'Install black'
+    - script: black --check hassio tests
+      displayName: 'Run Black'
+  - job: 'JQ'
+    pool:
+      vmImage: 'ubuntu-latest'
+    steps:
+    - script: sudo apt-get install -y jq
+      displayName: 'Install JQ'
+    - bash: |
+        shopt -s globstar
+        cat **/*.json | jq '.'
+      displayName: 'Run JQ'
+  - job: 'Hadolint'
+    pool:
+      vmImage: 'ubuntu-latest'
+    steps:
+    - script: sudo docker pull hadolint/hadolint:$(versionHadolint)
+      displayName: 'Install Hadolint'
+    - script: |
+        sudo docker run --rm -i \
+          -v $(pwd)/.hadolint.yaml:/.hadolint.yaml:ro \
+          hadolint/hadolint:$(versionHadolint) < Dockerfile
+      displayName: 'Run Hadolint'
+
+- stage: 'Wheels'
+  jobs:
+  - job: 'Wheels'
+    condition: eq(variables['Build.SourceBranchName'], 'dev')
+    timeoutInMinutes: 360
+    pool:
+      vmImage: 'ubuntu-latest'
+    strategy:
+      maxParallel: 3
+      matrix:
+        amd64:
+          buildArch: 'amd64'
+        i386:
+          buildArch: 'i386'
+        armhf:
+          buildArch: 'armhf'
+        armv7:
+          buildArch: 'armv7'
+        aarch64:
+          buildArch: 'aarch64'
+    steps:
+    - script: |
+        sudo apt-get update
+        sudo apt-get install -y --no-install-recommends \
+          qemu-user-static \
+          binfmt-support \
+          curl
+
+        sudo mount binfmt_misc -t binfmt_misc /proc/sys/fs/binfmt_misc
+        sudo update-binfmts --enable qemu-arm
+        sudo update-binfmts --enable qemu-aarch64
+      displayName: 'Initial cross build'
+    - script: |
+        mkdir -p .ssh
+        echo -e "-----BEGIN RSA PRIVATE KEY-----\n$(wheelsSSH)\n-----END RSA PRIVATE KEY-----" >> .ssh/id_rsa
+        ssh-keyscan -H $(wheelsHost) >> .ssh/known_hosts
+        chmod 600 .ssh/*
+      displayName: 'Install ssh key'
+    - script: sudo docker pull homeassistant/$(buildArch)-wheels:$(versionWheels)-$(basePythonTag)
+      displayName: 'Install wheels builder'
+    - script: |
+        sudo docker run --rm -v $(pwd):/data:ro -v $(pwd)/.ssh:/root/.ssh:rw \
+          homeassistant/$(buildArch)-wheels:$(versionWheels)-$(basePythonTag) \
+          --apk "build-base;libffi-dev;openssl-dev" \
+          --index $(wheelsIndex) \
+          --requirement requirements.txt \
+          --upload rsync \
+          --remote wheels@$(wheelsHost):/opt/wheels
+      displayName: 'Run wheels build'
+
+- stage: 'Deploy'
+  jobs:
+  - job: 'VersionValidate'
+    condition: or(startsWith(variables['Build.SourceBranch'], 'refs/tags'), eq(variables['Build.SourceBranchName'], 'dev'))
+    pool:
+      vmImage: 'ubuntu-latest'
+    steps:
+    - task: UsePythonVersion@0
+      displayName: 'Use Python 3.7'
+      inputs:
+        versionSpec: '3.7'
+    - script: |
+        setup_version="$(python setup.py -V)"
+        branch_version="$(Build.SourceBranchName)"
+
+        if [ "${branch_version}" == "dev" ]; then
+          exit 0
+        elif [ "${setup_version}" != "${branch_version}" ]; then
+          echo "Version of tag ${branch_version} don't match with ${setup_version}!"
+          exit 1
+        fi
+      displayName: 'Check version of branch/tag'
+  - job: 'Release'
+    dependsOn:
+    - 'VersionValidate'
+    pool:
+      vmImage: 'ubuntu-latest'
+    steps:
+    - script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
+      displayName: 'Docker hub login'
+    - script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
+      displayName: 'Install Builder'
+    - script: |
+        sudo docker run --rm --privileged \
+          -v ~/.docker:/root/.docker \
+          -v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
+          homeassistant/amd64-builder:$(versionBuilder) \
+          --supervisor $(basePythonTag) --version $(Build.SourceBranchName) \
+          --all -t /data --docker-hub homeassistant
+      displayName: 'Build Release'
entry.sh (new executable file, 13 lines)
@@ -0,0 +1,13 @@
+#!/bin/bash
+set -e
+
+udevd --daemon
+udevadm trigger
+
+if CMD="$(command -v "$1")"; then
+    shift
+    exec "$CMD" "$@"
+else
+    echo "Command not found: $1"
+    exit 1
+fi
@@ -39,12 +39,9 @@ from ..const import (
     ATTR_IMAGE,
     ATTR_INGRESS,
     ATTR_INGRESS_ENTRY,
+    ATTR_INGRESS_PANEL,
     ATTR_INGRESS_PORT,
     ATTR_INGRESS_TOKEN,
-    ATTR_INGRESS_PANEL,
-    ATTR_PANEL_ADMIN,
-    ATTR_PANEL_ICON,
-    ATTR_PANEL_TITLE,
     ATTR_KERNEL_MODULES,
     ATTR_LEGACY,
     ATTR_LOCATON,
@@ -53,6 +50,9 @@ from ..const import (
     ATTR_NAME,
     ATTR_NETWORK,
     ATTR_OPTIONS,
+    ATTR_PANEL_ADMIN,
+    ATTR_PANEL_ICON,
+    ATTR_PANEL_TITLE,
     ATTR_PORTS,
     ATTR_PORTS_DESCRIPTION,
     ATTR_PRIVILEGED,
hassio/bootstrap.py
@@ -22,10 +22,11 @@ from .host import HostManager
 from .ingress import Ingress
 from .services import ServiceManager
 from .snapshots import SnapshotManager
-from .supervisor import Supervisor
 from .store import StoreManager
+from .supervisor import Supervisor
 from .tasks import Tasks
 from .updater import Updater
+from .utils.dt import fetch_timezone

 _LOGGER = logging.getLogger(__name__)

@@ -66,6 +67,10 @@ async def initialize_coresys():
     if MACHINE_ID.exists():
         coresys.machine_id = MACHINE_ID.read_text().strip()

+    # Init TimeZone
+    if coresys.config.timezone == "UTC":
+        coresys.config.timezone = await fetch_timezone(coresys.websession)
+
     return coresys
hassio/config.py
@@ -3,9 +3,6 @@ from datetime import datetime
 import logging
 import os
 from pathlib import Path, PurePath
-import re
-
-import pytz

 from .const import (
     ATTR_ADDONS_CUSTOM_LIST,
@@ -40,8 +37,6 @@ APPARMOR_DATA = PurePath("apparmor")

 DEFAULT_BOOT_TIME = datetime.utcfromtimestamp(0).isoformat()

-RE_TIMEZONE = re.compile(r"time_zone: (?P<timezone>[\w/\-+]+)")
-

 class CoreConfig(JsonConfig):
     """Hold all core config data."""
@@ -53,21 +48,7 @@ class CoreConfig(JsonConfig):
     @property
     def timezone(self):
         """Return system timezone."""
-        config_file = Path(self.path_homeassistant, "configuration.yaml")
-        try:
-            assert config_file.exists()
-            configuration = config_file.read_text()
-
-            data = RE_TIMEZONE.search(configuration)
-            assert data
-
-            timezone = data.group("timezone")
-            pytz.timezone(timezone)
-        except (pytz.exceptions.UnknownTimeZoneError, OSError, AssertionError):
-            _LOGGER.debug("Can't parse Home Assistant timezone")
-            return self._data[ATTR_TIMEZONE]
-
-        return timezone
+        return self._data[ATTR_TIMEZONE]

     @timezone.setter
     def timezone(self, value):
hassio/const.py
@@ -3,7 +3,7 @@ from pathlib import Path
 from ipaddress import ip_network

-HASSIO_VERSION = "166"
+HASSIO_VERSION = "167"

 URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"
 URL_HASSIO_VERSION = "https://s3.amazonaws.com/hassio-version/{channel}.json"
hassio/core.py
@@ -12,7 +12,7 @@ from .const import (
     STARTUP_APPLICATION,
     STARTUP_INITIALIZE,
 )
-from .exceptions import HassioError, HomeAssistantError
+from .exceptions import HassioError, HomeAssistantError, SupervisorUpdateError

 _LOGGER = logging.getLogger(__name__)

@@ -75,10 +75,16 @@ class HassIO(CoreSysAttributes):
         """Start Hass.io orchestration."""
         # on release channel, try update itself
         if self.sys_supervisor.need_update:
-            if self.sys_dev:
-                _LOGGER.warning("Ignore Hass.io updates on dev!")
-            elif await self.sys_supervisor.update():
-                return
+            try:
+                if self.sys_dev:
+                    _LOGGER.warning("Ignore Hass.io updates on dev!")
+                else:
+                    await self.sys_supervisor.update()
+            except SupervisorUpdateError:
+                _LOGGER.fatal(
+                    "Can't update supervisor! This will break some Add-ons or affect "
+                    "future version of Home Assistant!"
+                )

         # start api
         await self.sys_api.start()
hassio/discovery.py
@@ -85,11 +85,16 @@ class Discovery(CoreSysAttributes, JsonConfig):
         message = Message(addon.slug, service, config)

         # Already exists?
-        for old_message in self.list_messages:
-            if old_message != message:
+        for exists_msg in self.list_messages:
+            if exists_msg != message:
                 continue
-            _LOGGER.info("Duplicate discovery message from %s", addon.slug)
-            return old_message
+            if exists_msg.config != config:
+                message = exists_msg
+                message.config = config
+            else:
+                _LOGGER.debug("Duplicate discovery message from %s", addon.slug)
+                return exists_msg
+            break

         _LOGGER.info("Send discovery to Home Assistant %s from %s", service, addon.slug)
         self.message_obj[message.uuid] = message
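The rewritten loop changes resend semantics: a message for the same add-on and service is reused, its config is refreshed in place when the payload changed, and only a byte-identical resend is treated as a duplicate (now logged at DEBUG instead of INFO). A minimal standalone sketch of that rule, assuming hassio's Message compares on addon and service but not on config or uuid (the dataclass below is a stand-in, not the real class):

    from dataclasses import dataclass, field
    from typing import Any, Dict, List
    from uuid import uuid4


    @dataclass
    class Message:
        """Stand-in for hassio.discovery.Message (field set assumed)."""
        addon: str
        service: str
        config: Dict[str, Any] = field(compare=False, default_factory=dict)
        uuid: str = field(compare=False, default_factory=lambda: uuid4().hex)


    def send_message(messages: List[Message], addon: str, service: str,
                     config: Dict[str, Any]) -> Message:
        """Mirror the new dedup rule in Discovery.send_discovery (simplified)."""
        message = Message(addon, service, config)
        for exists_msg in messages:
            if exists_msg != message:
                continue
            if exists_msg.config != config:
                # Same addon/service, new payload: update in place, keep uuid.
                message = exists_msg
                message.config = config
            else:
                # Byte-identical resend: nothing to do.
                return exists_msg
            break

        if message not in messages:
            messages.append(message)
        return message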
hassio/docker/homeassistant.py
@@ -40,14 +40,6 @@ class DockerHomeAssistant(DockerInterface):
         """Return timeout for Docker actions."""
         return 60

-    @property
-    def devices(self):
-        """Create list of special device to map into Docker."""
-        devices = []
-        for device in self.sys_hardware.serial_devices:
-            devices.append(f"{device}:{device}:rwm")
-        return devices or None
-
     @property
     def ip_address(self) -> IPv4Address:
         """Return IP address of this container."""
@@ -73,7 +65,6 @@ class DockerHomeAssistant(DockerInterface):
             detach=True,
             privileged=True,
             init=True,
-            devices=self.devices,
             network_mode="host",
             environment={
                 "HASSIO": self.sys_docker.network.supervisor,
@@ -106,7 +97,6 @@ class DockerHomeAssistant(DockerInterface):
             command,
             privileged=True,
             init=True,
-            devices=self.devices,
             detach=True,
             stdout=True,
             stderr=True,
hassio/hardware.py
@@ -36,9 +36,15 @@ class Hardware:
         """Return all serial and connected devices."""
         dev_list = set()
         for device in self.context.list_devices(subsystem="tty"):
-            if "ID_VENDOR" in device or RE_TTY.search(device.device_node):
+            if "ID_VENDOR" in device.properties or RE_TTY.search(device.device_node):
                 dev_list.add(device.device_node)

+                # Add /dev/serial/by-id devlink for current device
+                for dev_link in device.device_links:
+                    if not dev_link.startswith("/dev/serial/by-id"):
+                        continue
+                    dev_list.add(dev_link)
+
         return dev_list

     @property
@@ -46,8 +52,8 @@ class Hardware:
         """Return all input devices."""
         dev_list = set()
         for device in self.context.list_devices(subsystem="input"):
-            if "NAME" in device:
-                dev_list.add(device["NAME"].replace('"', ""))
+            if "NAME" in device.properties:
+                dev_list.add(device.properties["NAME"].replace('"', ""))

         return dev_list

@@ -56,7 +62,7 @@ class Hardware:
         """Return all disk devices."""
         dev_list = set()
         for device in self.context.list_devices(subsystem="block"):
-            if device.device_node.startswith("/dev/sd"):
+            if "ID_NAME" in device.properties:
                 dev_list.add(device.device_node)

         return dev_list
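All three scans switch from the deprecated mapping protocol on pyudev.Device ("ID_VENDOR" in device, device["NAME"]) to the explicit device.properties view, and serial scanning now also collects the stable /dev/serial/by-id aliases exposed via device.device_links. A small sketch of both patterns against a live udev context:

    import pyudev

    context = pyudev.Context()

    for device in context.list_devices(subsystem="tty"):
        # Membership tests and lookups go through device.properties
        # instead of the deprecated mapping interface on Device itself.
        if "ID_VENDOR" in device.properties:
            print(device.device_node, device.properties["ID_VENDOR"])

            # device_links lists stable aliases such as /dev/serial/by-id/...
            for link in device.device_links:
                if link.startswith("/dev/serial/by-id"):
                    print("  alias:", link)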
hassio/utils/__init__.py
@@ -60,6 +60,7 @@ class AsyncThrottle:
 def check_port(address: IPv4Address, port: int) -> bool:
     """Check if port is mapped."""
     sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+    sock.settimeout(0.5)
     try:
         result = sock.connect_ex((str(address), port))
         sock.close()
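Without a timeout, connect_ex against a filtered or unroutable address can block for the kernel's full TCP connect timeout (often well over a minute); settimeout(0.5) bounds each probe to half a second. With the lines the hunk does not show filled in from context (the tail of the function is an assumption), the whole helper plausibly reads:

    import socket
    from ipaddress import IPv4Address


    def check_port(address: IPv4Address, port: int) -> bool:
        """Check if port is mapped."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(0.5)
        try:
            # connect_ex returns 0 when the TCP connection succeeds.
            result = sock.connect_ex((str(address), port))
            sock.close()
            return result == 0
        except OSError:
            return False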
hassio/utils/dt.py
@@ -1,13 +1,17 @@
 """Tools file for Hass.io."""
+import asyncio
 from datetime import datetime, timedelta, timezone, tzinfo
 import logging
 import re
 from typing import Any, Dict, Optional

+import aiohttp
 import pytz

 UTC = pytz.utc

+GEOIP_URL = "http://ip-api.com/json/"
+
 _LOGGER = logging.getLogger(__name__)

@@ -22,6 +26,21 @@ DATETIME_RE = re.compile(
 )


+async def fetch_timezone(websession):
+    """Read timezone from freegeoip."""
+    data = {}
+    try:
+        async with websession.get(GEOIP_URL, timeout=10) as request:
+            data = await request.json()
+
+    except (aiohttp.ClientError, asyncio.TimeoutError) as err:
+        _LOGGER.warning("Can't fetch freegeoip data: %s", err)
+    except ValueError as err:
+        _LOGGER.warning("Error on parse freegeoip data: %s", err)
+
+    return data.get("timezone", "UTC")
+
+
 # Copyright (c) Django Software Foundation and individual contributors.
 # All rights reserved.
 # https://github.com/django/django/blob/master/LICENSE
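bootstrap.py (above) awaits this helper once at startup, only when the stored timezone is still the "UTC" default. Exercised standalone it looks like the sketch below; the data.get fallback to "UTC" covers both network failures and unparseable responses:

    import asyncio

    import aiohttp

    from hassio.utils.dt import fetch_timezone


    async def main():
        async with aiohttp.ClientSession() as websession:
            # Prints e.g. "Europe/Zurich", or "UTC" if ip-api.com is unreachable.
            print(await fetch_timezone(websession))


    asyncio.run(main())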
requirements.txt
@@ -5,7 +5,7 @@ cchardet==2.1.4
 colorlog==4.0.2
 cpe==1.2.1
 cryptography==2.7
-docker==4.0.1
+docker==4.0.2
 gitpython==2.1.11
 pytz==2019.1
 pyudev==0.21.0
requirements_tests.txt
@@ -1,5 +1,5 @@
 flake8==3.7.7
 pylint==2.3.1
-pytest==4.6.2
+pytest==4.6.3
 pytest-timeout==1.3.3
 pytest-aiohttp==0.3.0
tests/common.py
@@ -7,3 +7,20 @@ def load_json_fixture(filename):
     """Load a fixture."""
     path = Path(Path(__file__).parent.joinpath("fixtures"), filename)
     return json.loads(path.read_text())
+
+
+def mock_coro(return_value=None, exception=None):
+    """Return a coro that returns a value or raise an exception."""
+    return mock_coro_func(return_value, exception)()
+
+
+def mock_coro_func(return_value=None, exception=None):
+    """Return a method to create a coro function that returns a value."""
+
+    async def coro(*args, **kwargs):
+        """Fake coroutine."""
+        if exception:
+            raise exception
+        return return_value
+
+    return coro
tests/conftest.py
@@ -5,6 +5,8 @@ import pytest

 from hassio.bootstrap import initialize_coresys

+from tests.common import mock_coro
+
 # pylint: disable=redefined-outer-name

@@ -18,7 +20,10 @@ def docker():
 @pytest.fixture
 async def coresys(loop, docker):
     """Create a CoreSys Mock."""
-    with patch("hassio.bootstrap.initialize_system_data"):
+    with patch("hassio.bootstrap.initialize_system_data"), patch(
+        "hassio.bootstrap.fetch_timezone",
+        return_value=mock_coro(return_value="Europe/Zurich"),
+    ):
         coresys_obj = await initialize_coresys()

         coresys_obj.ingress.save_data = MagicMock()
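Because fetch_timezone is patched to a mock_coro, the await inside initialize_coresys resolves to the canned "Europe/Zurich" without any network I/O. A hypothetical test consuming the fixture (the test itself is illustrative, not part of this commit, and assumes the config still holds its "UTC" default at bootstrap):

    async def test_coresys_timezone(coresys):
        """The patched fetch_timezone value should land in the core config."""
        assert coresys.config.timezone == "Europe/Zurich"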