Compare commits


No commits in common. "main" and "140" have entirely different histories.
main ... 140

3813 changed files with 13190 additions and 96434 deletions


@@ -1,51 +0,0 @@
{
"name": "Supervisor dev",
"image": "ghcr.io/home-assistant/devcontainer:2-supervisor",
"containerEnv": {
"WORKSPACE_DIRECTORY": "${containerWorkspaceFolder}"
},
"remoteEnv": {
"PATH": "${containerEnv:VIRTUAL_ENV}/bin:${containerEnv:PATH}"
},
"appPort": ["9123:8123", "7357:4357"],
"postCreateCommand": "bash devcontainer_setup",
"postStartCommand": "bash devcontainer_bootstrap",
"runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
"customizations": {
"vscode": {
"extensions": [
"charliermarsh.ruff",
"ms-python.pylint",
"ms-python.vscode-pylance",
"visualstudioexptteam.vscodeintellicode",
"redhat.vscode-yaml",
"esbenp.prettier-vscode",
"GitHub.vscode-pull-request-github"
],
"settings": {
"python.defaultInterpreterPath": "/home/vscode/.local/ha-venv/bin/python",
"python.pythonPath": "/home/vscode/.local/ha-venv/bin/python",
"python.terminal.activateEnvInCurrentTerminal": true,
"python.testing.pytestArgs": ["--no-cov"],
"pylint.importStrategy": "fromEnvironment",
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true,
"terminal.integrated.profiles.linux": {
"zsh": {
"path": "/usr/bin/zsh"
}
},
"terminal.integrated.defaultProfile.linux": "zsh",
"[python]": {
"editor.defaultFormatter": "charliermarsh.ruff"
}
}
}
},
"mounts": [
"type=volume,target=/var/lib/docker",
"type=volume,target=/mnt/supervisor"
]
}


@@ -1,23 +1,13 @@
# General files
.git
.github
.devcontainer
.vscode
# Test related files
.tox
# Temporary files
**/__pycache__
.pytest_cache
# virtualenv
venv/
# Data
home-assistant-polymer/
script/
tests/
# Test ENV
data/
ENV/


@@ -1,96 +0,0 @@
name: Report an issue with Home Assistant Supervisor
description: Report an issue related to the Home Assistant Supervisor.
body:
- type: markdown
attributes:
value: |
This issue form is for reporting bugs with **supported** setups only!
If you have a feature or enhancement request, please use the [feature request][fr] section of our [Community Forum][fr].
[fr]: https://github.com/orgs/home-assistant/discussions
- type: textarea
validations:
required: true
attributes:
label: Describe the issue you are experiencing
description: Provide a clear and concise description of what the bug is.
- type: markdown
attributes:
value: |
## Environment
- type: dropdown
validations:
required: true
attributes:
label: What type of installation are you running?
description: >
If you don't know, it can be found in [Settings -> System -> Repairs -> (three dot menu) -> System Information](https://my.home-assistant.io/redirect/system_health/).
It is listed as the `Installation Type` value.
options:
- Home Assistant OS
- Home Assistant Supervised
- type: dropdown
validations:
required: true
attributes:
label: Which operating system are you running on?
options:
- Home Assistant Operating System
- Debian
- Other (e.g., Raspbian/Raspberry Pi OS/Fedora)
- type: markdown
attributes:
value: |
# Details
- type: textarea
validations:
required: true
attributes:
label: Steps to reproduce the issue
description: |
Please tell us exactly how to reproduce your issue.
Provide clear and concise step by step instructions and add code snippets if needed.
value: |
1.
2.
3.
...
- type: textarea
validations:
required: true
attributes:
label: Anything in the Supervisor logs that might be useful for us?
description: >
Supervisor Logs can be found in [Settings -> System -> Logs](https://my.home-assistant.io/redirect/logs/)
then choose `Supervisor` in the top right.
[![Open your Home Assistant instance and show your Supervisor system logs.](https://my.home-assistant.io/badges/supervisor_logs.svg)](https://my.home-assistant.io/redirect/supervisor_logs/)
render: txt
- type: textarea
validations:
required: true
attributes:
label: System information
description: >
The System information can be found in [Settings -> System -> Repairs -> (three dot menu) -> System Information](https://my.home-assistant.io/redirect/system_health/).
Click the copy button at the bottom of the pop-up and paste it here.
[![Open your Home Assistant instance and show health information about your system.](https://my.home-assistant.io/badges/system_health.svg)](https://my.home-assistant.io/redirect/system_health/)
- type: textarea
attributes:
label: Supervisor diagnostics
placeholder: "drag-and-drop the diagnostics data file here (do not copy-and-paste the content)"
description: >-
Supervisor diagnostics can be found in [Settings -> Devices & services](https://my.home-assistant.io/redirect/integrations/).
Find the card that says `Home Assistant Supervisor`, open it, and select the three dot menu of the Supervisor integration entry
and select 'Download diagnostics'.
**Please drag-and-drop the downloaded file into the textbox below. Do not copy and paste its contents.**
- type: textarea
attributes:
label: Additional information
description: >
If you have any additional information for us, use the field below.
Please note that you can attach screenshots or screen recordings here by
dragging and dropping files in the field below.


@@ -1,25 +0,0 @@
blank_issues_enabled: false
contact_links:
- name: Report a bug/issues with an unsupported Supervisor
url: https://community.home-assistant.io
about: The Community guide may help, or may already have been updated to solve your issue
- name: Report a bug for the Supervisor panel
url: https://github.com/home-assistant/frontend/issues
about: The Supervisor panel is a part of the Home Assistant frontend
- name: Report incorrect or missing information on our developer documentation
url: https://github.com/home-assistant/developers.home-assistant.io/issues
about: Our documentation has its own issue tracker. Please report issues with the website there.
- name: Request a feature for the Supervisor
url: https://github.com/orgs/home-assistant/discussions
about: Request a new feature for the Supervisor.
- name: I have a question or need support
url: https://www.home-assistant.io/help
about: We use GitHub for tracking bugs; check our website for resources on getting help.
- name: I'm unsure where to go?
url: https://www.home-assistant.io/join-chat
about: If you are unsure where to go, joining our chat is recommended; just ask!


@@ -1,53 +0,0 @@
name: Task
description: For staff only - Create a task
type: Task
body:
- type: markdown
attributes:
value: |
## ⚠️ RESTRICTED ACCESS
**This form is restricted to Open Home Foundation staff and authorized contributors only.**
If you are a community member wanting to contribute, please:
- For bug reports: Use the [bug report form](https://github.com/home-assistant/supervisor/issues/new?template=bug_report.yml)
- For feature requests: Submit to [Feature Requests](https://github.com/orgs/home-assistant/discussions)
---
### For authorized contributors
Use this form to create tasks for development work, improvements, or other actionable items that need to be tracked.
- type: textarea
id: description
attributes:
label: Description
description: |
Provide a clear and detailed description of the task that needs to be accomplished.
Be specific about what needs to be done, why it's important, and any constraints or requirements.
placeholder: |
Describe the task, including:
- What needs to be done
- Why this task is needed
- Expected outcome
- Any constraints or requirements
validations:
required: true
- type: textarea
id: additional_context
attributes:
label: Additional context
description: |
Any additional information, links, research, or context that would be helpful.
Include links to related issues, research, prototypes, roadmap opportunities etc.
placeholder: |
- Roadmap opportunity: [link]
- Epic: [link]
- Feature request: [link]
- Technical design documents: [link]
- Prototype/mockup: [link]
- Dependencies: [links]
validations:
required: false


@@ -1,74 +0,0 @@
<!--
You are amazing! Thanks for contributing to our project!
Please, DO NOT DELETE ANY TEXT from this template! (unless instructed).
-->
## Proposed change
<!--
Describe the big picture of your changes here to communicate to the
maintainers why we should accept this pull request. If it fixes a bug
or resolves a feature request, be sure to link to that issue in the
additional information section.
-->
## Type of change
<!--
What type of change does your PR introduce to Home Assistant?
NOTE: Please, check only 1! box!
If your PR requires multiple boxes to be checked, you'll most likely need to
split it into multiple PRs. This makes things easier and faster to code review.
-->
- [ ] Dependency upgrade
- [ ] Bugfix (non-breaking change which fixes an issue)
- [ ] New feature (which adds functionality to the supervisor)
- [ ] Breaking change (fix/feature causing existing functionality to break)
- [ ] Code quality improvements to existing code or addition of tests
## Additional information
<!--
Details are important, and help maintainers process your PR.
Please be sure to fill out additional details, if applicable.
-->
- This PR fixes or closes issue: fixes #
- This PR is related to issue:
- Link to documentation pull request:
- Link to cli pull request:
- Link to client library pull request:
## Checklist
<!--
Put an `x` in the boxes that apply. You can also fill these out after
creating the PR. If you're unsure about any of them, don't hesitate to ask.
We're here to help! This is simply a reminder of what we are going to look
for before merging your code.
-->
- [ ] The code change is tested and works locally.
- [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
- [ ] There is no commented out code in this PR.
- [ ] I have followed the [development checklist][dev-checklist]
- [ ] The code has been formatted using Ruff (`ruff format supervisor tests`)
- [ ] Tests have been added to verify that the new code works.
If API endpoints or add-on configuration are added/changed:
- [ ] Documentation added/updated for [developers.home-assistant.io][docs-repository]
- [ ] [CLI][cli-repository] updated (if necessary)
- [ ] [Client library][client-library-repository] updated (if necessary)
<!--
Thank you for contributing <3
Below, some useful links you could explore:
-->
[dev-checklist]: https://developers.home-assistant.io/docs/en/development_checklist.html
[docs-repository]: https://github.com/home-assistant/developers.home-assistant
[cli-repository]: https://github.com/home-assistant/cli
[client-library-repository]: https://github.com/home-assistant-libs/python-supervisor-client/


@@ -1,288 +0,0 @@
# GitHub Copilot & Claude Code Instructions
This repository contains the Home Assistant Supervisor, a Python 3 based container
orchestration and management system for Home Assistant.
## Supervisor Capabilities & Features
### Architecture Overview
Home Assistant Supervisor is a Python-based container orchestration system that
communicates with the Docker daemon to manage containerized components. It is tightly
integrated with the underlying Operating System and core Operating System components
through D-Bus.
**Managed Components:**
- **Home Assistant Core**: The main home automation application running in its own
container (also provides the web interface)
- **Add-ons**: Third-party applications and services (each add-on runs in its own
container)
- **Plugins**: Built-in system services like DNS, Audio, CLI, Multicast, and Observer
- **Host System Integration**: OS-level operations and hardware access via D-Bus
- **Container Networking**: Internal Docker network management and external
connectivity
- **Storage & Backup**: Data persistence and backup management across all containers
**Key Dependencies:**
- **Docker Engine**: Required for all container operations
- **D-Bus**: System-level communication with the host OS
- **systemd**: Service management for host system operations
- **NetworkManager**: Network configuration and management
### Add-on System
**Add-on Architecture**: Add-ons are containerized applications available through
add-on stores. Each store contains multiple add-ons, and each add-on includes metadata
that tells Supervisor the version, startup configuration (permissions), and available
user configurable options. Add-on metadata typically references a container image that
Supervisor fetches during installation. If not, the Supervisor builds the container
image from a Dockerfile.
**Built-in Stores**: Supervisor comes with several pre-configured stores:
- **Core Add-ons**: Official add-ons maintained by the Home Assistant team
- **Community Add-ons**: Popular third-party add-ons repository
- **ESPHome**: Add-ons for ESPHome ecosystem integration
- **Music Assistant**: Audio and music-related add-ons
- **Local Development**: Local folder for testing custom add-ons during development
**Store Management**: Stores are Git-based repositories that are periodically updated.
When updates are available, users receive notifications.
**Add-on Lifecycle**:
- **Installation**: Supervisor fetches or builds container images based on add-on
metadata
- **Configuration**: Schema-validated options with integrated UI management
- **Runtime**: Full container lifecycle management, health monitoring
- **Updates**: Automatic or manual version management
### Update System
**Core Components**: Supervisor, Home Assistant Core, HAOS, and built-in plugins
receive version information from a central JSON file fetched from
`https://version.home-assistant.io/{channel}.json`. The `Updater` class handles
fetching this data, validating signatures, and updating internal version tracking.
**Update Channels**: Three channels (`stable`/`beta`/`dev`) determine which version
JSON file is fetched, allowing users to opt into different release streams.
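As a rough illustration, fetching the channel file might look like this (a minimal sketch using aiohttp; this is not the actual `Updater` implementation, and signature validation is omitted):
```python
# Minimal sketch: fetch the central version JSON for a channel.
# Not the real Updater class; error handling and signature checks omitted.
import asyncio

import aiohttp

VERSION_URL = "https://version.home-assistant.io/{channel}.json"


async def fetch_versions(channel: str = "stable") -> dict:
    """Fetch version metadata for an update channel (stable/beta/dev)."""
    async with aiohttp.ClientSession() as session:
        async with session.get(VERSION_URL.format(channel=channel)) as resp:
            resp.raise_for_status()
            return await resp.json()


if __name__ == "__main__":
    print(asyncio.run(fetch_versions()))
```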
**Add-on Updates**: Add-on version information comes from store repository updates, not
the central JSON file. When repositories are refreshed via the store system, add-ons
compare their local versions against repository versions to determine update
availability.
### Backup & Recovery System
**Backup Capabilities**:
- **Full Backups**: Complete system state capture including all add-ons,
configuration, and data
- **Partial Backups**: Selective backup of specific components (Home Assistant,
add-ons, folders)
- **Encrypted Backups**: Optional backup encryption with user-provided passwords
- **Multiple Storage Locations**: Local storage and remote backup destinations
**Recovery Features**:
- **One-click Restore**: Simple restoration from backup files
- **Selective Restore**: Choose specific components to restore
- **Automatic Recovery**: Self-healing for common system issues
---
## Supervisor Development
### Python Requirements
- **Compatibility**: Python 3.13+
- **Language Features**: Use modern Python features:
- Type hints with `typing` module
- f-strings (preferred over `%` or `.format()`)
- Dataclasses and enum classes
- Async/await patterns
- Pattern matching where appropriate
### Code Quality Standards
- **Formatting**: Ruff
- **Linting**: PyLint and Ruff
- **Type Checking**: MyPy
- **Testing**: pytest with asyncio support
- **Language**: American English for all code, comments, and documentation
### Code Organization
**Core Structure**:
```
supervisor/
├── __init__.py # Package initialization
├── const.py # Constants and enums
├── coresys.py # Core system management
├── bootstrap.py # System initialization
├── exceptions.py # Custom exception classes
├── api/ # REST API endpoints
├── addons/ # Add-on management
├── backups/ # Backup system
├── docker/ # Docker integration
├── host/ # Host system interface
├── homeassistant/ # Home Assistant Core management
├── dbus/ # D-Bus system integration
├── hardware/ # Hardware detection and management
├── plugins/ # Plugin system
├── resolution/ # Issue detection and resolution
├── security/ # Security management
├── services/ # Service discovery and management
├── store/ # Add-on store management
└── utils/ # Utility functions
```
**Shared Constants**: Use constants from `supervisor/const.py` instead of hardcoding
values. Define new constants following existing patterns and group related constants
together.
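For instance, an API payload can reuse the `ATTR_JOB_ID` constant shown in the API example further below instead of a hardcoded string (the exact module holding the constant is an assumption here):
```python
# Sketch of the shared-constants convention; the import path is assumed.
from supervisor.const import ATTR_JOB_ID


def job_response(job_uuid: str) -> dict[str, str]:
    """Build an API payload from a shared constant, not a hardcoded key."""
    return {ATTR_JOB_ID: job_uuid}
```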
### Supervisor Architecture Patterns
**CoreSysAttributes Inheritance Pattern**: Nearly all major classes in Supervisor
inherit from `CoreSysAttributes`, providing access to the centralized system state
via `self.coresys` and convenient `sys_*` properties.
```python
# Standard Supervisor class pattern
class MyManager(CoreSysAttributes):
"""Manage my functionality."""
def __init__(self, coresys: CoreSys):
"""Initialize manager."""
self.coresys: CoreSys = coresys
self._component: MyComponent = MyComponent(coresys)
@property
def component(self) -> MyComponent:
"""Return component handler."""
return self._component
# Access system components via inherited properties
async def do_something(self):
await self.sys_docker.containers.get("my_container")
self.sys_bus.fire_event(BusEvent.MY_EVENT, {"data": "value"})
```
**Key Inherited Properties from CoreSysAttributes**:
- `self.sys_docker` - Docker API access
- `self.sys_run_in_executor()` - Execute blocking operations
- `self.sys_create_task()` - Create async tasks
- `self.sys_bus` - Event bus for system events
- `self.sys_config` - System configuration
- `self.sys_homeassistant` - Home Assistant Core management
- `self.sys_addons` - Add-on management
- `self.sys_host` - Host system access
- `self.sys_dbus` - D-Bus system interface
**Load Pattern**: Many components implement a `load()` method which effectively
initializes the component from external sources (containers, files, D-Bus services).
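A hypothetical manager following this pattern (all names below are illustrative, not real Supervisor classes):
```python
# Illustrative load() implementation following the two-step convention.
class ExampleManager(CoreSysAttributes):
    """Manage example data initialized from disk."""

    def __init__(self, coresys: CoreSys):
        """Synchronous setup only; no I/O in the constructor."""
        self.coresys: CoreSys = coresys
        self._items: dict[str, str] = {}

    async def load(self) -> None:
        """Initialize state from an external source after construction."""
        # File access is blocking, so it runs in the executor.
        self._items = await self.sys_run_in_executor(self._read_items_from_disk)
```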
### API Development
**REST API Structure**:
- **Base Path**: `/api/` for all endpoints
- **Authentication**: Bearer token authentication
- **Consistent Response Format**: `{"result": "ok", "data": {...}}` or
`{"result": "error", "message": "..."}`
- **Validation**: Use voluptuous schemas with `api_validate()`
**Use `@api_process` Decorator**: This decorator handles all standard error handling
and response formatting automatically. The decorator catches `APIError`, `HassioError`,
and other exceptions, returning appropriate HTTP responses.
```python
from ..api.utils import api_process, api_validate
@api_process
async def backup_full(self, request: web.Request) -> dict[str, Any]:
"""Create full backup."""
body = await api_validate(SCHEMA_BACKUP_FULL, request)
job = await self.sys_backups.do_backup_full(**body)
return {ATTR_JOB_ID: job.uuid}
```
### Docker Integration
- **Container Management**: Use Supervisor's Docker manager instead of direct
Docker API
- **Networking**: Supervisor manages internal Docker networks with predefined IP
ranges
- **Security**: AppArmor profiles, capability restrictions, and user namespace
isolation
- **Health Checks**: Implement health monitoring for all managed containers
### D-Bus Integration
- **Use dbus-fast**: Async D-Bus library for system integration
- **Service Management**: systemd, NetworkManager, hostname management
- **Error Handling**: Wrap D-Bus exceptions in Supervisor-specific exceptions
### Async Programming
- **All I/O operations must be async**: File operations, network calls, subprocess
execution
- **Use asyncio patterns**: Prefer `asyncio.gather()` over sequential awaits (see the sketch after this list)
- **Executor jobs**: Use `self.sys_run_in_executor()` for blocking operations
- **Two-phase initialization**: `__init__` for sync setup, `post_init()` for async
initialization
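A standalone sketch of both patterns (the hashing helper is purely illustrative; inside Supervisor code you would use `self.sys_run_in_executor()` rather than calling the loop directly):
```python
# Demonstrates asyncio.gather() for independent jobs and an executor for
# blocking I/O; all names are illustrative.
import asyncio
import hashlib
from pathlib import Path


def checksum_file(path: Path) -> str:
    """Blocking file I/O: must not run on the event loop."""
    return hashlib.sha256(path.read_bytes()).hexdigest()


async def main() -> None:
    loop = asyncio.get_running_loop()
    paths = list(Path(".").glob("*.py"))
    # Run independent jobs concurrently instead of awaiting them one by one.
    digests = await asyncio.gather(
        *(loop.run_in_executor(None, checksum_file, p) for p in paths)
    )
    for path, digest in zip(paths, digests):
        print(path, digest)


if __name__ == "__main__":
    asyncio.run(main())
```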
### Testing
- **Location**: `tests/` directory with module mirroring
- **Fixtures**: Extensive use of pytest fixtures for CoreSys setup
- **Mocking**: Mock external dependencies (Docker, D-Bus, network calls); see the sketch after this list
- **Coverage**: Minimum 90% test coverage, 100% for security-sensitive code
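A sketch of the mocking convention (pytest-asyncio style; the shape of the mocked Docker object below is an assumption, not the real manager API):
```python
# External dependencies like Docker are replaced with mocks in tests.
from unittest.mock import AsyncMock

import pytest


@pytest.mark.asyncio
async def test_container_lookup_uses_mock():
    """Look up a container through a mocked Docker layer."""
    docker = AsyncMock()
    docker.containers.get.return_value = {"Name": "my_container"}

    container = await docker.containers.get("my_container")

    assert container["Name"] == "my_container"
    docker.containers.get.assert_awaited_once_with("my_container")
```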
### Error Handling
- **Custom Exceptions**: Defined in `exceptions.py` with clear inheritance hierarchy
- **Error Propagation**: Use `from` clause for exception chaining (see the sketch after this list)
- **API Errors**: Use `APIError` with appropriate HTTP status codes
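A minimal sketch of the chaining convention (`ConfigError` is illustrative; the real hierarchy lives in `exceptions.py`):
```python
# Exception chaining with `from` keeps the original traceback as __cause__.
class HassioError(Exception):
    """Base Supervisor exception."""


class ConfigError(HassioError):
    """Illustrative subclass for configuration failures."""


def read_config(path: str) -> str:
    """Read a config file, wrapping low-level errors in a domain error."""
    try:
        with open(path, encoding="utf-8") as fh:
            return fh.read()
    except OSError as err:
        raise ConfigError(f"Can't read {path}") from err
```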
### Security Considerations
- **Container Security**: AppArmor profiles mandatory for add-ons, minimal
capabilities
- **Authentication**: Token-based API authentication with role-based access
- **Data Protection**: Backup encryption, secure secret management, comprehensive
input validation
### Development Commands
```bash
# Run tests, adjust paths as necessary
pytest -qsx tests/
# Linting and formatting
ruff check supervisor/
ruff format supervisor/
# Type checking
mypy --ignore-missing-imports supervisor/
# Pre-commit hooks
pre-commit run --all-files
```
Always run the pre-commit hooks at the end of code editing.
### Common Patterns to Follow
**✅ Use These Patterns**:
- Inherit from `CoreSysAttributes` for system access
- Use `@api_process` decorator for API endpoints
- Use `self.sys_run_in_executor()` for blocking operations
- Access Docker via `self.sys_docker` not direct Docker API
- Use constants from `const.py` instead of hardcoding
- Store types in (per-module) `const.py` (e.g. supervisor/store/const.py)
**❌ Avoid These Patterns**:
- Direct Docker API usage - use Supervisor's Docker manager
- Blocking operations in async context (use asyncio alternatives)
- Hardcoded values - use constants from `const.py`
- Manual error handling in API endpoints - let `@api_process` handle it
This guide provides the foundation for contributing to Home Assistant Supervisor.
Follow these patterns and guidelines to ensure code quality, security, and
maintainability.


@@ -1,14 +0,0 @@
version: 2
updates:
- package-ecosystem: pip
directory: "/"
schedule:
interval: daily
time: "06:00"
open-pull-requests-limit: 10
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: daily
time: "06:00"
open-pull-requests-limit: 10


@@ -1,50 +1,4 @@
change-template: "- #$NUMBER $TITLE @$AUTHOR"
sort-direction: ascending
categories:
- title: ":boom: Breaking Changes"
label: "breaking-change"
- title: ":wrench: Build"
label: "build"
- title: ":boar: Chore"
label: "chore"
- title: ":sparkles: New Features"
label: "new-feature"
- title: ":zap: Performance"
label: "performance"
- title: ":recycle: Refactor"
label: "refactor"
- title: ":green_heart: CI"
label: "ci"
- title: ":bug: Bug Fixes"
label: "bugfix"
- title: ":white_check_mark: Test"
label: "test"
- title: ":arrow_up: Dependency Updates"
label: "dependencies"
collapse-after: 1
include-labels:
- "breaking-change"
- "build"
- "chore"
- "performance"
- "refactor"
- "new-feature"
- "bugfix"
- "dependencies"
- "test"
- "ci"
template: |
## What's Changed
$CHANGES


@@ -1,380 +0,0 @@
name: Build supervisor
on:
workflow_dispatch:
inputs:
channel:
description: "Channel"
required: true
default: "dev"
version:
description: "Version"
required: true
publish:
description: "Publish"
required: true
default: "false"
stable:
description: "Stable"
required: true
default: "false"
pull_request:
branches: ["main"]
release:
types: ["published"]
push:
branches: ["main"]
paths:
- "rootfs/**"
- "supervisor/**"
- build.yaml
- Dockerfile
- requirements.txt
- setup.py
env:
DEFAULT_PYTHON: "3.13"
BUILD_NAME: supervisor
BUILD_TYPE: supervisor
concurrency:
group: "${{ github.workflow }}-${{ github.ref }}"
cancel-in-progress: true
jobs:
init:
name: Initialize build
runs-on: ubuntu-latest
outputs:
architectures: ${{ steps.info.outputs.architectures }}
version: ${{ steps.version.outputs.version }}
channel: ${{ steps.version.outputs.channel }}
publish: ${{ steps.version.outputs.publish }}
requirements: ${{ steps.requirements.outputs.changed }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
with:
fetch-depth: 0
- name: Get information
id: info
uses: home-assistant/actions/helpers/info@master
- name: Get version
id: version
uses: home-assistant/actions/helpers/version@master
with:
type: ${{ env.BUILD_TYPE }}
- name: Get changed files
id: changed_files
if: steps.version.outputs.publish == 'false'
uses: masesgroup/retrieve-changed-files@v3.0.0
- name: Check if requirements files changed
id: requirements
run: |
if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.yaml) ]]; then
echo "changed=true" >> "$GITHUB_OUTPUT"
fi
build:
name: Build ${{ matrix.arch }} supervisor
needs: init
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write
packages: write
strategy:
matrix:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
with:
fetch-depth: 0
- name: Write env-file
if: needs.init.outputs.requirements == 'true'
run: |
(
# Fix out of memory issues with rust
echo "CARGO_NET_GIT_FETCH_WITH_CLI=true"
) > .env_file
- name: Build wheels
if: needs.init.outputs.requirements == 'true'
uses: home-assistant/wheels@2025.07.0
with:
abi: cp313
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
apk: "libffi-dev;openssl-dev;yaml-dev"
skip-binary: aiohttp
env-file: true
requirements: "requirements.txt"
- name: Set version
if: needs.init.outputs.publish == 'true'
uses: home-assistant/actions/helpers/version@master
with:
type: ${{ env.BUILD_TYPE }}
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.publish == 'true'
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Install Cosign
if: needs.init.outputs.publish == 'true'
uses: sigstore/cosign-installer@v3.9.2
with:
cosign-release: "v2.4.3"
- name: Install dirhash and calc hash
if: needs.init.outputs.publish == 'true'
run: |
pip3 install setuptools dirhash
dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")"
echo "${dir_hash}" > rootfs/supervisor.sha256
- name: Sign supervisor SHA256
if: needs.init.outputs.publish == 'true'
run: |
cosign sign-blob --yes rootfs/supervisor.sha256 --bundle rootfs/supervisor.sha256.sig
- name: Login to GitHub Container Registry
if: needs.init.outputs.publish == 'true'
uses: docker/login-action@v3.4.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set build arguments
if: needs.init.outputs.publish == 'false'
run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV
- name: Build supervisor
uses: home-assistant/builder@2025.03.0
with:
args: |
$BUILD_ARGS \
--${{ matrix.arch }} \
--target /data \
--cosign \
--generic ${{ needs.init.outputs.version }}
env:
CAS_API_KEY: ${{ secrets.CAS_TOKEN }}
version:
name: Update version
needs: ["init", "run_supervisor"]
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
if: needs.init.outputs.publish == 'true'
uses: actions/checkout@v4.2.2
- name: Initialize git
if: needs.init.outputs.publish == 'true'
uses: home-assistant/actions/helpers/git-init@master
with:
name: ${{ secrets.GIT_NAME }}
email: ${{ secrets.GIT_EMAIL }}
token: ${{ secrets.GIT_TOKEN }}
- name: Update version file
if: needs.init.outputs.publish == 'true'
uses: home-assistant/actions/helpers/version-push@master
with:
key: ${{ env.BUILD_NAME }}
version: ${{ needs.init.outputs.version }}
channel: ${{ needs.init.outputs.channel }}
run_supervisor:
runs-on: ubuntu-latest
name: Run the Supervisor
needs: ["build", "init"]
timeout-minutes: 60
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
- name: Build the Supervisor
if: needs.init.outputs.publish != 'true'
uses: home-assistant/builder@2025.03.0
with:
args: |
--test \
--amd64 \
--target /data \
--generic runner
- name: Pull Supervisor
if: needs.init.outputs.publish == 'true'
run: |
docker pull ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }}
docker tag ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }} ghcr.io/home-assistant/amd64-hassio-supervisor:runner
- name: Create the Supervisor
run: |
mkdir -p /tmp/supervisor/data
docker create --name hassio_supervisor \
--privileged \
--security-opt seccomp=unconfined \
--security-opt apparmor=unconfined \
-v /run/docker.sock:/run/docker.sock \
-v /run/dbus:/run/dbus \
-v /tmp/supervisor/data:/data \
-v /etc/machine-id:/etc/machine-id:ro \
-e SUPERVISOR_SHARE="/tmp/supervisor/data" \
-e SUPERVISOR_NAME=hassio_supervisor \
-e SUPERVISOR_DEV=1 \
-e SUPERVISOR_MACHINE="qemux86-64" \
ghcr.io/home-assistant/amd64-hassio-supervisor:runner
- name: Start the Supervisor
run: docker start hassio_supervisor
- name: Wait for Supervisor to come up
run: |
SUPERVISOR=$(docker inspect --format='{{.NetworkSettings.IPAddress}}' hassio_supervisor)
ping="error"
while [ "$ping" != "ok" ]; do
ping=$(curl -sSL "http://$SUPERVISOR/supervisor/ping" | jq -r '.result')
sleep 5
done
- name: Check the Supervisor
run: |
echo "Checking supervisor info"
test=$(docker exec hassio_cli ha supervisor info --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
echo "Checking supervisor network info"
test=$(docker exec hassio_cli ha network info --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
- name: Check the Store / Addon
run: |
echo "Install Core SSH Add-on"
test=$(docker exec hassio_cli ha addons install core_ssh --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
# Make sure it actually installed
test=$(docker exec hassio_cli ha addons info core_ssh --no-progress --raw-json | jq -r '.data.version')
if [[ "$test" == "null" ]]; then
exit 1
fi
echo "Start Core SSH Add-on"
test=$(docker exec hassio_cli ha addons start core_ssh --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
# Make sure its state is started
test="$(docker exec hassio_cli ha addons info core_ssh --no-progress --raw-json | jq -r '.data.state')"
if [ "$test" != "started" ]; then
exit 1
fi
- name: Check the Supervisor code sign
if: needs.init.outputs.publish == 'true'
run: |
echo "Enable Content-Trust"
test=$(docker exec hassio_cli ha security options --content-trust=true --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
echo "Run supervisor health check"
test=$(docker exec hassio_cli ha resolution healthcheck --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
echo "Check supervisor unhealthy"
test=$(docker exec hassio_cli ha resolution info --no-progress --raw-json | jq -r '.data.unhealthy[]')
if [ "$test" != "" ]; then
exit 1
fi
echo "Check supervisor supported"
test=$(docker exec hassio_cli ha resolution info --no-progress --raw-json | jq -r '.data.unsupported[]')
if [[ "$test" =~ source_mods ]]; then
exit 1
fi
- name: Create full backup
id: backup
run: |
test=$(docker exec hassio_cli ha backups new --no-progress --raw-json)
if [ "$(echo $test | jq -r '.result')" != "ok" ]; then
exit 1
fi
echo "slug=$(echo $test | jq -r '.data.slug')" >> "$GITHUB_OUTPUT"
- name: Uninstall SSH add-on
run: |
test=$(docker exec hassio_cli ha addons uninstall core_ssh --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
- name: Restart supervisor
run: |
test=$(docker exec hassio_cli ha supervisor restart --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
- name: Wait for Supervisor to come up
run: |
SUPERVISOR=$(docker inspect --format='{{.NetworkSettings.IPAddress}}' hassio_supervisor)
ping="error"
while [ "$ping" != "ok" ]; do
ping=$(curl -sSL "http://$SUPERVISOR/supervisor/ping" | jq -r '.result')
sleep 5
done
- name: Restore SSH add-on from backup
run: |
test=$(docker exec hassio_cli ha backups restore ${{ steps.backup.outputs.slug }} --addons core_ssh --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
# Make sure it actually installed
test=$(docker exec hassio_cli ha addons info core_ssh --no-progress --raw-json | jq -r '.data.version')
if [[ "$test" == "null" ]]; then
exit 1
fi
# Make sure its state is started
test="$(docker exec hassio_cli ha addons info core_ssh --no-progress --raw-json | jq -r '.data.state')"
if [ "$test" != "started" ]; then
exit 1
fi
- name: Restore SSL directory from backup
run: |
test=$(docker exec hassio_cli ha backups restore ${{ steps.backup.outputs.slug }} --folders ssl --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
- name: Get supervisor logs on failure
if: ${{ cancelled() || failure() }}
run: docker logs hassio_supervisor


@@ -1,19 +0,0 @@
name: Check PR
on:
pull_request:
branches: ["main"]
types: [labeled, unlabeled, synchronize]
jobs:
init:
name: Check labels
runs-on: ubuntu-latest
steps:
- name: Check labels
run: |
labels=$(jq -r '.pull_request.labels[] | .name' ${{ github.event_path }})
echo "$labels"
if [ "$labels" == "cla-signed" ]; then
exit 1
fi


@@ -1,428 +0,0 @@
name: CI
# yamllint disable-line rule:truthy
on:
push:
branches:
- main
pull_request: ~
env:
DEFAULT_PYTHON: "3.13"
PRE_COMMIT_CACHE: ~/.cache/pre-commit
MYPY_CACHE_VERSION: 1
concurrency:
group: "${{ github.workflow }}-${{ github.ref }}"
cancel-in-progress: true
jobs:
# Separate job to pre-populate the base dependency cache
# This prevents upcoming jobs from doing the same individually
prepare:
runs-on: ubuntu-latest
outputs:
python-version: ${{ steps.python.outputs.python-version }}
name: Prepare Python dependencies
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python
id: python
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
with:
path: venv
key: |
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Create Python virtual environment
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
python -m venv venv
. venv/bin/activate
pip install -U pip setuptools
pip install -r requirements.txt -r requirements_tests.txt
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.3
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
restore-keys: |
${{ runner.os }}-pre-commit-
- name: Install pre-commit dependencies
if: steps.cache-precommit.outputs.cache-hit != 'true'
run: |
. venv/bin/activate
pre-commit install-hooks
lint-ruff-format:
name: Check ruff-format
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.6.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.3
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-precommit.outputs.cache-hit != 'true'
run: |
echo "Failed to restore pre-commit environment from cache"
exit 1
- name: Run ruff-format
run: |
. venv/bin/activate
pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure
env:
RUFF_OUTPUT_FORMAT: github
lint-ruff:
name: Check ruff
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.6.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.3
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-precommit.outputs.cache-hit != 'true'
run: |
echo "Failed to restore pre-commit environment from cache"
exit 1
- name: Run ruff
run: |
. venv/bin/activate
pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
env:
RUFF_OUTPUT_FORMAT: github
lint-dockerfile:
name: Check Dockerfile
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Register hadolint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
- name: Check Dockerfile
uses: docker://hadolint/hadolint:v1.18.0
with:
args: hadolint Dockerfile
lint-executable-shebangs:
name: Check executables
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.6.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.3
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-precommit.outputs.cache-hit != 'true'
run: |
echo "Failed to restore pre-commit environment from cache"
exit 1
- name: Register check executables problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
- name: Run executables check
run: |
. venv/bin/activate
pre-commit run --hook-stage manual check-executables-have-shebangs --all-files
lint-json:
name: Check JSON
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.6.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.3
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-precommit.outputs.cache-hit != 'true'
run: |
echo "Failed to restore pre-commit environment from cache"
exit 1
- name: Register check-json problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/check-json.json"
- name: Run check-json
run: |
. venv/bin/activate
pre-commit run --hook-stage manual check-json --all-files
lint-pylint:
name: Check pylint
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.6.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Install additional system dependencies
run: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends libpulse0
- name: Register pylint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/pylint.json"
- name: Run pylint
run: |
. venv/bin/activate
pylint supervisor tests
mypy:
name: Check mypy
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.6.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Generate partial mypy restore key
id: generate-mypy-key
run: |
mypy_version=$(cat requirements_tests.txt | grep mypy | cut -d '=' -f 3)
echo "version=$mypy_version" >> $GITHUB_OUTPUT
echo "key=mypy-${{ env.MYPY_CACHE_VERSION }}-$mypy_version-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
with:
path: venv
key: >-
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore mypy cache
uses: actions/cache@v4.2.3
with:
path: .mypy_cache
key: >-
${{ runner.os }}-mypy-${{ needs.prepare.outputs.python-version }}-${{ steps.generate-mypy-key.outputs.key }}
restore-keys: >-
${{ runner.os }}-mypy-${{ needs.prepare.outputs.python-version }}-mypy-${{ env.MYPY_CACHE_VERSION }}-${{ steps.generate-mypy-key.outputs.version }}
- name: Register mypy problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/mypy.json"
- name: Run mypy
run: |
. venv/bin/activate
mypy --ignore-missing-imports supervisor
pytest:
runs-on: ubuntu-latest
needs: prepare
name: Run tests Python ${{ needs.prepare.outputs.python-version }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.6.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Install Cosign
uses: sigstore/cosign-installer@v3.9.2
with:
cosign-release: "v2.4.3"
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Install additional system dependencies
run: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends libpulse0 libudev1 dbus-daemon
- name: Register Python problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/python.json"
- name: Install Pytest Annotation plugin
run: |
. venv/bin/activate
# Ideally this should be part of our dependencies
# However this plugin is fairly new and doesn't run correctly
# on a non-GitHub environment.
pip install pytest-github-actions-annotate-failures
- name: Run pytest
run: |
. venv/bin/activate
pytest \
-qq \
--timeout=10 \
--durations=10 \
--cov supervisor \
-o console_output_style=count \
tests
- name: Upload coverage artifact
uses: actions/upload-artifact@v4.6.2
with:
name: coverage-${{ needs.prepare.outputs.python-version }}
path: .coverage
include-hidden-files: true
coverage:
name: Process test coverage
runs-on: ubuntu-latest
needs: ["pytest", "prepare"]
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.6.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.3.0
- name: Combine coverage results
run: |
. venv/bin/activate
coverage combine coverage*/.coverage*
coverage report
coverage xml
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v5.4.3


@@ -1,20 +0,0 @@
name: Lock
# yamllint disable-line rule:truthy
on:
schedule:
- cron: "0 0 * * *"
jobs:
lock:
runs-on: ubuntu-latest
steps:
- uses: dessant/lock-threads@v5.0.1
with:
github-token: ${{ github.token }}
issue-inactive-days: "30"
exclude-issue-created-before: "2020-10-01T00:00:00Z"
issue-lock-reason: ""
pr-inactive-days: "1"
exclude-pr-created-before: "2020-11-01T00:00:00Z"
pr-lock-reason: ""


@@ -1,14 +0,0 @@
{
"problemMatcher": [
{
"owner": "check-executables-have-shebangs",
"pattern": [
{
"regexp": "^(.+):\\s(.+)$",
"file": 1,
"message": 2
}
]
}
]
}


@@ -1,16 +0,0 @@
{
"problemMatcher": [
{
"owner": "check-json",
"pattern": [
{
"regexp": "^(.+):\\s(.+\\sline\\s(\\d+)\\scolumn\\s(\\d+).+)$",
"file": 1,
"message": 2,
"line": 3,
"column": 4
}
]
}
]
}


@@ -1,16 +0,0 @@
{
"problemMatcher": [
{
"owner": "hadolint",
"pattern": [
{
"regexp": "^(.+):(\\d+)\\s+((DL\\d{4}).+)$",
"file": 1,
"line": 2,
"message": 3,
"code": 4
}
]
}
]
}


@@ -1,16 +0,0 @@
{
"problemMatcher": [
{
"owner": "mypy",
"pattern": [
{
"regexp": "^(.+):(\\d+):\\s(error|warning):\\s(.+)$",
"file": 1,
"line": 2,
"severity": 3,
"message": 4
}
]
}
]
}


@@ -1,32 +0,0 @@
{
"problemMatcher": [
{
"owner": "pylint-error",
"severity": "error",
"pattern": [
{
"regexp": "^(.+):(\\d+):(\\d+):\\s(([EF]\\d{4}):\\s.+)$",
"file": 1,
"line": 2,
"column": 3,
"message": 4,
"code": 5
}
]
},
{
"owner": "pylint-warning",
"severity": "warning",
"pattern": [
{
"regexp": "^(.+):(\\d+):(\\d+):\\s(([CRW]\\d{4}):\\s.+)$",
"file": 1,
"line": 2,
"column": 3,
"message": 4,
"code": 5
}
]
}
]
}


@@ -1,18 +0,0 @@
{
"problemMatcher": [
{
"owner": "python",
"pattern": [
{
"regexp": "^\\s*File\\s\\\"(.*)\\\",\\sline\\s(\\d+),\\sin\\s(.*)$",
"file": 1,
"line": 2
},
{
"regexp": "^\\s*raise\\s(.*)\\(\\'(.*)\\'\\)$",
"message": 2
}
]
}
]
}


@@ -1,44 +0,0 @@
name: Release Drafter
on:
push:
branches:
- main
jobs:
update_release_draft:
runs-on: ubuntu-latest
name: Release Drafter
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
with:
fetch-depth: 0
- name: Find Next Version
id: version
run: |
declare -i newpost
latest=$(git describe --tags $(git rev-list --tags --max-count=1))
latestpre=$(echo "$latest" | awk '{split($0,a,"."); print a[1] "." a[2]}')
datepre=$(date --utc '+%Y.%m')
if [[ "$latestpre" == "$datepre" ]]; then
latestpost=$(echo "$latest" | awk '{split($0,a,"."); print a[3]}')
newpost=$latestpost+1
else
newpost=0
fi
echo Current version: $latest
echo New target version: $datepre.$newpost
echo "version=$datepre.$newpost" >> "$GITHUB_OUTPUT"
- name: Run Release Drafter
uses: release-drafter/release-drafter@v6.1.0
with:
tag: ${{ steps.version.outputs.version }}
name: ${{ steps.version.outputs.version }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@@ -1,58 +0,0 @@
name: Restrict task creation
# yamllint disable-line rule:truthy
on:
issues:
types: [opened]
jobs:
check-authorization:
runs-on: ubuntu-latest
# Only run if this is a Task issue type (from the issue form)
if: github.event.issue.issue_type == 'Task'
steps:
- name: Check if user is authorized
uses: actions/github-script@v7
with:
script: |
const issueAuthor = context.payload.issue.user.login;
// Check if user is an organization member
try {
await github.rest.orgs.checkMembershipForUser({
org: 'home-assistant',
username: issueAuthor
});
console.log(`✅ ${issueAuthor} is an organization member`);
return; // Authorized
} catch (error) {
console.log(`❌ ${issueAuthor} is not authorized to create Task issues`);
}
// Close the issue with a comment
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
body: `Hi @${issueAuthor}, thank you for your contribution!\n\n` +
`Task issues are restricted to Open Home Foundation staff and authorized contributors.\n\n` +
`If you would like to:\n` +
`- Report a bug: Please use the [bug report form](https://github.com/home-assistant/supervisor/issues/new?template=bug_report.yml)\n` +
`- Request a feature: Please submit to [Feature Requests](https://github.com/orgs/home-assistant/discussions)\n\n` +
`If you believe you should have access to create Task issues, please contact the maintainers.`
});
await github.rest.issues.update({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
state: 'closed'
});
// Add a label to indicate this was auto-closed
await github.rest.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
labels: ['auto-closed']
});


@@ -1,21 +0,0 @@
name: Sentry Release
# yamllint disable-line rule:truthy
on:
release:
types: [published, prereleased]
jobs:
createSentryRelease:
runs-on: ubuntu-latest
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Sentry Release
uses: getsentry/action-release@v3.2.0
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
with:
environment: production


@@ -1,39 +0,0 @@
name: Stale
# yamllint disable-line rule:truthy
on:
schedule:
- cron: "0 * * * *"
jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v9.1.0
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 30
days-before-close: 7
stale-issue-label: "stale"
exempt-issue-labels: "no-stale,Help%20wanted,help-wanted,pinned,rfc,security"
stale-issue-message: >
There hasn't been any activity on this issue recently. Due to the
high number of incoming GitHub notifications, we have to clean some
of the old issues, as many of them have already been resolved with
the latest updates.
Please make sure to update to the latest version and check if that
solves the issue. Let us know if that works for you by
adding a comment 👍
This issue has now been marked as stale and will be closed if no
further activity occurs. Thank you for your contributions.
stale-pr-label: "stale"
exempt-pr-labels: "no-stale,pinned,rfc,security"
stale-pr-message: >
There hasn't been any activity on this pull request recently. This
pull request has been automatically marked as stale because of that
and will be closed if no further activity occurs within 7 days.
Thank you for your contributions.


@@ -1,82 +0,0 @@
name: Update frontend
on:
schedule: # once a day
- cron: "0 0 * * *"
workflow_dispatch:
jobs:
check-version:
runs-on: ubuntu-latest
outputs:
skip: ${{ steps.check_version.outputs.skip || steps.check_existing_pr.outputs.skip }}
current_version: ${{ steps.check_version.outputs.current_version }}
latest_version: ${{ steps.latest_frontend_version.outputs.latest_tag }}
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Get latest frontend release
id: latest_frontend_version
uses: abatilo/release-info-action@v1.3.3
with:
owner: home-assistant
repo: frontend
- name: Check if version is up to date
id: check_version
run: |
current_version="$(cat .ha-frontend-version)"
latest_version="${{ steps.latest_frontend_version.outputs.latest_tag }}"
echo "current_version=${current_version}" >> $GITHUB_OUTPUT
echo "LATEST_VERSION=${latest_version}" >> $GITHUB_ENV
if [[ ! "$current_version" < "$latest_version" ]]; then
echo "Frontend version is up to date"
echo "skip=true" >> $GITHUB_OUTPUT
fi
- name: Check if there is no open PR with this version
if: steps.check_version.outputs.skip != 'true'
id: check_existing_pr
env:
GH_TOKEN: ${{ github.token }}
run: |
PR=$(gh pr list --state open --base main --json title --search "Update frontend to version $LATEST_VERSION")
if [[ "$PR" != "[]" ]]; then
echo "Skipping - There is already a PR open for version $LATEST_VERSION"
echo "skip=true" >> $GITHUB_OUTPUT
fi
create-pr:
runs-on: ubuntu-latest
needs: check-version
if: needs.check-version.outputs.skip != 'true'
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Clear www folder
run: |
rm -rf supervisor/api/panel/*
- name: Update version file
run: |
echo "${{ needs.check-version.outputs.latest_version }}" > .ha-frontend-version
- name: Download release assets
uses: robinraju/release-downloader@v1
with:
repository: 'home-assistant/frontend'
tag: ${{ needs.check-version.outputs.latest_version }}
fileName: home_assistant_frontend_supervisor-${{ needs.check-version.outputs.latest_version }}.tar.gz
extract: true
out-file-path: supervisor/api/panel/
- name: Remove release assets archive
run: |
rm -f supervisor/api/panel/home_assistant_frontend_supervisor-*.tar.gz
- name: Create PR
uses: peter-evans/create-pull-request@v7
with:
commit-message: "Update frontend to version ${{ needs.check-version.outputs.latest_version }}"
branch: autoupdate-frontend
base: main
draft: true
sign-commits: true
title: "Update frontend to version ${{ needs.check-version.outputs.latest_version }}"
body: >
Update frontend from ${{ needs.check-version.outputs.current_version }} to
[${{ needs.check-version.outputs.latest_version }}](https://github.com/home-assistant/frontend/releases/tag/${{ needs.check-version.outputs.latest_version }})

.gitignore vendored

@@ -92,11 +92,4 @@ ENV/
.pylint.d/
# VS Code
.vscode/*
!.vscode/cSpell.json
!.vscode/tasks.json
!.vscode/launch.json
# mypy
/.mypy_cache/*
/.dmypy.json
.vscode/

.gitmodules vendored Normal file

@@ -0,0 +1,4 @@
[submodule "home-assistant-polymer"]
path = home-assistant-polymer
url = https://github.com/home-assistant/home-assistant-polymer
branch = dev


@@ -1 +0,0 @@
20250401.0


@@ -1,7 +0,0 @@
ignored:
- DL3003
- DL3006
- DL3013
- DL3018
- DL3042
- SC2155


@@ -1,27 +0,0 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.11.10
hooks:
- id: ruff
args:
- --fix
- id: ruff-format
files: ^((supervisor|tests)/.+)?[^/]+\.py$
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
- id: check-executables-have-shebangs
stages: [manual]
- id: check-json
- repo: local
hooks:
# Run mypy through our wrapper script in order to get the possible
# pyenv and/or virtualenv activated; it may not have been e.g. if
# committing from a GUI tool that was not launched from an activated
# shell.
- id: mypy
name: mypy
entry: script/run-in-env.sh mypy --ignore-missing-imports
language: script
types_or: [python, pyi]
files: ^supervisor/.+\.(py|pyi)$

.travis.yml Normal file

@@ -0,0 +1,6 @@
sudo: true
dist: xenial
install: pip install -U tox
language: python
python: 3.7
script: tox


@@ -1,21 +0,0 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# Distribution / packaging
*.egg-info/
# General files
.git
.github
.devcontainer
.vscode
.tox
# Data
home-assistant-polymer/
script/
tests/
data/
venv/

.vscode/launch.json vendored

@@ -1,25 +0,0 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "Supervisor remote debug",
"type": "python",
"request": "attach",
"port": 33333,
"host": "172.30.32.2",
"pathMappings": [
{
"localRoot": "${workspaceFolder}",
"remoteRoot": "/usr/src/supervisor"
}
]
},
{
"name": "Debug Tests",
"type": "python",
"request": "test",
"console": "internalConsole",
"justMyCode": false
}
]
}

.vscode/tasks.json vendored

@@ -1,111 +0,0 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "Run Supervisor",
"type": "shell",
"command": "supervisor_run",
"group": {
"kind": "test",
"isDefault": true
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "Run Supervisor CLI",
"type": "shell",
"command": "docker exec -ti hassio_cli /usr/bin/cli.sh",
"group": {
"kind": "test",
"isDefault": true
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "Update Supervisor Panel",
"type": "shell",
"command": "LOKALISE_TOKEN='${input:localiseToken}' ./scripts/update-frontend.sh",
"group": {
"kind": "build",
"isDefault": true
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "Pytest",
"type": "shell",
"command": "pytest --timeout=10 tests",
"group": {
"kind": "test",
"isDefault": true
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "Ruff Check",
"type": "shell",
"command": "ruff check --fix supervisor tests",
"group": {
"kind": "test",
"isDefault": true
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "Ruff Format",
"type": "shell",
"command": "ruff format supervisor tests",
"group": {
"kind": "test",
"isDefault": true
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "Pylint",
"type": "shell",
"command": "pylint supervisor",
"dependsOn": ["Install all Requirements"],
"group": {
"kind": "test",
"isDefault": true
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
}
],
"inputs": [
{
"id": "localiseToken",
"type": "promptString",
"description": "Paste your lokalise token to download frontend translations"
}
]
}

API.md Normal file

@ -0,0 +1,690 @@
# Hass.io
## Hass.io RESTful API
The interface for Home Assistant to control things through the Supervisor.
On error / Code 400:
```json
{
"result": "error",
"message": ""
}
```
On success / Code 200:
```json
{
"result": "ok",
"data": { }
}
```
To access the API you need to set the `X-HASSIO-KEY` header. The key is available to add-ons and Home Assistant via the `HASSIO_TOKEN` environment variable.
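As an illustration, a minimal sketch of an authenticated call, assuming the third-party `requests` package, the injected `HASSIO_TOKEN` variable, and `hassio` as the internal hostname of the Supervisor (an assumption based on the default add-on network):

```python
import os

import requests

# HASSIO_TOKEN is injected into add-on containers by the Supervisor.
headers = {"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]}

# "hassio" as the Supervisor hostname is an assumption; adjust if needed.
payload = requests.get("http://hassio/supervisor/info", headers=headers).json()
if payload["result"] == "ok":
    print(payload["data"]["version"])
else:
    print("API error:", payload["message"])
```

Every response follows the success/error envelope shown above, so checking `result` before reading `data` is the usual pattern.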
### Hass.io
- GET `/supervisor/ping`
This API call doesn't need a token.
- GET `/supervisor/info`
The add-ons listed in `addons` are only the installed ones.
```json
{
"version": "INSTALL_VERSION",
"last_version": "LAST_VERSION",
"arch": "armhf|aarch64|i386|amd64",
"channel": "stable|beta|dev",
"timezone": "TIMEZONE",
"wait_boot": "int",
"addons": [
{
"name": "xy bla",
"slug": "xy",
"description": "description",
"repository": "12345678|null",
"version": "LAST_VERSION",
"installed": "INSTALL_VERSION",
"icon": "bool",
"logo": "bool",
"state": "started|stopped",
}
],
"addons_repositories": [
"REPO_URL"
]
}
```
- POST `/supervisor/update`
Optional:
```json
{
"version": "VERSION"
}
```
- POST `/supervisor/options`
```json
{
"channel": "stable|beta|dev",
"timezone": "TIMEZONE",
"wait_boot": "int",
"addons_repositories": [
"REPO_URL"
]
}
```
- POST `/supervisor/reload`
Reload add-ons/version.
- GET `/supervisor/logs`
Output is the raw Docker log.
- GET `/supervisor/stats`
```json
{
"cpu_percent": 0.0,
"memory_usage": 283123,
"memory_limit": 329392,
"network_tx": 0,
"network_rx": 0,
"blk_read": 0,
"blk_write": 0
}
```
### Snapshot
- GET `/snapshots`
```json
{
"snapshots": [
{
"slug": "SLUG",
"date": "ISO",
"name": "Custom name",
"type": "full|partial",
"protected": "bool"
}
]
}
```
- POST `/snapshots/reload`
- POST `/snapshots/new/upload`
return:
```json
{
"slug": ""
}
```
- POST `/snapshots/new/full`
```json
{
"name": "Optional",
"password": "Optional"
}
```
return:
```json
{
"slug": ""
}
```
- POST `/snapshots/new/partial`
```json
{
"name": "Optional",
"addons": ["ADDON_SLUG"],
"folders": ["FOLDER_NAME"],
"password": "Optional"
}
```
return:
```json
{
"slug": ""
}
```
- GET `/snapshots/{slug}/info`
```json
{
"slug": "SNAPSHOT ID",
"type": "full|partial",
"name": "custom snapshot name / description",
"date": "ISO",
"size": "SIZE_IN_MB",
"protected": "bool",
"homeassistant": "version",
"addons": [
{
"slug": "ADDON_SLUG",
"name": "NAME",
"version": "INSTALLED_VERSION",
"size": "SIZE_IN_MB"
}
],
"repositories": ["URL"],
"folders": ["NAME"]
}
```
- POST `/snapshots/{slug}/remove`
- GET `/snapshots/{slug}/download`
- POST `/snapshots/{slug}/restore/full`
```json
{
"password": "Optional"
}
```
- POST `/snapshots/{slug}/restore/partial`
```json
{
"homeassistant": "bool",
"addons": ["ADDON_SLUG"],
"folders": ["FOLDER_NAME"],
"password": "Optional"
}
```
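A sketch of a partial restore under the same assumptions as above (`requests`, `HASSIO_TOKEN`, `hassio` hostname); the snapshot slug, add-on slug, and password are hypothetical:

```python
import os

import requests

headers = {"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]}
body = {
    "homeassistant": True,
    "addons": ["core_mosquitto"],     # hypothetical add-on slug
    "folders": ["ssl"],
    "password": "snapshot-password",  # only needed for protected snapshots
}
# "a1b2c3d4" is a hypothetical slug; list real ones via GET /snapshots.
resp = requests.post(
    "http://hassio/snapshots/a1b2c3d4/restore/partial",
    headers=headers,
    json=body,
)
print(resp.json()["result"])
```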
### Host
- POST `/host/reload`
- POST `/host/shutdown`
- POST `/host/reboot`
- GET `/host/info`
```json
{
"hostname": "hostname|null",
"features": ["shutdown", "reboot", "hostname", "services", "hassos"],
"operating_system": "HassOS XY|Ubuntu 16.4|null",
"kernel": "4.15.7|null",
"chassis": "specific|null",
"deployment": "stable|beta|dev|null",
"cpe": "xy|null",
}
```
- POST `/host/options`
```json
{
"hostname": "",
}
```
#### Services
- GET `/host/services`
```json
{
"services": [
{
"name": "xy.service",
"description": "XY ...",
"state": "active|"
}
]
}
```
- POST `/host/service/{unit}/stop`
- POST `/host/service/{unit}/start`
- POST `/host/service/{unit}/reload`
### HassOS
- GET `/hassos/info`
```json
{
"version": "2.3",
"version_cli": "7",
"version_latest": "2.4",
"version_cli_latest": "8",
"board": "ova|rpi"
}
```
- POST `/hassos/update`
```json
{
"version": "optional"
}
```
- POST `/hassos/update/cli`
```json
{
"version": "optional"
}
```
- POST `/hassos/config/sync`
Load host configs from a USB stick.
### Hardware
- GET `/hardware/info`
```json
{
"serial": ["/dev/xy"],
"input": ["Input device name"],
"disk": ["/dev/sdax"],
"gpio": ["gpiochip0", "gpiochip100"],
"audio": {
"CARD_ID": {
"name": "xy",
"type": "microphone",
"devices": {
"DEV_ID": "type of device"
}
}
}
}
```
- GET `/hardware/audio`
```json
{
"audio": {
"input": {
"0,0": "Mic"
},
"output": {
"1,0": "Jack",
"1,1": "HDMI"
}
}
}
```
### Home Assistant
- GET `/homeassistant/info`
```json
{
"version": "INSTALL_VERSION",
"last_version": "LAST_VERSION",
"machine": "Image machine type",
"image": "str",
"custom": "bool -> if custom image",
"boot": "bool",
"port": 8123,
"ssl": "bool",
"watchdog": "bool",
"startup_time": 600
}
```
- POST `/homeassistant/update`
Optional:
```json
{
"version": "VERSION"
}
```
- GET `/homeassistant/logs`
Output is the raw Docker log.
- POST `/homeassistant/restart`
- POST `/homeassistant/check`
- POST `/homeassistant/start`
- POST `/homeassistant/stop`
- POST `/homeassistant/options`
```json
{
"image": "Optional|null",
"last_version": "Optional for custom image|null",
"port": "port for access hass",
"ssl": "bool",
"password": "",
"refresh_token": "",
"watchdog": "bool",
"startup_time": 600
}
```
Setting `image` and `last_version` to `null` resets these options.
- POST/GET `/homeassistant/api`
Proxy to the real Home Assistant instance.
- GET `/homeassistant/websocket`
Proxy to the real websocket instance.
- GET `/homeassistant/stats`
```json
{
"cpu_percent": 0.0,
"memory_usage": 283123,
"memory_limit": 329392,
"network_tx": 0,
"network_rx": 0,
"blk_read": 0,
"blk_write": 0
}
```
### RESTful for API addons
If an add-on wants to call itself, it can use `/addons/self/...`.
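For example, an add-on can inspect its own metadata through the `self` alias; a minimal sketch under the same assumptions as the earlier examples:

```python
import os

import requests

headers = {"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]}

# "self" resolves to the calling add-on, so no slug is required.
info = requests.get("http://hassio/addons/self/info", headers=headers).json()
print(info["data"]["name"], info["data"]["version"])
```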
- GET `/addons`
Get all available add-ons.
```json
{
"addons": [
{
"name": "xy bla",
"slug": "xy",
"description": "description",
"repository": "core|local|REP_ID",
"version": "LAST_VERSION",
"installed": "none|INSTALL_VERSION",
"detached": "bool",
"available": "bool",
"build": "bool",
"url": "null|url",
"icon": "bool",
"logo": "bool"
}
],
"repositories": [
{
"slug": "12345678",
"name": "Repitory Name|unknown",
"source": "URL_OF_REPOSITORY",
"url": "WEBSITE|REPOSITORY",
"maintainer": "BLA BLU <fla@dld.ch>|unknown"
}
]
}
```
- POST `/addons/reload`
- GET `/addons/{addon}/info`
```json
{
"name": "xy bla",
"slug": "xdssd_xybla",
"description": "description",
"long_description": "null|markdown",
"auto_update": "bool",
"url": "null|url of addon",
"detached": "bool",
"available": "bool",
"arch": ["armhf", "aarch64", "i386", "amd64"],
"machine": "[raspberrypi2, tinker]",
"repository": "12345678|null",
"version": "null|VERSION_INSTALLED",
"last_version": "LAST_VERSION",
"state": "none|started|stopped",
"boot": "auto|manual",
"build": "bool",
"options": "{}",
"network": "{}|null",
"host_network": "bool",
"host_pid": "bool",
"host_ipc": "bool",
"host_dbus": "bool",
"privileged": ["NET_ADMIN", "SYS_ADMIN"],
"apparmor": "disable|default|profile",
"devices": ["/dev/xy"],
"auto_uart": "bool",
"icon": "bool",
"logo": "bool",
"changelog": "bool",
"hassio_api": "bool",
"hassio_role": "default|homeassistant|manager|admin",
"homeassistant_api": "bool",
"auth_api": "bool",
"full_access": "bool",
"protected": "bool",
"rating": "1-6",
"stdin": "bool",
"webui": "null|http(s)://[HOST]:port/xy/zx",
"gpio": "bool",
"devicetree": "bool",
"docker_api": "bool",
"audio": "bool",
"audio_input": "null|0,0",
"audio_output": "null|0,0",
"services_role": "['service:access']",
"discovery": "['service']"
}
```
- GET `/addons/{addon}/icon`
- GET `/addons/{addon}/logo`
- GET `/addons/{addon}/changelog`
- POST `/addons/{addon}/options`
```json
{
"boot": "auto|manual",
"auto_update": "bool",
"network": {
"CONTAINER": "port|[ip, port]"
},
"options": {},
"audio_output": "null|0,0",
"audio_input": "null|0,0"
}
```
To reset custom network/audio/options, set them to `null`.
- POST `/addons/{addon}/security`
This endpoint cannot be called by the add-on itself.
```json
{
"protected": "bool",
}
```
- POST `/addons/{addon}/start`
- POST `/addons/{addon}/stop`
- POST `/addons/{addon}/install`
- POST `/addons/{addon}/uninstall`
- POST `/addons/{addon}/update`
- GET `/addons/{addon}/logs`
Output is the raw Docker log.
- POST `/addons/{addon}/restart`
- POST `/addons/{addon}/rebuild`
Only supported for local-build add-ons.
- POST `/addons/{addon}/stdin`
Write data to the add-on's stdin.
- GET `/addons/{addon}/stats`
```json
{
"cpu_percent": 0.0,
"memory_usage": 283123,
"memory_limit": 329392,
"network_tx": 0,
"network_rx": 0,
"blk_read": 0,
"blk_write": 0
}
```
### Discovery
- GET `/discovery`
```json
{
"discovery": [
{
"addon": "slug",
"service": "name",
"uuid": "uuid",
"config": {}
}
]
}
```
- GET `/discovery/{UUID}`
```json
{
"addon": "slug",
"service": "name",
"uuid": "uuid",
"config": {}
}
```
- POST `/discovery`
```json
{
"service": "name",
"config": {}
}
```
return:
```json
{
"uuid": "uuid"
}
```
- DEL `/discovery/{UUID}`
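A sketch of the full discovery round-trip (service name and config are hypothetical; auth assumptions as above):

```python
import os

import requests

headers = {"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]}

# Announce a service; the success envelope carries the new UUID.
created = requests.post(
    "http://hassio/discovery",
    headers=headers,
    json={"service": "mqtt", "config": {"host": "172.30.32.1", "port": 1883}},
).json()
uuid = created["data"]["uuid"]

# Read the message back, then remove it again.
print(requests.get(f"http://hassio/discovery/{uuid}", headers=headers).json())
requests.delete(f"http://hassio/discovery/{uuid}", headers=headers)
```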
### Services
- GET `/services`
```json
{
"services": [
{
"slug": "name",
"available": "bool",
"providers": "list"
}
]
}
```
#### MQTT
- GET `/services/mqtt`
```json
{
"addon": "name",
"host": "xy",
"port": "8883",
"ssl": "bool",
"username": "optional",
"password": "optional",
"protocol": "3.1.1"
}
```
- POST `/services/mqtt`
```json
{
"host": "xy",
"port": "8883",
"ssl": "bool|optional",
"username": "optional",
"password": "optional",
"protocol": "3.1.1"
}
```
- DEL `/services/mqtt`
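For instance, a broker add-on could publish its connection details and a consumer could read them back; the values here are hypothetical:

```python
import os

import requests

headers = {"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]}

# Publish hypothetical broker details to the service registry.
requests.post(
    "http://hassio/services/mqtt",
    headers=headers,
    json={"host": "172.30.32.1", "port": "8883", "ssl": True,
          "username": "broker-user", "password": "broker-pass",
          "protocol": "3.1.1"},
)

# Consumers read the stored details back.
print(requests.get("http://hassio/services/mqtt", headers=headers).json())
```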
### Misc
- GET `/info`
```json
{
"supervisor": "version",
"homeassistant": "version",
"hassos": "null|version",
"hostname": "name",
"machine": "type",
"arch": "arch",
"channel": "stable|beta|dev"
}
```
### Auth / SSO API
You can use the Home Assistant user system; the Supervisor handles this auth
system.
You can call POST `/auth`.
We support:
- JSON `{ "user|name": "...", "password": "..." }`
- application/x-www-form-urlencoded `user|name=...&password=...`
- BasicAuth
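A sketch of the three credential formats (the doc writes the key as `user|name`; `username` is used below as an assumption, and the credentials are hypothetical):

```python
import os

import requests

headers = {"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]}
creds = {"username": "jane", "password": "secret"}  # hypothetical user

# JSON body
requests.post("http://hassio/auth", headers=headers, json=creds)

# application/x-www-form-urlencoded body
requests.post("http://hassio/auth", headers=headers, data=creds)

# HTTP Basic authentication
requests.post("http://hassio/auth", headers=headers,
              auth=(creds["username"], creds["password"]))
```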


@ -1 +0,0 @@
.github/copilot-instructions.md

Dockerfile

@ -1,53 +1,27 @@
ARG BUILD_FROM
FROM ${BUILD_FROM}
ENV \
S6_SERVICES_GRACETIME=10000 \
SUPERVISOR_API=http://localhost \
CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1 \
UV_SYSTEM_PYTHON=true
ARG \
COSIGN_VERSION \
BUILD_ARCH \
QEMU_CPU
FROM $BUILD_FROM
# Install base
WORKDIR /usr/src
RUN \
set -x \
&& apk add --no-cache \
findutils \
eudev \
eudev-libs \
git \
libffi \
libpulse \
musl \
openssl \
yaml \
\
&& curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \
&& chmod a+x /usr/bin/cosign \
&& pip3 install uv==0.6.17
RUN apk add --no-cache \
git \
socat \
glib \
libstdc++ \
eudev-libs
# Install requirements
COPY requirements.txt .
RUN \
if [ "${BUILD_ARCH}" = "i386" ]; then \
setarch="linux32"; \
else \
setarch=""; \
fi \
&& ${setarch} uv pip install --compile-bytecode --no-cache --no-build -r requirements.txt \
&& rm -f requirements.txt
COPY requirements.txt /usr/src/
RUN apk add --no-cache --virtual .build-dependencies \
make \
g++ \
&& export MAKEFLAGS="-j$(nproc)" \
&& pip3 install --no-cache-dir -r /usr/src/requirements.txt \
&& apk del .build-dependencies \
&& rm -f /usr/src/requirements.txt
# Install Home Assistant Supervisor
COPY . supervisor
RUN \
uv pip install --no-cache -e ./supervisor \
&& python3 -m compileall ./supervisor/supervisor
# Install HassIO
COPY . /usr/src/hassio
RUN pip3 install --no-cache-dir /usr/src/hassio \
&& rm -rf /usr/src/hassio
WORKDIR /
COPY rootfs /
CMD [ "python3", "-m", "hassio" ]

LICENSE.md

@ -178,7 +178,7 @@
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
@ -186,7 +186,7 @@
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Copyright 2017 Pascal Vizeli
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.

MANIFEST.in

@ -1,3 +1,3 @@
include LICENSE.md
graft supervisor
graft hassio
recursive-exclude * *.py[co]

README.md

@ -1,34 +1,28 @@
# Home Assistant Supervisor
# Hass.io
## First private cloud solution for home automation
Home Assistant (formerly Hass.io) is a container-based system for managing your
Home Assistant Core installation and related applications. The system is
controlled via Home Assistant which communicates with the Supervisor. The
Supervisor provides an API to manage the installation. This includes changing
network settings or installing and updating software.
Hass.io is a Docker-based system for managing your Home Assistant installation
and related applications. The system is controlled via Home Assistant which
communicates with the Supervisor. The Supervisor provides an API to manage the
installation. This includes changing network settings or installing
and updating software.
![](misc/hassio.png?raw=true)
## Installation
Installation instructions can be found at https://home-assistant.io/getting-started.
Installation instructions can be found at <https://home-assistant.io/hassio>.
## Development
For small changes and bugfixes you can just follow this, but for significant changes open an RFC first.
Development instructions can be found [here][development].
The development of the supervisor is a bit tricky. Not difficult but tricky.
## Release
- You can use the builder to build your supervisor: https://github.com/home-assistant/hassio-build/tree/master/builder
- Go into a HassOS device or VM and pull your supervisor.
- Set the developer mode in updater.json
- Tag it as `homeassistant/xy-hassio-supervisor:latest`
- Restart the service like `systemctl restart hassos-supervisor | journalctl -fu hassos-supervisor`
- Test your changes
Releases are done in 3 stages (channels) with this structure:
1. Pull requests are merged to the `main` branch.
2. A new build is pushed to the `dev` stage.
3. Releases are published.
4. A new build is pushed to the `beta` stage.
5. The [`stable.json`][stable] file is updated.
6. The build that was pushed to `beta` will now be pushed to `stable`.
[development]: https://developers.home-assistant.io/docs/supervisor/development
[stable]: https://github.com/home-assistant/version/blob/master/stable.json
[![Home Assistant - A project from the Open Home Foundation](https://www.openhomefoundation.org/badges/home-assistant.png)](https://www.openhomefoundation.org/)
For small bugfixes or improvements, make a PR. For significant changes, open an RFC first.

build.yaml

@ -1,24 +0,0 @@
image: ghcr.io/home-assistant/{arch}-hassio-supervisor
build_from:
aarch64: ghcr.io/home-assistant/aarch64-base-python:3.13-alpine3.22
armhf: ghcr.io/home-assistant/armhf-base-python:3.13-alpine3.22
armv7: ghcr.io/home-assistant/armv7-base-python:3.13-alpine3.22
amd64: ghcr.io/home-assistant/amd64-base-python:3.13-alpine3.22
i386: ghcr.io/home-assistant/i386-base-python:3.13-alpine3.22
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io
cosign:
base_identity: https://github.com/home-assistant/docker-base/.*
identity: https://github.com/home-assistant/supervisor/.*
args:
COSIGN_VERSION: 2.4.3
labels:
io.hass.type: supervisor
org.opencontainers.image.title: Home Assistant Supervisor
org.opencontainers.image.description: Container-based system for managing Home Assistant Core installation
org.opencontainers.image.source: https://github.com/home-assistant/supervisor
org.opencontainers.image.authors: The Home Assistant Authors
org.opencontainers.image.url: https://www.home-assistant.io/
org.opencontainers.image.documentation: https://www.home-assistant.io/docs/
org.opencontainers.image.licenses: Apache License 2.0


@ -1,11 +0,0 @@
codecov:
branch: dev
coverage:
status:
project:
default:
target: 40
threshold: 0.09
comment: false
github_checks:
annotations: false

hassio/__init__.py Normal file

@ -0,0 +1 @@
"""Init file for Hass.io."""

hassio/__main__.py Normal file

@ -0,0 +1,55 @@
"""Main file for Hass.io."""
import asyncio
from concurrent.futures import ThreadPoolExecutor
import logging
import sys
from hassio import bootstrap
_LOGGER = logging.getLogger(__name__)
def attempt_use_uvloop():
"""Attempt to use uvloop."""
try:
import uvloop
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
except ImportError:
pass
# pylint: disable=invalid-name
if __name__ == "__main__":
bootstrap.initialize_logging()
attempt_use_uvloop()
loop = asyncio.get_event_loop()
if not bootstrap.check_environment():
sys.exit(1)
# init executor pool
executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
loop.set_default_executor(executor)
_LOGGER.info("Initialize Hass.io setup")
coresys = bootstrap.initialize_coresys(loop)
bootstrap.migrate_system_env(coresys)
_LOGGER.info("Setup HassIO")
loop.run_until_complete(coresys.core.setup())
loop.call_soon_threadsafe(loop.create_task, coresys.core.start())
loop.call_soon_threadsafe(bootstrap.reg_signal, loop)
try:
_LOGGER.info("Run Hass.io")
loop.run_forever()
finally:
_LOGGER.info("Stopping Hass.io")
loop.run_until_complete(coresys.core.stop())
executor.shutdown(wait=False)
loop.close()
_LOGGER.info("Close Hass.io")
sys.exit(0)

hassio/addons/__init__.py Normal file

@ -0,0 +1,158 @@
"""Init file for Hass.io add-ons."""
import asyncio
import logging
from .addon import Addon
from .repository import Repository
from .data import AddonsData
from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO, STATE_STARTED
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
BUILTIN_REPOSITORIES = set((REPOSITORY_CORE, REPOSITORY_LOCAL))
class AddonManager(CoreSysAttributes):
"""Manage add-ons inside Hass.io."""
def __init__(self, coresys):
"""Initialize Docker base wrapper."""
self.coresys = coresys
self.data = AddonsData(coresys)
self.addons_obj = {}
self.repositories_obj = {}
@property
def list_addons(self):
"""Return a list of all add-ons."""
return list(self.addons_obj.values())
@property
def list_installed(self):
"""Return a list of installed add-ons."""
return [addon for addon in self.addons_obj.values()
if addon.is_installed]
@property
def list_repositories(self):
"""Return list of add-on repositories."""
return list(self.repositories_obj.values())
def get(self, addon_slug):
"""Return an add-on from slug."""
return self.addons_obj.get(addon_slug)
def from_token(self, token):
"""Return an add-on from Hass.io token."""
for addon in self.list_addons:
if addon.is_installed and token == addon.hassio_token:
return addon
return None
async def load(self):
"""Start up add-on management."""
self.data.reload()
# Init Hass.io built-in repositories
repositories = \
set(self.sys_config.addons_repositories) | BUILTIN_REPOSITORIES
# Init custom repositories and load add-ons
await self.load_repositories(repositories)
async def reload(self):
"""Update add-ons from repository and reload list."""
tasks = [repository.update() for repository in
self.repositories_obj.values()]
if tasks:
await asyncio.wait(tasks)
# read data from repositories
self.data.reload()
# update addons
await self.load_addons()
async def load_repositories(self, list_repositories):
"""Add a new custom repository."""
new_rep = set(list_repositories)
old_rep = set(self.repositories_obj)
# add new repository
async def _add_repository(url):
"""Helper function to async add repository."""
repository = Repository(self.coresys, url)
if not await repository.load():
_LOGGER.error("Can't load from repository %s", url)
return
self.repositories_obj[url] = repository
# don't add built-in repository to config
if url not in BUILTIN_REPOSITORIES:
self.sys_config.add_addon_repository(url)
tasks = [_add_repository(url) for url in new_rep - old_rep]
if tasks:
await asyncio.wait(tasks)
# del new repository
for url in old_rep - new_rep - BUILTIN_REPOSITORIES:
self.repositories_obj.pop(url).remove()
self.sys_config.drop_addon_repository(url)
# update data
self.data.reload()
await self.load_addons()
async def load_addons(self):
"""Update/add internal add-on store."""
all_addons = set(self.data.system) | set(self.data.cache)
# calc diff
add_addons = all_addons - set(self.addons_obj)
del_addons = set(self.addons_obj) - all_addons
_LOGGER.info("Load add-ons: %d all - %d new - %d remove",
len(all_addons), len(add_addons), len(del_addons))
# new addons
tasks = []
for addon_slug in add_addons:
addon = Addon(self.coresys, addon_slug)
tasks.append(addon.load())
self.addons_obj[addon_slug] = addon
if tasks:
await asyncio.wait(tasks)
# remove
for addon_slug in del_addons:
self.addons_obj.pop(addon_slug)
async def boot(self, stage):
"""Boot add-ons with mode auto."""
tasks = []
for addon in self.addons_obj.values():
if addon.is_installed and addon.boot == BOOT_AUTO and \
addon.startup == stage:
tasks.append(addon.start())
_LOGGER.info("Startup %s run %d add-ons", stage, len(tasks))
if tasks:
await asyncio.wait(tasks)
await asyncio.sleep(self.sys_config.wait_boot)
async def shutdown(self, stage):
"""Shutdown addons."""
tasks = []
for addon in self.addons_obj.values():
if addon.is_installed and \
await addon.state() == STATE_STARTED and \
addon.startup == stage:
tasks.append(addon.stop())
_LOGGER.info("Shutdown %s stop %d add-ons", stage, len(tasks))
if tasks:
await asyncio.wait(tasks)

hassio/addons/addon.py Normal file

@ -0,0 +1,965 @@
"""Init file for Hass.io add-ons."""
from contextlib import suppress
from copy import deepcopy
import logging
import json
from pathlib import Path, PurePath
import re
import shutil
import tarfile
from tempfile import TemporaryDirectory
import voluptuous as vol
from voluptuous.humanize import humanize_error
from .validate import (
validate_options, SCHEMA_ADDON_SNAPSHOT, RE_VOLUME, RE_SERVICE,
MACHINE_ALL)
from .utils import check_installed, remove_data
from ..const import (
ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_BOOT, ATTR_MAP,
ATTR_OPTIONS, ATTR_PORTS, ATTR_SCHEMA, ATTR_IMAGE, ATTR_REPOSITORY,
ATTR_URL, ATTR_ARCH, ATTR_LOCATON, ATTR_DEVICES, ATTR_ENVIRONMENT,
ATTR_HOST_NETWORK, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_STARTUP, ATTR_UUID,
STATE_STARTED, STATE_STOPPED, STATE_NONE, ATTR_USER, ATTR_SYSTEM,
ATTR_STATE, ATTR_TIMEOUT, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_WEBUI,
ATTR_HASSIO_API, ATTR_AUDIO, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT,
ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY, ATTR_HOST_IPC,
ATTR_HOST_DBUS, ATTR_AUTO_UART, ATTR_DISCOVERY, ATTR_SERVICES,
ATTR_APPARMOR, ATTR_DEVICETREE, ATTR_DOCKER_API, ATTR_FULL_ACCESS,
ATTR_PROTECTED, ATTR_ACCESS_TOKEN, ATTR_HOST_PID, ATTR_HASSIO_ROLE,
ATTR_MACHINE, ATTR_AUTH_API,
SECURITY_PROFILE, SECURITY_DISABLE, SECURITY_DEFAULT)
from ..coresys import CoreSysAttributes
from ..docker.addon import DockerAddon
from ..utils import create_token
from ..utils.json import write_json_file, read_json_file
from ..utils.apparmor import adjust_profile
from ..exceptions import HostAppArmorError
_LOGGER = logging.getLogger(__name__)
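# Matches webui templates such as "http://[HOST]:[PORT:8123]/path" or
# "[PROTO:ssl]://[HOST]:[PORT:1234]": captures a literal http/https scheme
# or a PROTO option name, the declared container port, and a trailing path.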
RE_WEBUI = re.compile(
r"^(?:(?P<s_prefix>https?)|\[PROTO:(?P<t_proto>\w+)\])"
r":\/\/\[HOST\]:\[PORT:(?P<t_port>\d+)\](?P<s_suffix>.*)$")
class Addon(CoreSysAttributes):
"""Hold data for add-on inside Hass.io."""
def __init__(self, coresys, slug):
"""Initialize data holder."""
self.coresys = coresys
self.instance = DockerAddon(coresys, slug)
self._id = slug
async def load(self):
"""Async initialize of object."""
if self.is_installed:
await self.instance.attach()
@property
def slug(self):
"""Return slug/id of add-on."""
return self._id
@property
def _mesh(self):
"""Return add-on data from system or cache."""
return self._data.system.get(self._id, self._data.cache.get(self._id))
@property
def _data(self):
"""Return add-ons data storage."""
return self.sys_addons.data
@property
def is_installed(self):
"""Return True if an add-on is installed."""
return self._id in self._data.system
@property
def is_detached(self):
"""Return True if add-on is detached."""
return self._id not in self._data.cache
@property
def available(self):
"""Return True if this add-on is available on this platform."""
if self.sys_arch not in self.supported_arch:
return False
if self.sys_machine not in self.supported_machine:
return False
return True
@property
def version_installed(self):
"""Return installed version."""
return self._data.user.get(self._id, {}).get(ATTR_VERSION)
def _set_install(self, version):
"""Set addon as installed."""
self._data.system[self._id] = deepcopy(self._data.cache[self._id])
self._data.user[self._id] = {
ATTR_OPTIONS: {},
ATTR_VERSION: version,
}
self._data.save_data()
def _set_uninstall(self):
"""Set add-on as uninstalled."""
self._data.system.pop(self._id, None)
self._data.user.pop(self._id, None)
self._data.save_data()
def _set_update(self, version):
"""Update version of add-on."""
self._data.system[self._id] = deepcopy(self._data.cache[self._id])
self._data.user[self._id][ATTR_VERSION] = version
self._data.save_data()
def _restore_data(self, user, system):
"""Restore data to add-on."""
self._data.user[self._id] = deepcopy(user)
self._data.system[self._id] = deepcopy(system)
self._data.save_data()
@property
def options(self):
"""Return options with local changes."""
if self.is_installed:
return {
**self._data.system[self._id][ATTR_OPTIONS],
**self._data.user[self._id][ATTR_OPTIONS]
}
return self._data.cache[self._id][ATTR_OPTIONS]
@options.setter
def options(self, value):
"""Store user add-on options."""
if value is None:
self._data.user[self._id][ATTR_OPTIONS] = {}
else:
self._data.user[self._id][ATTR_OPTIONS] = deepcopy(value)
@property
def boot(self):
"""Return boot config with prio local settings."""
if ATTR_BOOT in self._data.user.get(self._id, {}):
return self._data.user[self._id][ATTR_BOOT]
return self._mesh[ATTR_BOOT]
@boot.setter
def boot(self, value):
"""Store user boot options."""
self._data.user[self._id][ATTR_BOOT] = value
@property
def auto_update(self):
"""Return if auto update is enable."""
if ATTR_AUTO_UPDATE in self._data.user.get(self._id, {}):
return self._data.user[self._id][ATTR_AUTO_UPDATE]
return None
@auto_update.setter
def auto_update(self, value):
"""Set auto update."""
self._data.user[self._id][ATTR_AUTO_UPDATE] = value
@property
def name(self):
"""Return name of add-on."""
return self._mesh[ATTR_NAME]
@property
def timeout(self):
"""Return timeout of addon for docker stop."""
return self._mesh[ATTR_TIMEOUT]
@property
def uuid(self):
"""Return an API token for this add-on."""
if self.is_installed:
return self._data.user[self._id][ATTR_UUID]
return None
@property
def hassio_token(self):
"""Return access token for Hass.io API."""
if self.is_installed:
return self._data.user[self._id].get(ATTR_ACCESS_TOKEN)
return None
@property
def description(self):
"""Return description of add-on."""
return self._mesh[ATTR_DESCRIPTON]
@property
def long_description(self):
"""Return README.md as long_description."""
readme = Path(self.path_location, 'README.md')
# If readme not exists
if not readme.exists():
return None
# Return data
with readme.open('r') as readme_file:
return readme_file.read()
@property
def repository(self):
"""Return repository of add-on."""
return self._mesh[ATTR_REPOSITORY]
@property
def last_version(self):
"""Return version of add-on."""
if self._id in self._data.cache:
return self._data.cache[self._id][ATTR_VERSION]
return self.version_installed
@property
def protected(self):
"""Return if add-on is in protected mode."""
if self.is_installed:
return self._data.user[self._id][ATTR_PROTECTED]
return True
@protected.setter
def protected(self, value):
"""Set add-on in protected mode."""
self._data.user[self._id][ATTR_PROTECTED] = value
@property
def startup(self):
"""Return startup type of add-on."""
return self._mesh.get(ATTR_STARTUP)
@property
def services_role(self):
"""Return dict of services with rights."""
raw_services = self._mesh.get(ATTR_SERVICES)
if not raw_services:
return {}
services = {}
for data in raw_services:
service = RE_SERVICE.match(data)
services[service.group('service')] = service.group('rights')
return services
@property
def discovery(self):
"""Return list of discoverable components/platforms."""
return self._mesh.get(ATTR_DISCOVERY, [])
@property
def ports(self):
"""Return ports of add-on."""
if self.host_network or ATTR_PORTS not in self._mesh:
return None
if not self.is_installed or \
ATTR_NETWORK not in self._data.user[self._id]:
return self._mesh[ATTR_PORTS]
return self._data.user[self._id][ATTR_NETWORK]
@ports.setter
def ports(self, value):
"""Set custom ports of add-on."""
if value is None:
self._data.user[self._id].pop(ATTR_NETWORK, None)
else:
new_ports = {}
for container_port, host_port in value.items():
if container_port in self._mesh.get(ATTR_PORTS, {}):
new_ports[container_port] = host_port
self._data.user[self._id][ATTR_NETWORK] = new_ports
@property
def webui(self):
"""Return URL to webui or None."""
if ATTR_WEBUI not in self._mesh:
return None
webui = RE_WEBUI.match(self._mesh[ATTR_WEBUI])
# extract arguments
t_port = webui.group('t_port')
t_proto = webui.group('t_proto')
s_prefix = webui.group('s_prefix') or ""
s_suffix = webui.group('s_suffix') or ""
# search host port for this docker port
if self.ports is None:
port = t_port
else:
port = self.ports.get(f"{t_port}/tcp", t_port)
# for interface config or port lists
if isinstance(port, (tuple, list)):
port = port[-1]
# lookup the correct protocol from config
if t_proto:
proto = 'https' if self.options[t_proto] else 'http'
else:
proto = s_prefix
return f"{proto}://[HOST]:{port}{s_suffix}"
    @property
    def host_network(self):
        """Return True if add-on runs on host network."""
        return self._mesh[ATTR_HOST_NETWORK]
    @property
    def host_pid(self):
        """Return True if add-on runs in host PID namespace."""
        return self._mesh[ATTR_HOST_PID]
    @property
    def host_ipc(self):
        """Return True if add-on runs in host IPC namespace."""
        return self._mesh[ATTR_HOST_IPC]
    @property
    def host_dbus(self):
        """Return True if add-on runs on host D-Bus."""
        return self._mesh[ATTR_HOST_DBUS]
@property
def devices(self):
"""Return devices of add-on."""
return self._mesh.get(ATTR_DEVICES)
@property
def auto_uart(self):
"""Return True if we should map all UART device."""
return self._mesh.get(ATTR_AUTO_UART)
@property
def tmpfs(self):
"""Return tmpfs of add-on."""
return self._mesh.get(ATTR_TMPFS)
@property
def environment(self):
"""Return environment of add-on."""
return self._mesh.get(ATTR_ENVIRONMENT)
@property
def privileged(self):
"""Return list of privilege."""
return self._mesh.get(ATTR_PRIVILEGED, [])
@property
def apparmor(self):
"""Return True if AppArmor is enabled."""
if not self._mesh.get(ATTR_APPARMOR):
return SECURITY_DISABLE
elif self.sys_host.apparmor.exists(self.slug):
return SECURITY_PROFILE
return SECURITY_DEFAULT
@property
def legacy(self):
"""Return if the add-on don't support Home Assistant labels."""
return self._mesh.get(ATTR_LEGACY)
@property
def access_docker_api(self):
"""Return if the add-on need read-only Docker API access."""
return self._mesh.get(ATTR_DOCKER_API)
@property
def access_hassio_api(self):
"""Return True if the add-on access to Hass.io REASTful API."""
return self._mesh[ATTR_HASSIO_API]
@property
def access_homeassistant_api(self):
"""Return True if the add-on access to Home Assistant API proxy."""
return self._mesh[ATTR_HOMEASSISTANT_API]
@property
def hassio_role(self):
"""Return Hass.io role for API."""
return self._mesh[ATTR_HASSIO_ROLE]
@property
def with_stdin(self):
"""Return True if the add-on access use stdin input."""
return self._mesh[ATTR_STDIN]
@property
def with_gpio(self):
"""Return True if the add-on access to GPIO interface."""
return self._mesh[ATTR_GPIO]
@property
def with_full_access(self):
"""Return True if the add-on want full access to hardware."""
return self._mesh[ATTR_FULL_ACCESS]
@property
def with_devicetree(self):
"""Return True if the add-on read access to devicetree."""
return self._mesh[ATTR_DEVICETREE]
@property
def access_auth_api(self):
"""Return True if the add-on access to login/auth backend."""
return self._mesh[ATTR_AUTH_API]
@property
def with_audio(self):
"""Return True if the add-on access to audio."""
return self._mesh[ATTR_AUDIO]
@property
def audio_output(self):
"""Return ALSA config for output or None."""
if not self.with_audio:
return None
if self.is_installed and \
ATTR_AUDIO_OUTPUT in self._data.user[self._id]:
return self._data.user[self._id][ATTR_AUDIO_OUTPUT]
return self.sys_host.alsa.default.output
@audio_output.setter
def audio_output(self, value):
"""Set/reset audio output settings."""
if value is None:
self._data.user[self._id].pop(ATTR_AUDIO_OUTPUT, None)
else:
self._data.user[self._id][ATTR_AUDIO_OUTPUT] = value
@property
def audio_input(self):
"""Return ALSA config for input or None."""
if not self.with_audio:
return None
if self.is_installed and ATTR_AUDIO_INPUT in self._data.user[self._id]:
return self._data.user[self._id][ATTR_AUDIO_INPUT]
return self.sys_host.alsa.default.input
@audio_input.setter
def audio_input(self, value):
"""Set/reset audio input settings."""
if value is None:
self._data.user[self._id].pop(ATTR_AUDIO_INPUT, None)
else:
self._data.user[self._id][ATTR_AUDIO_INPUT] = value
@property
def url(self):
"""Return URL of add-on."""
return self._mesh.get(ATTR_URL)
@property
def with_icon(self):
"""Return True if an icon exists."""
return self.path_icon.exists()
@property
def with_logo(self):
"""Return True if a logo exists."""
return self.path_logo.exists()
@property
def with_changelog(self):
"""Return True if a changelog exists."""
return self.path_changelog.exists()
@property
def supported_arch(self):
"""Return list of supported arch."""
return self._mesh[ATTR_ARCH]
@property
def supported_machine(self):
"""Return list of supported machine."""
return self._mesh.get(ATTR_MACHINE) or MACHINE_ALL
@property
def image(self):
"""Return image name of add-on."""
addon_data = self._mesh
# Repository with Dockerhub images
if ATTR_IMAGE in addon_data:
return addon_data[ATTR_IMAGE].format(arch=self.sys_arch)
# local build
return "{}/{}-addon-{}".format(
addon_data[ATTR_REPOSITORY], self.sys_arch,
addon_data[ATTR_SLUG])
@property
def need_build(self):
"""Return True if this add-on need a local build."""
return ATTR_IMAGE not in self._mesh
@property
def map_volumes(self):
"""Return a dict of {volume: policy} from add-on."""
volumes = {}
for volume in self._mesh[ATTR_MAP]:
result = RE_VOLUME.match(volume)
volumes[result.group(1)] = result.group(2) or 'ro'
return volumes
@property
def path_data(self):
"""Return add-on data path inside Supervisor."""
return Path(self.sys_config.path_addons_data, self._id)
@property
def path_extern_data(self):
"""Return add-on data path external for Docker."""
return PurePath(self.sys_config.path_extern_addons_data, self._id)
@property
def path_options(self):
"""Return path to add-on options."""
return Path(self.path_data, "options.json")
@property
def path_location(self):
"""Return path to this add-on."""
return Path(self._mesh[ATTR_LOCATON])
@property
def path_icon(self):
"""Return path to add-on icon."""
return Path(self.path_location, 'icon.png')
@property
def path_logo(self):
"""Return path to add-on logo."""
return Path(self.path_location, 'logo.png')
@property
def path_changelog(self):
"""Return path to add-on changelog."""
return Path(self.path_location, 'CHANGELOG.md')
@property
def path_apparmor(self):
"""Return path to custom AppArmor profile."""
return Path(self.path_location, 'apparmor.txt')
@property
def path_asound(self):
"""Return path to asound config."""
return Path(self.sys_config.path_tmp, f"{self.slug}_asound")
@property
def path_extern_asound(self):
"""Return path to asound config for Docker."""
return Path(self.sys_config.path_extern_tmp, f"{self.slug}_asound")
def save_data(self):
"""Save data of add-on."""
self.sys_addons.data.save_data()
def write_options(self):
"""Return True if add-on options is written to data."""
schema = self.schema
options = self.options
try:
schema(options)
write_json_file(self.path_options, options)
except vol.Invalid as ex:
_LOGGER.error("Add-on %s have wrong options: %s", self._id,
humanize_error(options, ex))
except (OSError, json.JSONDecodeError) as err:
_LOGGER.error("Add-on %s can't write options: %s", self._id, err)
else:
return True
return False
def remove_discovery(self):
"""Remove all discovery message from add-on."""
for message in self.sys_discovery.list_messages:
if message.addon != self.slug:
continue
self.sys_discovery.remove(message)
def write_asound(self):
"""Write asound config to file and return True on success."""
asound_config = self.sys_host.alsa.asound(
alsa_input=self.audio_input, alsa_output=self.audio_output)
try:
with self.path_asound.open('w') as config_file:
config_file.write(asound_config)
except OSError as err:
_LOGGER.error("Add-on %s can't write asound: %s", self._id, err)
return False
return True
async def _install_apparmor(self):
"""Install or Update AppArmor profile for Add-on."""
exists_local = self.sys_host.apparmor.exists(self.slug)
exists_addon = self.path_apparmor.exists()
# Nothing to do
if not exists_local and not exists_addon:
return
# Need removed
if exists_local and not exists_addon:
await self.sys_host.apparmor.remove_profile(self.slug)
return
# Need install/update
with TemporaryDirectory(dir=self.sys_config.path_tmp) as tmp_folder:
profile_file = Path(tmp_folder, 'apparmor.txt')
adjust_profile(self.slug, self.path_apparmor, profile_file)
await self.sys_host.apparmor.load_profile(self.slug, profile_file)
@property
def schema(self):
"""Create a schema for add-on options."""
raw_schema = self._mesh[ATTR_SCHEMA]
if isinstance(raw_schema, bool):
return vol.Schema(dict)
return vol.Schema(vol.All(dict, validate_options(raw_schema)))
def test_update_schema(self):
"""Check if the existing configuration is valid after update."""
if not self.is_installed or self.is_detached:
return True
# load next schema
new_raw_schema = self._data.cache[self._id][ATTR_SCHEMA]
default_options = self._data.cache[self._id][ATTR_OPTIONS]
# if disabled
if isinstance(new_raw_schema, bool):
return True
# merge options
options = {
**self._data.user[self._id][ATTR_OPTIONS],
**default_options,
}
# create voluptuous
new_schema = \
vol.Schema(vol.All(dict, validate_options(new_raw_schema)))
# validate
try:
new_schema(options)
except vol.Invalid:
return False
return True
async def install(self):
"""Install an add-on."""
if not self.available:
_LOGGER.error(
"Add-on %s not supported on %s with %s architecture",
self._id, self.sys_machine, self.sys_arch)
return False
if self.is_installed:
_LOGGER.error("Add-on %s is already installed", self._id)
return False
if not self.path_data.is_dir():
_LOGGER.info(
"Create Home Assistant add-on data folder %s", self.path_data)
self.path_data.mkdir()
# Setup/Fix AppArmor profile
await self._install_apparmor()
if not await self.instance.install(self.last_version):
return False
self._set_install(self.last_version)
return True
@check_installed
async def uninstall(self):
"""Remove an add-on."""
if not await self.instance.remove():
return False
if self.path_data.is_dir():
_LOGGER.info(
"Remove Home Assistant add-on data folder %s", self.path_data)
await remove_data(self.path_data)
# Cleanup audio settings
if self.path_asound.exists():
with suppress(OSError):
self.path_asound.unlink()
# Cleanup AppArmor profile
if self.sys_host.apparmor.exists(self.slug):
with suppress(HostAppArmorError):
await self.sys_host.apparmor.remove_profile(self.slug)
# Remove discovery messages
self.remove_discovery()
self._set_uninstall()
return True
async def state(self):
"""Return running state of add-on."""
if not self.is_installed:
return STATE_NONE
if await self.instance.is_running():
return STATE_STARTED
return STATE_STOPPED
@check_installed
async def start(self):
"""Set options and start add-on."""
if await self.instance.is_running():
_LOGGER.warning("%s already running!", self.slug)
return
# Access Token
self._data.user[self._id][ATTR_ACCESS_TOKEN] = create_token()
self._data.save_data()
# Options
if not self.write_options():
return False
# Sound
if self.with_audio and not self.write_asound():
return False
return await self.instance.run()
@check_installed
def stop(self):
"""Stop add-on.
Return a coroutine.
"""
return self.instance.stop()
@check_installed
async def update(self):
"""Update add-on."""
last_state = await self.state()
if self.last_version == self.version_installed:
_LOGGER.warning("No update available for add-on %s", self._id)
return False
if not await self.instance.update(self.last_version):
return False
self._set_update(self.last_version)
# Setup/Fix AppArmor profile
await self._install_apparmor()
# restore state
if last_state == STATE_STARTED:
await self.start()
return True
@check_installed
async def restart(self):
"""Restart add-on."""
await self.stop()
return await self.start()
@check_installed
def logs(self):
"""Return add-ons log output.
Return a coroutine.
"""
return self.instance.logs()
@check_installed
def stats(self):
"""Return stats of container.
Return a coroutine.
"""
return self.instance.stats()
@check_installed
async def rebuild(self):
"""Perform a rebuild of local build add-on."""
last_state = await self.state()
if not self.need_build:
_LOGGER.error("Can't rebuild a none local build add-on!")
return False
# remove docker container but not addon config
if not await self.instance.remove():
return False
if not await self.instance.install(self.version_installed):
return False
# restore state
if last_state == STATE_STARTED:
await self.start()
return True
@check_installed
async def write_stdin(self, data):
"""Write data to add-on stdin.
Return a coroutine.
"""
if not self.with_stdin:
_LOGGER.error("Add-on don't support write to stdin!")
return False
return await self.instance.write_stdin(data)
@check_installed
async def snapshot(self, tar_file):
"""Snapshot state of an add-on."""
with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp:
# store local image
if self.need_build and not await \
self.instance.export_image(Path(temp, 'image.tar')):
return False
data = {
ATTR_USER: self._data.user.get(self._id, {}),
ATTR_SYSTEM: self._data.system.get(self._id, {}),
ATTR_VERSION: self.version_installed,
ATTR_STATE: await self.state(),
}
# Store local configs/state
try:
write_json_file(Path(temp, 'addon.json'), data)
except (OSError, json.JSONDecodeError) as err:
_LOGGER.error("Can't save meta for %s: %s", self._id, err)
return False
# Store AppArmor Profile
if self.sys_host.apparmor.exists(self.slug):
profile = Path(temp, 'apparmor.txt')
try:
self.sys_host.apparmor.backup_profile(self.slug, profile)
except HostAppArmorError:
_LOGGER.error("Can't backup AppArmor profile")
return False
# write into tarfile
def _write_tarfile():
"""Write tar inside loop."""
with tar_file as snapshot:
snapshot.add(temp, arcname=".")
snapshot.add(self.path_data, arcname="data")
try:
_LOGGER.info("Build snapshot for add-on %s", self._id)
await self.sys_run_in_executor(_write_tarfile)
except (tarfile.TarError, OSError) as err:
_LOGGER.error("Can't write tarfile %s: %s", tar_file, err)
return False
_LOGGER.info("Finish snapshot for addon %s", self._id)
return True
async def restore(self, tar_file):
"""Restore state of an add-on."""
with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp:
# extract snapshot
def _extract_tarfile():
"""Extract tar snapshot."""
with tar_file as snapshot:
snapshot.extractall(path=Path(temp))
try:
await self.sys_run_in_executor(_extract_tarfile)
except tarfile.TarError as err:
_LOGGER.error("Can't read tarfile %s: %s", tar_file, err)
return False
# Read snapshot data
            try:
                data = read_json_file(Path(temp, 'addon.json'))
            except (OSError, json.JSONDecodeError) as err:
                _LOGGER.error("Can't read addon.json: %s", err)
                return False
# Validate
try:
data = SCHEMA_ADDON_SNAPSHOT(data)
except vol.Invalid as err:
_LOGGER.error("Can't validate %s, snapshot data: %s",
self._id, humanize_error(data, err))
return False
# Restore data or reload add-on
_LOGGER.info("Restore config for addon %s", self._id)
self._restore_data(data[ATTR_USER], data[ATTR_SYSTEM])
# Check version / restore image
version = data[ATTR_VERSION]
if not await self.instance.exists():
_LOGGER.info("Restore image for addon %s", self._id)
image_file = Path(temp, 'image.tar')
if image_file.is_file():
await self.instance.import_image(image_file, version)
else:
if await self.instance.install(version):
await self.instance.cleanup()
else:
await self.instance.stop()
# Restore data
def _restore_data():
"""Restore data."""
shutil.copytree(str(Path(temp, "data")), str(self.path_data))
_LOGGER.info("Restore data for addon %s", self._id)
if self.path_data.is_dir():
await remove_data(self.path_data)
try:
await self.sys_run_in_executor(_restore_data)
except shutil.Error as err:
_LOGGER.error("Can't restore origin data: %s", err)
return False
# Restore AppArmor
profile_file = Path(temp, 'apparmor.txt')
if profile_file.exists():
try:
await self.sys_host.apparmor.load_profile(
self.slug, profile_file)
except HostAppArmorError:
_LOGGER.error("Can't restore AppArmor profile")
return False
# Run add-on
if data[ATTR_STATE] == STATE_STARTED:
return await self.start()
_LOGGER.info("Finish restore for add-on %s", self._id)
return True

hassio/addons/build.py Normal file

@ -0,0 +1,77 @@
"""Hass.io add-on build environment."""
from pathlib import Path
from .validate import SCHEMA_BUILD_CONFIG, BASE_IMAGE
from ..const import ATTR_SQUASH, ATTR_BUILD_FROM, ATTR_ARGS, META_ADDON
from ..coresys import CoreSysAttributes
from ..utils.json import JsonConfig
class AddonBuild(JsonConfig, CoreSysAttributes):
"""Handle build options for add-ons."""
def __init__(self, coresys, slug):
"""Initialize Hass.io add-on builder."""
self.coresys = coresys
self._id = slug
super().__init__(
Path(self.addon.path_location, 'build.json'), SCHEMA_BUILD_CONFIG)
def save_data(self):
"""Ignore save function."""
pass
@property
def addon(self):
"""Return add-on of build data."""
return self.sys_addons.get(self._id)
@property
def base_image(self):
"""Base images for this add-on."""
return self._data[ATTR_BUILD_FROM].get(
self.sys_arch, BASE_IMAGE[self.sys_arch])
@property
def squash(self):
"""Return True or False if squash is active."""
return self._data[ATTR_SQUASH]
@property
def additional_args(self):
"""Return additional Docker build arguments."""
return self._data[ATTR_ARGS]
def get_docker_args(self, version):
"""Create a dict with Docker build arguments."""
args = {
'path': str(self.addon.path_location),
'tag': f"{self.addon.image}:{version}",
'pull': True,
'forcerm': True,
'squash': self.squash,
'labels': {
'io.hass.version': version,
'io.hass.arch': self.sys_arch,
'io.hass.type': META_ADDON,
'io.hass.name': self._fix_label('name'),
'io.hass.description': self._fix_label('description'),
},
'buildargs': {
'BUILD_FROM': self.base_image,
'BUILD_VERSION': version,
'BUILD_ARCH': self.sys_arch,
**self.additional_args,
}
}
if self.addon.url:
args['labels']['io.hass.url'] = self.addon.url
return args
def _fix_label(self, label_name):
"""Remove characters they are not supported."""
label = getattr(self.addon, label_name, "")
return label.replace("'", "")

hassio/addons/data.py Normal file

@ -0,0 +1,136 @@
"""Init file for Hass.io add-on data."""
import logging
import json
from pathlib import Path
import voluptuous as vol
from voluptuous.humanize import humanize_error
from .utils import extract_hash_from_path
from .validate import (
SCHEMA_ADDON_CONFIG, SCHEMA_ADDONS_FILE, SCHEMA_REPOSITORY_CONFIG)
from ..const import (
FILE_HASSIO_ADDONS, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON,
REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_USER, ATTR_SYSTEM)
from ..coresys import CoreSysAttributes
from ..utils.json import JsonConfig, read_json_file
_LOGGER = logging.getLogger(__name__)
class AddonsData(JsonConfig, CoreSysAttributes):
"""Hold data for Add-ons inside Hass.io."""
def __init__(self, coresys):
"""Initialize data holder."""
super().__init__(FILE_HASSIO_ADDONS, SCHEMA_ADDONS_FILE)
self.coresys = coresys
self._repositories = {}
self._cache = {}
@property
def user(self):
"""Return local add-on user data."""
return self._data[ATTR_USER]
@property
def system(self):
"""Return local add-on data."""
return self._data[ATTR_SYSTEM]
@property
def cache(self):
"""Return add-on data from cache/repositories."""
return self._cache
@property
def repositories(self):
"""Return add-on data from repositories."""
return self._repositories
def reload(self):
"""Read data from add-on repository."""
self._cache = {}
self._repositories = {}
# read core repository
self._read_addons_folder(
self.sys_config.path_addons_core, REPOSITORY_CORE)
# read local repository
self._read_addons_folder(
self.sys_config.path_addons_local, REPOSITORY_LOCAL)
# add built-in repositories information
self._set_builtin_repositories()
# read custom git repositories
for repository_element in self.sys_config.path_addons_git.iterdir():
if repository_element.is_dir():
self._read_git_repository(repository_element)
def _read_git_repository(self, path):
"""Process a custom repository folder."""
slug = extract_hash_from_path(path)
        # read repository.json if it exists
repository_file = Path(path, "repository.json")
try:
repository_info = SCHEMA_REPOSITORY_CONFIG(
read_json_file(repository_file)
)
except (OSError, json.JSONDecodeError, UnicodeDecodeError):
_LOGGER.warning("Can't read repository information from %s",
repository_file)
return
except vol.Invalid:
_LOGGER.warning("Repository parse error %s", repository_file)
return
# process data
self._repositories[slug] = repository_info
self._read_addons_folder(path, slug)
def _read_addons_folder(self, path, repository):
"""Read data from add-ons folder."""
for addon in path.glob("**/config.json"):
try:
addon_config = read_json_file(addon)
# validate
addon_config = SCHEMA_ADDON_CONFIG(addon_config)
# Generate slug
addon_slug = "{}_{}".format(
repository, addon_config[ATTR_SLUG])
# store
addon_config[ATTR_REPOSITORY] = repository
addon_config[ATTR_LOCATON] = str(addon.parent)
self._cache[addon_slug] = addon_config
except (OSError, json.JSONDecodeError):
_LOGGER.warning("Can't read %s", addon)
except vol.Invalid as ex:
_LOGGER.warning("Can't read %s: %s", addon,
humanize_error(addon_config, ex))
def _set_builtin_repositories(self):
"""Add local built-in repository into dataset."""
try:
builtin_file = Path(__file__).parent.joinpath('built-in.json')
builtin_data = read_json_file(builtin_file)
except (OSError, json.JSONDecodeError) as err:
_LOGGER.warning("Can't read built-in json: %s", err)
return
# core repository
self._repositories[REPOSITORY_CORE] = \
builtin_data[REPOSITORY_CORE]
# local repository
self._repositories[REPOSITORY_LOCAL] = \
builtin_data[REPOSITORY_LOCAL]

hassio/addons/git.py Normal file

@ -0,0 +1,155 @@
"""Init file for Hass.io add-on Git."""
import asyncio
import logging
import functools as ft
from pathlib import Path
import shutil
import git
from .utils import get_hash_from_repository
from ..const import URL_HASSIO_ADDONS, ATTR_URL, ATTR_BRANCH
from ..coresys import CoreSysAttributes
from ..validate import RE_REPOSITORY
_LOGGER = logging.getLogger(__name__)
class GitRepo(CoreSysAttributes):
"""Manage Add-on Git repository."""
def __init__(self, coresys, path, url):
"""Initialize Git base wrapper."""
self.coresys = coresys
self.repo = None
self.path = path
self.lock = asyncio.Lock(loop=coresys.loop)
self._data = RE_REPOSITORY.match(url).groupdict()
@property
def url(self):
"""Return repository URL."""
return self._data[ATTR_URL]
@property
def branch(self):
"""Return repository branch."""
return self._data[ATTR_BRANCH]
async def load(self):
"""Init Git add-on repository."""
if not self.path.is_dir():
return await self.clone()
async with self.lock:
try:
_LOGGER.info("Load add-on %s repository", self.path)
self.repo = await self.sys_run_in_executor(
git.Repo, str(self.path))
except (git.InvalidGitRepositoryError, git.NoSuchPathError,
git.GitCommandError) as err:
_LOGGER.error("Can't load %s repo: %s.", self.path, err)
self._remove()
return False
return True
async def clone(self):
"""Clone git add-on repository."""
async with self.lock:
git_args = {
attribute: value
for attribute, value in (
('recursive', True),
('branch', self.branch),
('depth', 1),
('shallow-submodules', True)
) if value is not None
}
try:
_LOGGER.info("Clone add-on %s repository", self.url)
self.repo = await self.sys_run_in_executor(ft.partial(
git.Repo.clone_from, self.url, str(self.path),
**git_args
))
except (git.InvalidGitRepositoryError, git.NoSuchPathError,
git.GitCommandError) as err:
_LOGGER.error("Can't clone %s repository: %s.", self.url, err)
self._remove()
return False
return True
async def pull(self):
"""Pull Git add-on repo."""
if self.lock.locked():
_LOGGER.warning("It is already a task in progress")
return False
async with self.lock:
_LOGGER.info("Update add-on %s repository", self.url)
branch = self.repo.active_branch.name
try:
# Download data
await self.sys_run_in_executor(ft.partial(
self.repo.remotes.origin.fetch, **{
'update-shallow': True,
'depth': 1,
}))
# Jump on top of that
await self.sys_run_in_executor(ft.partial(
self.repo.git.reset, f"origin/{branch}", hard=True))
# Cleanup old data
await self.sys_run_in_executor(ft.partial(
self.repo.git.clean, "-xdf"))
except (git.InvalidGitRepositoryError, git.NoSuchPathError,
git.GitCommandError) as err:
_LOGGER.error("Can't update %s repo: %s.", self.url, err)
return False
return True
def _remove(self):
"""Remove a repository."""
if not self.path.is_dir():
return
def log_err(funct, path, _):
"""Log error."""
_LOGGER.warning("Can't remove %s", path)
shutil.rmtree(str(self.path), onerror=log_err)
class GitRepoHassIO(GitRepo):
"""Hass.io add-ons repository."""
def __init__(self, coresys):
"""Initialize Git Hass.io add-on repository."""
super().__init__(
coresys, coresys.config.path_addons_core, URL_HASSIO_ADDONS)
class GitRepoCustom(GitRepo):
"""Custom add-ons repository."""
def __init__(self, coresys, url):
"""Initialize custom Git Hass.io addo-n repository."""
path = Path(
coresys.config.path_addons_git,
get_hash_from_repository(url))
super().__init__(coresys, path, url)
def remove(self):
"""Remove a custom repository."""
_LOGGER.info("Remove custom add-on repository %s", self.url)
self._remove()

hassio/addons/repository.py Normal file

@ -0,0 +1,73 @@
"""Represent a Hass.io repository."""
from .git import GitRepoHassIO, GitRepoCustom
from .utils import get_hash_from_repository
from ..const import (
REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_NAME, ATTR_URL, ATTR_MAINTAINER)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
UNKNOWN = 'unknown'
class Repository(CoreSysAttributes):
"""Repository in Hass.io."""
def __init__(self, coresys, repository):
"""Initialize repository object."""
self.coresys = coresys
self.source = None
self.git = None
if repository == REPOSITORY_LOCAL:
self._id = repository
elif repository == REPOSITORY_CORE:
self._id = repository
self.git = GitRepoHassIO(coresys)
else:
self._id = get_hash_from_repository(repository)
self.git = GitRepoCustom(coresys, repository)
self.source = repository
@property
def _mesh(self):
"""Return data struct repository."""
return self.sys_addons.data.repositories.get(self._id, {})
@property
def slug(self):
"""Return slug of repository."""
return self._id
@property
def name(self):
"""Return name of repository."""
return self._mesh.get(ATTR_NAME, UNKNOWN)
@property
def url(self):
"""Return URL of repository."""
return self._mesh.get(ATTR_URL, self.source)
@property
def maintainer(self):
"""Return url of repository."""
return self._mesh.get(ATTR_MAINTAINER, UNKNOWN)
async def load(self):
"""Load addon repository."""
if self.git:
return await self.git.load()
return True
async def update(self):
"""Update add-on repository."""
if self.git:
return await self.git.pull()
return True
def remove(self):
"""Remove add-on repository."""
if self._id in (REPOSITORY_CORE, REPOSITORY_LOCAL):
raise APIError("Can't remove built-in repositories!")
self.git.remove()

108
hassio/addons/utils.py Normal file
View File

@ -0,0 +1,108 @@
"""Util add-ons functions."""
import asyncio
import hashlib
import logging
import re
from ..const import (
SECURITY_DISABLE, SECURITY_PROFILE, PRIVILEGED_NET_ADMIN,
PRIVILEGED_SYS_ADMIN, PRIVILEGED_SYS_RAWIO, PRIVILEGED_SYS_PTRACE,
PRIVILEGED_DAC_READ_SEARCH, ROLE_ADMIN, ROLE_MANAGER)
RE_SHA1 = re.compile(r"[a-f0-9]{8}")
_LOGGER = logging.getLogger(__name__)
def rating_security(addon):
"""Return 1-6 for security rating.
1 = not secure
6 = highly secure
"""
rating = 5
# AppArmor
if addon.apparmor == SECURITY_DISABLE:
rating += -1
elif addon.apparmor == SECURITY_PROFILE:
rating += 1
# Home Assistant Login
if addon.access_auth_api:
rating += 1
# Privileged options
if any(privilege in addon.privileged
for privilege in (PRIVILEGED_NET_ADMIN, PRIVILEGED_SYS_ADMIN,
PRIVILEGED_SYS_RAWIO, PRIVILEGED_SYS_PTRACE,
PRIVILEGED_DAC_READ_SEARCH)):
rating += -1
# API Hass.io role
if addon.hassio_role == ROLE_MANAGER:
rating += -1
elif addon.hassio_role == ROLE_ADMIN:
rating += -2
# Not secure Networking
if addon.host_network:
rating += -1
# Insecure PID namespace
if addon.host_pid:
rating += -2
# Full Access
if addon.with_full_access:
rating += -2
# Docker Access
if addon.access_docker_api:
rating = 1
return max(min(6, rating), 1)
def get_hash_from_repository(name):
"""Generate a hash from repository."""
key = name.lower().encode()
return hashlib.sha1(key).hexdigest()[:8]
def extract_hash_from_path(path):
"""Extract repo id from path."""
repo_dir = path.parts[-1]
if not RE_SHA1.match(repo_dir):
return get_hash_from_repository(repo_dir)
return repo_dir
def check_installed(method):
"""Wrap function with check if add-on is installed."""
async def wrap_check(addon, *args, **kwargs):
"""Return False if not installed or the function."""
if not addon.is_installed:
_LOGGER.error("Addon %s is not installed", addon.slug)
return False
return await method(addon, *args, **kwargs)
return wrap_check
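
A minimal sketch of how the decorator above is meant to be applied; the stand-in class exists only to show the effect and is not part of the diff.

# Minimal sketch; FakeAddon is a stand-in to show check_installed's effect.
class FakeAddon:
    slug = "example"
    is_installed = False

    @check_installed
    async def start(self):
        return True

# await FakeAddon().start()
# -> logs "Addon example is not installed" and returns False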
async def remove_data(folder):
    """Remove folder and reset privileged."""
    proc = None
    try:
        proc = await asyncio.create_subprocess_exec(
            "rm", "-rf", str(folder),
            stdout=asyncio.subprocess.DEVNULL
        )
        _, error_msg = await proc.communicate()
    except OSError as err:
        # If the subprocess never started, proc stays None and we fall
        # through to the error log instead of raising NameError.
        error_msg = str(err)
    if proc and proc.returncode == 0:
        return
    _LOGGER.error("Can't remove Add-on Data: %s", error_msg)

343
hassio/addons/validate.py Normal file
View File

@ -0,0 +1,343 @@
"""Validate add-ons options schema."""
import logging
import re
import uuid
import voluptuous as vol
from ..const import (
ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_STARTUP,
ATTR_BOOT, ATTR_MAP, ATTR_OPTIONS, ATTR_PORTS, STARTUP_ONCE,
STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION, STARTUP_INITIALIZE,
BOOT_AUTO, BOOT_MANUAL, ATTR_SCHEMA, ATTR_IMAGE, ATTR_URL, ATTR_MAINTAINER,
ATTR_ARCH, ATTR_DEVICES, ATTR_ENVIRONMENT, ATTR_HOST_NETWORK, ARCH_ARMHF,
ARCH_AARCH64, ARCH_AMD64, ARCH_I386, ATTR_TMPFS, ATTR_PRIVILEGED,
ATTR_USER, ATTR_STATE, ATTR_SYSTEM, STATE_STARTED, STATE_STOPPED,
ATTR_LOCATON, ATTR_REPOSITORY, ATTR_TIMEOUT, ATTR_NETWORK, ATTR_UUID,
ATTR_AUTO_UPDATE, ATTR_WEBUI, ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_HOST_IPC,
ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API, ATTR_BUILD_FROM, ATTR_SQUASH,
ATTR_ARGS, ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY,
ATTR_HOST_DBUS, ATTR_AUTO_UART, ATTR_SERVICES, ATTR_DISCOVERY,
ATTR_APPARMOR, ATTR_DEVICETREE, ATTR_DOCKER_API, ATTR_PROTECTED,
ATTR_FULL_ACCESS, ATTR_ACCESS_TOKEN, ATTR_HOST_PID, ATTR_HASSIO_ROLE,
ATTR_MACHINE, ATTR_AUTH_API,
PRIVILEGED_NET_ADMIN, PRIVILEGED_SYS_ADMIN, PRIVILEGED_SYS_RAWIO,
PRIVILEGED_IPC_LOCK, PRIVILEGED_SYS_TIME, PRIVILEGED_SYS_NICE,
PRIVILEGED_SYS_RESOURCE, PRIVILEGED_SYS_PTRACE, PRIVILEGED_DAC_READ_SEARCH,
ROLE_DEFAULT, ROLE_HOMEASSISTANT, ROLE_MANAGER, ROLE_ADMIN, ROLE_BACKUP)
from ..validate import (
NETWORK_PORT, DOCKER_PORTS, ALSA_DEVICE, UUID_MATCH, SHA256)
from ..services.validate import DISCOVERY_SERVICES
_LOGGER = logging.getLogger(__name__)
RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")
RE_SERVICE = re.compile(r"^(?P<service>mqtt):(?P<rights>provide|want|need)$")
V_STR = 'str'
V_INT = 'int'
V_FLOAT = 'float'
V_BOOL = 'bool'
V_EMAIL = 'email'
V_URL = 'url'
V_PORT = 'port'
V_MATCH = 'match'
RE_SCHEMA_ELEMENT = re.compile(
r"^(?:"
r"|str|bool|email|url|port"
r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
r"|match\((?P<match>.*)\)"
r")\??$"
)
SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)
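For orientation, a few strings the element regex above accepts (illustrative; a trailing "?" marks an optional option):

# Examples accepted by RE_SCHEMA_ELEMENT:
#   "bool"           plain boolean
#   "int(0,10)"      integer restricted to 0..10
#   "float(,99.5)"   float with only an upper bound
#   "match(^\w+$)"   string validated by a custom regex
#   "port?"          optional network port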
ARCH_ALL = [
ARCH_ARMHF, ARCH_AARCH64, ARCH_AMD64, ARCH_I386
]
MACHINE_ALL = [
'intel-nuc', 'qemux86', 'qemux86-64', 'qemuarm', 'qemuarm-64',
'raspberrypi', 'raspberrypi2', 'raspberrypi3', 'raspberrypi3-64',
'tinker', 'odroid-c2', 'odroid-xu',
]
STARTUP_ALL = [
STARTUP_ONCE, STARTUP_INITIALIZE, STARTUP_SYSTEM, STARTUP_SERVICES,
STARTUP_APPLICATION
]
PRIVILEGED_ALL = [
PRIVILEGED_NET_ADMIN,
PRIVILEGED_SYS_ADMIN,
PRIVILEGED_SYS_RAWIO,
PRIVILEGED_IPC_LOCK,
PRIVILEGED_SYS_TIME,
PRIVILEGED_SYS_NICE,
PRIVILEGED_SYS_RESOURCE,
PRIVILEGED_SYS_PTRACE,
PRIVILEGED_DAC_READ_SEARCH,
]
ROLE_ALL = [
ROLE_DEFAULT,
ROLE_HOMEASSISTANT,
ROLE_BACKUP,
ROLE_MANAGER,
ROLE_ADMIN,
]
BASE_IMAGE = {
ARCH_ARMHF: "homeassistant/armhf-base:latest",
ARCH_AARCH64: "homeassistant/aarch64-base:latest",
ARCH_I386: "homeassistant/i386-base:latest",
ARCH_AMD64: "homeassistant/amd64-base:latest",
}
def _simple_startup(value):
"""Simple startup schema."""
if value == "before":
return STARTUP_SERVICES
if value == "after":
return STARTUP_APPLICATION
return value
# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_CONFIG = vol.Schema({
vol.Required(ATTR_NAME): vol.Coerce(str),
vol.Required(ATTR_VERSION): vol.Coerce(str),
vol.Required(ATTR_SLUG): vol.Coerce(str),
vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
vol.Optional(ATTR_URL): vol.Url(),
vol.Optional(ATTR_ARCH, default=ARCH_ALL): [vol.In(ARCH_ALL)],
vol.Optional(ATTR_MACHINE): [vol.In(MACHINE_ALL)],
vol.Required(ATTR_STARTUP):
vol.All(_simple_startup, vol.In(STARTUP_ALL)),
vol.Required(ATTR_BOOT):
vol.In([BOOT_AUTO, BOOT_MANUAL]),
vol.Optional(ATTR_PORTS): DOCKER_PORTS,
vol.Optional(ATTR_WEBUI):
vol.Match(r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"),
vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
vol.Optional(ATTR_HOST_PID, default=False): vol.Boolean(),
vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
vol.Optional(ATTR_TMPFS):
vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
vol.Optional(ATTR_FULL_ACCESS, default=False): vol.Boolean(),
vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
vol.Optional(ATTR_DEVICETREE, default=False): vol.Boolean(),
vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
vol.Optional(ATTR_HASSIO_ROLE, default=ROLE_DEFAULT): vol.In(ROLE_ALL),
vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(),
vol.Optional(ATTR_STDIN, default=False): vol.Boolean(),
vol.Optional(ATTR_LEGACY, default=False): vol.Boolean(),
vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
vol.Optional(ATTR_DISCOVERY): [vol.In(DISCOVERY_SERVICES)],
vol.Required(ATTR_OPTIONS): dict,
vol.Required(ATTR_SCHEMA): vol.Any(vol.Schema({
vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [
vol.Any(
SCHEMA_ELEMENT,
{vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}
),
], vol.Schema({
vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])
}))
}), False),
vol.Optional(ATTR_IMAGE):
vol.Match(r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$"),
vol.Optional(ATTR_TIMEOUT, default=10):
vol.All(vol.Coerce(int), vol.Range(min=10, max=120)),
}, extra=vol.REMOVE_EXTRA)
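
A minimal config that should satisfy the schema above; the literal strings assume the conventional values of the ATTR_*/STARTUP_*/BOOT_* constants, which are defined outside this diff.

# Minimal add-on config accepted by SCHEMA_ADDON_CONFIG (sketch; key and
# value strings assume the usual constant values).
SCHEMA_ADDON_CONFIG({
    'name': 'Example add-on',
    'version': '1.0.0',
    'slug': 'example',
    'description': 'Demonstration only',
    'startup': 'application',
    'boot': 'manual',
    'options': {},
    'schema': {},
})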
# pylint: disable=no-value-for-parameter
SCHEMA_REPOSITORY_CONFIG = vol.Schema({
vol.Required(ATTR_NAME): vol.Coerce(str),
vol.Optional(ATTR_URL): vol.Url(),
vol.Optional(ATTR_MAINTAINER): vol.Coerce(str),
}, extra=vol.REMOVE_EXTRA)
# pylint: disable=no-value-for-parameter
SCHEMA_BUILD_CONFIG = vol.Schema({
vol.Optional(ATTR_BUILD_FROM, default=BASE_IMAGE): vol.Schema({
vol.In(ARCH_ALL): vol.Match(r"(?:^[\w{}]+/)?[\-\w{}]+:[\.\-\w{}]+$"),
}),
vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
vol.Optional(ATTR_ARGS, default=dict): vol.Schema({
vol.Coerce(str): vol.Coerce(str)
}),
}, extra=vol.REMOVE_EXTRA)
# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_USER = vol.Schema({
vol.Required(ATTR_VERSION): vol.Coerce(str),
vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): UUID_MATCH,
vol.Optional(ATTR_ACCESS_TOKEN): SHA256,
vol.Optional(ATTR_OPTIONS, default=dict): dict,
vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
vol.Optional(ATTR_BOOT):
vol.In([BOOT_AUTO, BOOT_MANUAL]),
vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
}, extra=vol.REMOVE_EXTRA)
SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend({
vol.Required(ATTR_LOCATON): vol.Coerce(str),
vol.Required(ATTR_REPOSITORY): vol.Coerce(str),
})
SCHEMA_ADDONS_FILE = vol.Schema({
vol.Optional(ATTR_USER, default=dict): {
vol.Coerce(str): SCHEMA_ADDON_USER,
},
vol.Optional(ATTR_SYSTEM, default=dict): {
vol.Coerce(str): SCHEMA_ADDON_SYSTEM,
}
})
SCHEMA_ADDON_SNAPSHOT = vol.Schema({
vol.Required(ATTR_USER): SCHEMA_ADDON_USER,
vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
vol.Required(ATTR_STATE): vol.In([STATE_STARTED, STATE_STOPPED]),
vol.Required(ATTR_VERSION): vol.Coerce(str),
}, extra=vol.REMOVE_EXTRA)
def validate_options(raw_schema):
"""Validate schema."""
def validate(struct):
"""Create schema validator for add-ons options."""
options = {}
# read options
for key, value in struct.items():
# Ignore unknown options / remove from list
if key not in raw_schema:
_LOGGER.warning("Unknown options %s", key)
continue
typ = raw_schema[key]
try:
if isinstance(typ, list):
# nested value list
options[key] = _nested_validate_list(typ[0], value, key)
elif isinstance(typ, dict):
# nested value dict
options[key] = _nested_validate_dict(typ, value, key)
else:
# normal value
options[key] = _single_validate(typ, value, key)
except (IndexError, KeyError):
raise vol.Invalid(f"Type error for {key}") from None
_check_missing_options(raw_schema, options, 'root')
return options
return validate
# pylint: disable=no-value-for-parameter
# pylint: disable=inconsistent-return-statements
def _single_validate(typ, value, key):
"""Validate a single element."""
# if required argument
if value is None:
raise vol.Invalid(f"Missing required option '{key}'")
# parse extend data from type
match = RE_SCHEMA_ELEMENT.match(typ)
# prepare range
range_args = {}
for group_name in ('i_min', 'i_max', 'f_min', 'f_max'):
group_value = match.group(group_name)
if group_value:
range_args[group_name[2:]] = float(group_value)
if typ.startswith(V_STR):
return str(value)
elif typ.startswith(V_INT):
return vol.All(vol.Coerce(int), vol.Range(**range_args))(value)
elif typ.startswith(V_FLOAT):
return vol.All(vol.Coerce(float), vol.Range(**range_args))(value)
elif typ.startswith(V_BOOL):
return vol.Boolean()(value)
elif typ.startswith(V_EMAIL):
return vol.Email()(value)
elif typ.startswith(V_URL):
return vol.Url()(value)
elif typ.startswith(V_PORT):
return NETWORK_PORT(value)
elif typ.startswith(V_MATCH):
return vol.Match(match.group('match'))(str(value))
raise vol.Invalid(f"Fatal error for {key} type {typ}")
def _nested_validate_list(typ, data_list, key):
"""Validate nested items."""
options = []
for element in data_list:
# Nested?
if isinstance(typ, dict):
c_options = _nested_validate_dict(typ, element, key)
options.append(c_options)
else:
options.append(_single_validate(typ, element, key))
return options
def _nested_validate_dict(typ, data_dict, key):
"""Validate nested items."""
options = {}
for c_key, c_value in data_dict.items():
# Ignore unknown options / remove from list
if c_key not in typ:
_LOGGER.warning("Unknown options %s", c_key)
continue
# Nested?
if isinstance(typ[c_key], list):
options[c_key] = _nested_validate_list(typ[c_key][0],
c_value, c_key)
else:
options[c_key] = _single_validate(typ[c_key], c_value, c_key)
_check_missing_options(typ, options, key)
return options
def _check_missing_options(origin, exists, root):
"""Check if all options are exists."""
missing = set(origin) - set(exists)
for miss_opt in missing:
if isinstance(origin[miss_opt], str) and \
origin[miss_opt].endswith("?"):
continue
raise vol.Invalid(f"Missing option {miss_opt} in {root}")
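
Putting the options pipeline together end to end, a short sketch (schema and values are made up):

# Sketch of validate_options() in use:
raw_schema = {'ssl': 'bool', 'port': 'port', 'name': 'str?'}
validator = validate_options(raw_schema)
validator({'ssl': True, 'port': 8123})
# -> {'ssl': True, 'port': 8123}; 'name' may be omitted because of the "?"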

283
hassio/api/__init__.py Normal file
View File

@ -0,0 +1,283 @@
"""Init file for Hass.io RESTful API."""
import logging
from pathlib import Path
from aiohttp import web
from .addons import APIAddons
from .auth import APIAuth
from .discovery import APIDiscovery
from .homeassistant import APIHomeAssistant
from .hardware import APIHardware
from .host import APIHost
from .hassos import APIHassOS
from .info import APIInfo
from .proxy import APIProxy
from .supervisor import APISupervisor
from .snapshots import APISnapshots
from .services import APIServices
from .security import SecurityMiddleware
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
class RestAPI(CoreSysAttributes):
"""Handle RESTful API for Hass.io."""
def __init__(self, coresys):
"""Initialize Docker base wrapper."""
self.coresys = coresys
self.security = SecurityMiddleware(coresys)
self.webapp = web.Application(
middlewares=[self.security.token_validation], loop=coresys.loop)
# service stuff
self._runner = web.AppRunner(self.webapp)
self._site = None
async def load(self):
"""Register REST API Calls."""
self._register_supervisor()
self._register_host()
self._register_hassos()
self._register_hardware()
self._register_homeassistant()
self._register_proxy()
self._register_panel()
self._register_addons()
self._register_snapshots()
self._register_discovery()
self._register_services()
self._register_info()
self._register_auth()
def _register_host(self):
"""Register hostcontrol functions."""
api_host = APIHost()
api_host.coresys = self.coresys
self.webapp.add_routes([
web.get('/host/info', api_host.info),
web.post('/host/reboot', api_host.reboot),
web.post('/host/shutdown', api_host.shutdown),
web.post('/host/reload', api_host.reload),
web.post('/host/options', api_host.options),
web.get('/host/services', api_host.services),
web.post('/host/services/{service}/stop', api_host.service_stop),
web.post('/host/services/{service}/start', api_host.service_start),
web.post(
'/host/services/{service}/restart', api_host.service_restart),
web.post(
'/host/services/{service}/reload', api_host.service_reload),
])
def _register_hassos(self):
"""Register HassOS functions."""
api_hassos = APIHassOS()
api_hassos.coresys = self.coresys
self.webapp.add_routes([
web.get('/hassos/info', api_hassos.info),
web.post('/hassos/update', api_hassos.update),
web.post('/hassos/update/cli', api_hassos.update_cli),
web.post('/hassos/config/sync', api_hassos.config_sync),
])
def _register_hardware(self):
"""Register hardware functions."""
api_hardware = APIHardware()
api_hardware.coresys = self.coresys
self.webapp.add_routes([
web.get('/hardware/info', api_hardware.info),
web.get('/hardware/audio', api_hardware.audio),
])
def _register_info(self):
"""Register info functions."""
api_info = APIInfo()
api_info.coresys = self.coresys
self.webapp.add_routes([
web.get('/info', api_info.info),
])
def _register_auth(self):
"""Register auth functions."""
api_auth = APIAuth()
api_auth.coresys = self.coresys
self.webapp.add_routes([
web.post('/auth', api_auth.auth),
])
def _register_supervisor(self):
"""Register Supervisor functions."""
api_supervisor = APISupervisor()
api_supervisor.coresys = self.coresys
self.webapp.add_routes([
web.get('/supervisor/ping', api_supervisor.ping),
web.get('/supervisor/info', api_supervisor.info),
web.get('/supervisor/stats', api_supervisor.stats),
web.get('/supervisor/logs', api_supervisor.logs),
web.post('/supervisor/update', api_supervisor.update),
web.post('/supervisor/reload', api_supervisor.reload),
web.post('/supervisor/options', api_supervisor.options),
])
def _register_homeassistant(self):
"""Register Home Assistant functions."""
api_hass = APIHomeAssistant()
api_hass.coresys = self.coresys
self.webapp.add_routes([
web.get('/homeassistant/info', api_hass.info),
web.get('/homeassistant/logs', api_hass.logs),
web.get('/homeassistant/stats', api_hass.stats),
web.post('/homeassistant/options', api_hass.options),
web.post('/homeassistant/update', api_hass.update),
web.post('/homeassistant/restart', api_hass.restart),
web.post('/homeassistant/stop', api_hass.stop),
web.post('/homeassistant/start', api_hass.start),
web.post('/homeassistant/check', api_hass.check),
])
def _register_proxy(self):
"""Register Home Assistant API Proxy."""
api_proxy = APIProxy()
api_proxy.coresys = self.coresys
self.webapp.add_routes([
web.get('/homeassistant/api/websocket', api_proxy.websocket),
web.get('/homeassistant/websocket', api_proxy.websocket),
web.get('/homeassistant/api/stream', api_proxy.stream),
web.post('/homeassistant/api/{path:.+}', api_proxy.api),
web.get('/homeassistant/api/{path:.+}', api_proxy.api),
web.get('/homeassistant/api/', api_proxy.api),
])
def _register_addons(self):
"""Register Add-on functions."""
api_addons = APIAddons()
api_addons.coresys = self.coresys
self.webapp.add_routes([
web.get('/addons', api_addons.list),
web.post('/addons/reload', api_addons.reload),
web.get('/addons/{addon}/info', api_addons.info),
web.post('/addons/{addon}/install', api_addons.install),
web.post('/addons/{addon}/uninstall', api_addons.uninstall),
web.post('/addons/{addon}/start', api_addons.start),
web.post('/addons/{addon}/stop', api_addons.stop),
web.post('/addons/{addon}/restart', api_addons.restart),
web.post('/addons/{addon}/update', api_addons.update),
web.post('/addons/{addon}/options', api_addons.options),
web.post('/addons/{addon}/rebuild', api_addons.rebuild),
web.get('/addons/{addon}/logs', api_addons.logs),
web.get('/addons/{addon}/icon', api_addons.icon),
web.get('/addons/{addon}/logo', api_addons.logo),
web.get('/addons/{addon}/changelog', api_addons.changelog),
web.post('/addons/{addon}/stdin', api_addons.stdin),
web.post('/addons/{addon}/security', api_addons.security),
web.get('/addons/{addon}/stats', api_addons.stats),
])
def _register_snapshots(self):
"""Register snapshots functions."""
api_snapshots = APISnapshots()
api_snapshots.coresys = self.coresys
self.webapp.add_routes([
web.get('/snapshots', api_snapshots.list),
web.post('/snapshots/reload', api_snapshots.reload),
web.post('/snapshots/new/full', api_snapshots.snapshot_full),
web.post('/snapshots/new/partial', api_snapshots.snapshot_partial),
web.post('/snapshots/new/upload', api_snapshots.upload),
web.get('/snapshots/{snapshot}/info', api_snapshots.info),
web.post('/snapshots/{snapshot}/remove', api_snapshots.remove),
web.post('/snapshots/{snapshot}/restore/full',
api_snapshots.restore_full),
web.post('/snapshots/{snapshot}/restore/partial',
api_snapshots.restore_partial),
web.get('/snapshots/{snapshot}/download', api_snapshots.download),
])
def _register_services(self):
"""Register services functions."""
api_services = APIServices()
api_services.coresys = self.coresys
self.webapp.add_routes([
web.get('/services', api_services.list),
web.get('/services/{service}', api_services.get_service),
web.post('/services/{service}', api_services.set_service),
web.delete('/services/{service}', api_services.del_service),
])
def _register_discovery(self):
"""Register discovery functions."""
api_discovery = APIDiscovery()
api_discovery.coresys = self.coresys
self.webapp.add_routes([
web.get('/discovery', api_discovery.list),
web.get('/discovery/{uuid}', api_discovery.get_discovery),
web.delete('/discovery/{uuid}',
api_discovery.del_discovery),
web.post('/discovery', api_discovery.set_discovery),
])
def _register_panel(self):
"""Register panel for Home Assistant."""
panel_dir = Path(__file__).parent.joinpath("panel")
def create_response(panel_file):
"""Create a function to generate a response."""
path = panel_dir.joinpath(f"{panel_file!s}.html")
return lambda request: web.FileResponse(path)
# This route is for backwards compatibility with HA < 0.58
self.webapp.add_routes([
web.get('/panel', create_response('hassio-main-es5'))])
# This route is for backwards compatibility with HA 0.58 - 0.61
self.webapp.add_routes([
web.get('/panel_es5', create_response('hassio-main-es5')),
web.get('/panel_latest', create_response('hassio-main-latest')),
])
# This route is for backwards compatibility with HA 0.62 - 0.70
self.webapp.add_routes([
web.get('/app-es5/index.html', create_response('index')),
web.get('/app-es5/hassio-app.html', create_response('hassio-app')),
])
# This route is for HA > 0.70
self.webapp.add_routes([web.static('/app', panel_dir)])
async def start(self):
"""Run RESTful API webserver."""
await self._runner.setup()
self._site = web.TCPSite(
self._runner, host="0.0.0.0", port=80, shutdown_timeout=5)
try:
await self._site.start()
except OSError as err:
_LOGGER.fatal(
"Failed to create HTTP server at 0.0.0.0:80 -> %s", err)
else:
_LOGGER.info("Start API on %s", self.sys_docker.network.supervisor)
async def stop(self):
"""Stop RESTful API webserver."""
if not self._site:
return
# Shutdown running API
await self._site.stop()
await self._runner.cleanup()
_LOGGER.info("Stop API on %s", self.sys_docker.network.supervisor)

340
hassio/api/addons.py Normal file
View File

@ -0,0 +1,340 @@
"""Init file for Hass.io Home Assistant RESTful API."""
import asyncio
import logging
import voluptuous as vol
from voluptuous.humanize import humanize_error
from .utils import api_process, api_process_raw, api_validate
from ..addons.utils import rating_security
from ..const import (
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_STATE, ATTR_BOOT, ATTR_OPTIONS,
ATTR_URL, ATTR_DESCRIPTON, ATTR_DETACHED, ATTR_NAME, ATTR_REPOSITORY,
ATTR_BUILD, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_HOST_NETWORK, ATTR_SLUG,
ATTR_SOURCE, ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_ARCH, ATTR_MAINTAINER,
ATTR_INSTALLED, ATTR_LOGO, ATTR_WEBUI, ATTR_DEVICES, ATTR_PRIVILEGED,
ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API,
ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, BOOT_AUTO, BOOT_MANUAL,
ATTR_CHANGELOG, ATTR_HOST_IPC, ATTR_HOST_DBUS, ATTR_LONG_DESCRIPTION,
ATTR_CPU_PERCENT, ATTR_MEMORY_LIMIT, ATTR_MEMORY_USAGE, ATTR_NETWORK_TX,
ATTR_NETWORK_RX, ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_ICON, ATTR_SERVICES,
ATTR_DISCOVERY, ATTR_APPARMOR, ATTR_DEVICETREE, ATTR_DOCKER_API,
ATTR_FULL_ACCESS, ATTR_PROTECTED, ATTR_RATING, ATTR_HOST_PID,
ATTR_HASSIO_ROLE, ATTR_MACHINE, ATTR_AVAILABLE, ATTR_AUTH_API,
CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY, CONTENT_TYPE_TEXT, REQUEST_FROM)
from ..coresys import CoreSysAttributes
from ..validate import DOCKER_PORTS, ALSA_DEVICE
from ..exceptions import APIError
_LOGGER = logging.getLogger(__name__)
SCHEMA_VERSION = vol.Schema({
vol.Optional(ATTR_VERSION): vol.Coerce(str),
})
# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema({
vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
vol.Optional(ATTR_NETWORK): vol.Any(None, DOCKER_PORTS),
vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
})
# pylint: disable=no-value-for-parameter
SCHEMA_SECURITY = vol.Schema({
vol.Optional(ATTR_PROTECTED): vol.Boolean(),
})
class APIAddons(CoreSysAttributes):
"""Handle RESTful API for add-on functions."""
def _extract_addon(self, request, check_installed=True):
"""Return addon, throw an exception it it doesn't exist."""
addon_slug = request.match_info.get('addon')
# Lookup itself
if addon_slug == 'self':
return request.get(REQUEST_FROM)
addon = self.sys_addons.get(addon_slug)
if not addon:
raise APIError("Addon does not exist")
if check_installed and not addon.is_installed:
raise APIError("Addon is not installed")
return addon
@api_process
async def list(self, request):
"""Return all add-ons or repositories."""
data_addons = []
for addon in self.sys_addons.list_addons:
data_addons.append({
ATTR_NAME: addon.name,
ATTR_SLUG: addon.slug,
ATTR_DESCRIPTON: addon.description,
ATTR_VERSION: addon.last_version,
ATTR_INSTALLED: addon.version_installed,
ATTR_AVAILABLE: addon.available,
ATTR_DETACHED: addon.is_detached,
ATTR_REPOSITORY: addon.repository,
ATTR_BUILD: addon.need_build,
ATTR_URL: addon.url,
ATTR_ICON: addon.with_icon,
ATTR_LOGO: addon.with_logo,
})
data_repositories = []
for repository in self.sys_addons.list_repositories:
data_repositories.append({
ATTR_SLUG: repository.slug,
ATTR_NAME: repository.name,
ATTR_SOURCE: repository.source,
ATTR_URL: repository.url,
ATTR_MAINTAINER: repository.maintainer,
})
return {
ATTR_ADDONS: data_addons,
ATTR_REPOSITORIES: data_repositories,
}
@api_process
async def reload(self, request):
"""Reload all add-on data."""
await asyncio.shield(self.sys_addons.reload())
return True
@api_process
async def info(self, request):
"""Return add-on information."""
addon = self._extract_addon(request, check_installed=False)
return {
ATTR_NAME: addon.name,
ATTR_SLUG: addon.slug,
ATTR_DESCRIPTON: addon.description,
ATTR_LONG_DESCRIPTION: addon.long_description,
ATTR_VERSION: addon.version_installed,
ATTR_AUTO_UPDATE: addon.auto_update,
ATTR_REPOSITORY: addon.repository,
ATTR_LAST_VERSION: addon.last_version,
ATTR_STATE: await addon.state(),
ATTR_PROTECTED: addon.protected,
ATTR_RATING: rating_security(addon),
ATTR_BOOT: addon.boot,
ATTR_OPTIONS: addon.options,
ATTR_ARCH: addon.supported_arch,
ATTR_MACHINE: addon.supported_machine,
ATTR_URL: addon.url,
ATTR_DETACHED: addon.is_detached,
ATTR_AVAILABLE: addon.available,
ATTR_BUILD: addon.need_build,
ATTR_NETWORK: addon.ports,
ATTR_HOST_NETWORK: addon.host_network,
ATTR_HOST_PID: addon.host_pid,
ATTR_HOST_IPC: addon.host_ipc,
ATTR_HOST_DBUS: addon.host_dbus,
ATTR_PRIVILEGED: addon.privileged,
ATTR_FULL_ACCESS: addon.with_full_access,
ATTR_APPARMOR: addon.apparmor,
ATTR_DEVICES: _pretty_devices(addon),
ATTR_ICON: addon.with_icon,
ATTR_LOGO: addon.with_logo,
ATTR_CHANGELOG: addon.with_changelog,
ATTR_WEBUI: addon.webui,
ATTR_STDIN: addon.with_stdin,
ATTR_HASSIO_API: addon.access_hassio_api,
ATTR_HASSIO_ROLE: addon.hassio_role,
ATTR_AUTH_API: addon.access_auth_api,
ATTR_HOMEASSISTANT_API: addon.access_homeassistant_api,
ATTR_GPIO: addon.with_gpio,
ATTR_DEVICETREE: addon.with_devicetree,
ATTR_DOCKER_API: addon.access_docker_api,
ATTR_AUDIO: addon.with_audio,
ATTR_AUDIO_INPUT: addon.audio_input,
ATTR_AUDIO_OUTPUT: addon.audio_output,
ATTR_SERVICES: _pretty_services(addon),
ATTR_DISCOVERY: addon.discovery,
}
@api_process
async def options(self, request):
"""Store user options for add-on."""
addon = self._extract_addon(request)
addon_schema = SCHEMA_OPTIONS.extend({
vol.Optional(ATTR_OPTIONS): vol.Any(None, addon.schema),
})
body = await api_validate(addon_schema, request)
if ATTR_OPTIONS in body:
addon.options = body[ATTR_OPTIONS]
if ATTR_BOOT in body:
addon.boot = body[ATTR_BOOT]
if ATTR_AUTO_UPDATE in body:
addon.auto_update = body[ATTR_AUTO_UPDATE]
if ATTR_NETWORK in body:
addon.ports = body[ATTR_NETWORK]
if ATTR_AUDIO_INPUT in body:
addon.audio_input = body[ATTR_AUDIO_INPUT]
if ATTR_AUDIO_OUTPUT in body:
addon.audio_output = body[ATTR_AUDIO_OUTPUT]
addon.save_data()
return True
@api_process
async def security(self, request):
"""Store security options for add-on."""
addon = self._extract_addon(request)
body = await api_validate(SCHEMA_SECURITY, request)
if ATTR_PROTECTED in body:
_LOGGER.warning("Protected flag changing for %s!", addon.slug)
addon.protected = body[ATTR_PROTECTED]
addon.save_data()
return True
@api_process
async def stats(self, request):
"""Return resource information."""
addon = self._extract_addon(request)
stats = await addon.stats()
if not stats:
raise APIError("No stats available")
return {
ATTR_CPU_PERCENT: stats.cpu_percent,
ATTR_MEMORY_USAGE: stats.memory_usage,
ATTR_MEMORY_LIMIT: stats.memory_limit,
ATTR_NETWORK_RX: stats.network_rx,
ATTR_NETWORK_TX: stats.network_tx,
ATTR_BLK_READ: stats.blk_read,
ATTR_BLK_WRITE: stats.blk_write,
}
@api_process
def install(self, request):
"""Install add-on."""
addon = self._extract_addon(request, check_installed=False)
return asyncio.shield(addon.install())
@api_process
def uninstall(self, request):
"""Uninstall add-on."""
addon = self._extract_addon(request)
return asyncio.shield(addon.uninstall())
@api_process
def start(self, request):
"""Start add-on."""
addon = self._extract_addon(request)
# check options
options = addon.options
try:
addon.schema(options)
except vol.Invalid as ex:
raise APIError(humanize_error(options, ex)) from None
return asyncio.shield(addon.start())
@api_process
def stop(self, request):
"""Stop add-on."""
addon = self._extract_addon(request)
return asyncio.shield(addon.stop())
@api_process
def update(self, request):
"""Update add-on."""
addon = self._extract_addon(request)
if addon.last_version == addon.version_installed:
raise APIError("No update available!")
return asyncio.shield(addon.update())
@api_process
def restart(self, request):
"""Restart add-on."""
addon = self._extract_addon(request)
return asyncio.shield(addon.restart())
@api_process
def rebuild(self, request):
"""Rebuild local build add-on."""
addon = self._extract_addon(request)
if not addon.need_build:
raise APIError("Only local build addons are supported")
return asyncio.shield(addon.rebuild())
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request):
"""Return logs from add-on."""
addon = self._extract_addon(request)
return addon.logs()
@api_process_raw(CONTENT_TYPE_PNG)
async def icon(self, request):
"""Return icon from add-on."""
addon = self._extract_addon(request, check_installed=False)
if not addon.with_icon:
raise APIError("No icon found!")
with addon.path_icon.open('rb') as png:
return png.read()
@api_process_raw(CONTENT_TYPE_PNG)
async def logo(self, request):
"""Return logo from add-on."""
addon = self._extract_addon(request, check_installed=False)
if not addon.with_logo:
raise APIError("No logo found!")
with addon.path_logo.open('rb') as png:
return png.read()
@api_process_raw(CONTENT_TYPE_TEXT)
async def changelog(self, request):
"""Return changelog from add-on."""
addon = self._extract_addon(request, check_installed=False)
if not addon.with_changelog:
raise APIError("No changelog found!")
with addon.path_changelog.open('r') as changelog:
return changelog.read()
@api_process
async def stdin(self, request):
"""Write to stdin of add-on."""
addon = self._extract_addon(request)
if not addon.with_stdin:
raise APIError("STDIN not supported by add-on")
data = await request.read()
return await asyncio.shield(addon.write_stdin(data))
def _pretty_devices(addon):
"""Return a simplified device list."""
dev_list = addon.devices
if not dev_list:
return None
return [row.split(':')[0] for row in dev_list]
def _pretty_services(addon):
"""Return a simplified services role list."""
services = []
for name, access in addon.services_role.items():
services.append(f"{name}:{access}")
return services

61
hassio/api/auth.py Normal file
View File

@ -0,0 +1,61 @@
"""Init file for Hass.io auth/SSO RESTful API."""
import logging
from aiohttp import BasicAuth
from aiohttp.web_exceptions import HTTPUnauthorized
from aiohttp.hdrs import CONTENT_TYPE, AUTHORIZATION, WWW_AUTHENTICATE
from .utils import api_process
from ..const import REQUEST_FROM, CONTENT_TYPE_JSON, CONTENT_TYPE_URL
from ..coresys import CoreSysAttributes
from ..exceptions import APIForbidden
_LOGGER = logging.getLogger(__name__)
class APIAuth(CoreSysAttributes):
"""Handle RESTful API for auth functions."""
def _process_basic(self, request, addon):
"""Process login request with basic auth.
Return a coroutine.
"""
auth = BasicAuth.decode(request.headers[AUTHORIZATION])
return self.sys_auth.check_login(addon, auth.login, auth.password)
def _process_dict(self, request, addon, data):
"""Process login with dict data.
Return a coroutine.
"""
username = data.get('username') or data.get('user')
password = data.get('password')
return self.sys_auth.check_login(addon, username, password)
@api_process
async def auth(self, request):
"""Process login request."""
addon = request[REQUEST_FROM]
if not addon.access_auth_api:
raise APIForbidden("Can't use Home Assistant auth!")
# BasicAuth
if AUTHORIZATION in request.headers:
return await self._process_basic(request, addon)
# Json
if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON:
data = await request.json()
return await self._process_dict(request, addon, data)
# URL encoded
if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_URL:
data = await request.post()
return await self._process_dict(request, addon, data)
raise HTTPUnauthorized(headers={
WWW_AUTHENTICATE: "Basic realm=\"Hass.io Authentication\""
})
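
The /auth handler above accepts three request shapes: Basic auth headers, a JSON body, and a URL-encoded form. A brief client sketch of the first shape, with made-up credentials:

# Sketch only; endpoint URL and credentials are assumptions.
import aiohttp

async def login(session: aiohttp.ClientSession):
    # Sends Authorization: Basic base64(user:pass); JSON
    # ({"username": ..., "password": ...}) and URL-encoded
    # (username=...&password=...) bodies are the other two accepted shapes.
    await session.post("http://hassio/auth",
                       auth=aiohttp.BasicAuth("user", "pass"))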

91
hassio/api/discovery.py Normal file
View File

@ -0,0 +1,91 @@
"""Init file for Hass.io network RESTful API."""
import voluptuous as vol
from .utils import api_process, api_validate
from ..const import (
ATTR_ADDON, ATTR_UUID, ATTR_CONFIG, ATTR_DISCOVERY, ATTR_SERVICE,
REQUEST_FROM)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APIForbidden
from ..validate import SERVICE_ALL
SCHEMA_DISCOVERY = vol.Schema({
vol.Required(ATTR_SERVICE): SERVICE_ALL,
vol.Optional(ATTR_CONFIG): vol.Maybe(dict),
})
class APIDiscovery(CoreSysAttributes):
"""Handle RESTful API for discovery functions."""
def _extract_message(self, request):
"""Extract discovery message from URL."""
message = self.sys_discovery.get(request.match_info.get('uuid'))
if not message:
raise APIError("Discovery message not found")
return message
def _check_permission_ha(self, request):
"""Check permission for API call / Home Assistant."""
if request[REQUEST_FROM] != self.sys_homeassistant:
raise APIForbidden("Only HomeAssistant can use this API!")
@api_process
async def list(self, request):
"""Show register services."""
self._check_permission_ha(request)
discovery = []
for message in self.sys_discovery.list_messages:
discovery.append({
ATTR_ADDON: message.addon,
ATTR_SERVICE: message.service,
ATTR_UUID: message.uuid,
ATTR_CONFIG: message.config,
})
return {ATTR_DISCOVERY: discovery}
@api_process
async def set_discovery(self, request):
"""Write data into a discovery pipeline."""
body = await api_validate(SCHEMA_DISCOVERY, request)
addon = request[REQUEST_FROM]
# Access?
if body[ATTR_SERVICE] not in addon.discovery:
raise APIForbidden(f"Can't use discovery!")
# Process discovery message
message = self.sys_discovery.send(addon, **body)
return {ATTR_UUID: message.uuid}
@api_process
async def get_discovery(self, request):
"""Read data into a discovery message."""
message = self._extract_message(request)
# HomeAssistant?
self._check_permission_ha(request)
return {
ATTR_ADDON: message.addon,
ATTR_SERVICE: message.service,
ATTR_UUID: message.uuid,
ATTR_CONFIG: message.config,
}
@api_process
async def del_discovery(self, request):
"""Delete data into a discovery message."""
message = self._extract_message(request)
addon = request[REQUEST_FROM]
# Permission
if message.addon != addon.slug:
raise APIForbidden(f"Can't remove discovery message")
self.sys_discovery.remove(message)
return True
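
For reference, the payload shape set_discovery validates; the 'mqtt' service name and the config keys are assumptions for illustration:

# Sketch of the body SCHEMA_DISCOVERY accepts (values are made up):
SCHEMA_DISCOVERY({'service': 'mqtt', 'config': {'host': '172.30.32.1'}})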

34
hassio/api/hardware.py Normal file
View File

@ -0,0 +1,34 @@
"""Init file for Hass.io hardware RESTful API."""
import logging
from .utils import api_process
from ..const import (
ATTR_SERIAL, ATTR_DISK, ATTR_GPIO, ATTR_AUDIO, ATTR_INPUT, ATTR_OUTPUT)
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
class APIHardware(CoreSysAttributes):
"""Handle RESTful API for hardware functions."""
@api_process
async def info(self, request):
"""Show hardware info."""
return {
ATTR_SERIAL: list(self.sys_hardware.serial_devices),
ATTR_INPUT: list(self.sys_hardware.input_devices),
ATTR_DISK: list(self.sys_hardware.disk_devices),
ATTR_GPIO: list(self.sys_hardware.gpio_devices),
ATTR_AUDIO: self.sys_hardware.audio_devices,
}
@api_process
async def audio(self, request):
"""Show ALSA audio devices."""
return {
ATTR_AUDIO: {
ATTR_INPUT: self.sys_host.alsa.input_devices,
ATTR_OUTPUT: self.sys_host.alsa.output_devices,
}
}

53
hassio/api/hassos.py Normal file
View File

@ -0,0 +1,53 @@
"""Init file for Hass.io HassOS RESTful API."""
import asyncio
import logging
import voluptuous as vol
from .utils import api_process, api_validate
from ..const import (
ATTR_VERSION, ATTR_BOARD, ATTR_VERSION_LATEST, ATTR_VERSION_CLI,
ATTR_VERSION_CLI_LATEST)
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
SCHEMA_VERSION = vol.Schema({
vol.Optional(ATTR_VERSION): vol.Coerce(str),
})
class APIHassOS(CoreSysAttributes):
"""Handle RESTful API for HassOS functions."""
@api_process
async def info(self, request):
"""Return HassOS information."""
return {
ATTR_VERSION: self.sys_hassos.version,
ATTR_VERSION_CLI: self.sys_hassos.version_cli,
ATTR_VERSION_LATEST: self.sys_hassos.version_latest,
ATTR_VERSION_CLI_LATEST: self.sys_hassos.version_cli_latest,
ATTR_BOARD: self.sys_hassos.board,
}
@api_process
async def update(self, request):
"""Update HassOS."""
body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self.sys_hassos.version_latest)
await asyncio.shield(self.sys_hassos.update(version))
@api_process
async def update_cli(self, request):
"""Update HassOS CLI."""
body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self.sys_hassos.version_cli_latest)
await asyncio.shield(self.sys_hassos.update_cli(version))
@api_process
def config_sync(self, request):
"""Trigger config reload on HassOS."""
return asyncio.shield(self.sys_hassos.config_sync())

143
hassio/api/homeassistant.py Normal file
View File

@ -0,0 +1,143 @@
"""Init file for Hass.io Home Assistant RESTful API."""
import asyncio
import logging
import voluptuous as vol
from .utils import api_process, api_process_raw, api_validate
from ..const import (
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_IMAGE, ATTR_CUSTOM, ATTR_BOOT,
ATTR_PORT, ATTR_PASSWORD, ATTR_SSL, ATTR_WATCHDOG, ATTR_CPU_PERCENT,
ATTR_MEMORY_USAGE, ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX,
ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_WAIT_BOOT, ATTR_MACHINE,
ATTR_REFRESH_TOKEN, CONTENT_TYPE_BINARY)
from ..coresys import CoreSysAttributes
from ..validate import NETWORK_PORT, DOCKER_IMAGE
from ..exceptions import APIError
_LOGGER = logging.getLogger(__name__)
# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema({
vol.Optional(ATTR_BOOT): vol.Boolean(),
vol.Inclusive(ATTR_IMAGE, 'custom_hass'):
vol.Maybe(vol.Coerce(str)),
vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'):
vol.Any(None, DOCKER_IMAGE),
vol.Optional(ATTR_PORT): NETWORK_PORT,
vol.Optional(ATTR_PASSWORD): vol.Maybe(vol.Coerce(str)),
vol.Optional(ATTR_SSL): vol.Boolean(),
vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
vol.Optional(ATTR_WAIT_BOOT):
vol.All(vol.Coerce(int), vol.Range(min=60)),
vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(vol.Coerce(str)),
})
SCHEMA_VERSION = vol.Schema({
vol.Optional(ATTR_VERSION): vol.Coerce(str),
})
class APIHomeAssistant(CoreSysAttributes):
"""Handle RESTful API for Home Assistant functions."""
@api_process
async def info(self, request):
"""Return host information."""
return {
ATTR_VERSION: self.sys_homeassistant.version,
ATTR_LAST_VERSION: self.sys_homeassistant.last_version,
ATTR_MACHINE: self.sys_homeassistant.machine,
ATTR_IMAGE: self.sys_homeassistant.image,
ATTR_CUSTOM: self.sys_homeassistant.is_custom_image,
ATTR_BOOT: self.sys_homeassistant.boot,
ATTR_PORT: self.sys_homeassistant.api_port,
ATTR_SSL: self.sys_homeassistant.api_ssl,
ATTR_WATCHDOG: self.sys_homeassistant.watchdog,
ATTR_WAIT_BOOT: self.sys_homeassistant.wait_boot,
}
@api_process
async def options(self, request):
"""Set Home Assistant options."""
body = await api_validate(SCHEMA_OPTIONS, request)
if ATTR_IMAGE in body and ATTR_LAST_VERSION in body:
self.sys_homeassistant.image = body[ATTR_IMAGE]
self.sys_homeassistant.last_version = body[ATTR_LAST_VERSION]
if ATTR_BOOT in body:
self.sys_homeassistant.boot = body[ATTR_BOOT]
if ATTR_PORT in body:
self.sys_homeassistant.api_port = body[ATTR_PORT]
if ATTR_PASSWORD in body:
self.sys_homeassistant.api_password = body[ATTR_PASSWORD]
if ATTR_SSL in body:
self.sys_homeassistant.api_ssl = body[ATTR_SSL]
if ATTR_WATCHDOG in body:
self.sys_homeassistant.watchdog = body[ATTR_WATCHDOG]
if ATTR_WAIT_BOOT in body:
self.sys_homeassistant.wait_boot = body[ATTR_WAIT_BOOT]
if ATTR_REFRESH_TOKEN in body:
self.sys_homeassistant.refresh_token = body[ATTR_REFRESH_TOKEN]
self.sys_homeassistant.save_data()
@api_process
async def stats(self, request):
"""Return resource information."""
stats = await self.sys_homeassistant.stats()
if not stats:
raise APIError("No stats available")
return {
ATTR_CPU_PERCENT: stats.cpu_percent,
ATTR_MEMORY_USAGE: stats.memory_usage,
ATTR_MEMORY_LIMIT: stats.memory_limit,
ATTR_NETWORK_RX: stats.network_rx,
ATTR_NETWORK_TX: stats.network_tx,
ATTR_BLK_READ: stats.blk_read,
ATTR_BLK_WRITE: stats.blk_write,
}
@api_process
async def update(self, request):
"""Update Home Assistant."""
body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self.sys_homeassistant.last_version)
await asyncio.shield(self.sys_homeassistant.update(version))
@api_process
def stop(self, request):
"""Stop Home Assistant."""
return asyncio.shield(self.sys_homeassistant.stop())
@api_process
def start(self, request):
"""Start Home Assistant."""
return asyncio.shield(self.sys_homeassistant.start())
@api_process
def restart(self, request):
"""Restart Home Assistant."""
return asyncio.shield(self.sys_homeassistant.restart())
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request):
"""Return Home Assistant Docker logs."""
return self.sys_homeassistant.logs()
@api_process
async def check(self, request):
"""Check configuration of Home Assistant."""
result = await self.sys_homeassistant.check_config()
if not result.valid:
raise APIError(result.log)

101
hassio/api/host.py Normal file
View File

@ -0,0 +1,101 @@
"""Init file for Hass.io host RESTful API."""
import asyncio
import logging
import voluptuous as vol
from .utils import api_process, api_validate
from ..const import (
ATTR_HOSTNAME, ATTR_FEATURES, ATTR_KERNEL, ATTR_OPERATING_SYSTEM,
ATTR_CHASSIS, ATTR_DEPLOYMENT, ATTR_STATE, ATTR_NAME, ATTR_DESCRIPTON,
ATTR_SERVICES, ATTR_CPE)
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
SERVICE = 'service'
SCHEMA_OPTIONS = vol.Schema({
vol.Optional(ATTR_HOSTNAME): vol.Coerce(str),
})
class APIHost(CoreSysAttributes):
"""Handle RESTful API for host functions."""
@api_process
async def info(self, request):
"""Return host information."""
return {
ATTR_CHASSIS: self.sys_host.info.chassis,
ATTR_CPE: self.sys_host.info.cpe,
ATTR_FEATURES: self.sys_host.supperted_features,
ATTR_HOSTNAME: self.sys_host.info.hostname,
ATTR_OPERATING_SYSTEM: self.sys_host.info.operating_system,
ATTR_DEPLOYMENT: self.sys_host.info.deployment,
ATTR_KERNEL: self.sys_host.info.kernel,
}
@api_process
async def options(self, request):
"""Edit host settings."""
body = await api_validate(SCHEMA_OPTIONS, request)
# hostname
if ATTR_HOSTNAME in body:
await asyncio.shield(
self.sys_host.control.set_hostname(body[ATTR_HOSTNAME]))
@api_process
def reboot(self, request):
"""Reboot host."""
return asyncio.shield(self.sys_host.control.reboot())
@api_process
def shutdown(self, request):
"""Poweroff host."""
return asyncio.shield(self.sys_host.control.shutdown())
@api_process
def reload(self, request):
"""Reload host data."""
return asyncio.shield(self.sys_host.reload())
@api_process
async def services(self, request):
"""Return list of available services."""
services = []
for unit in self.sys_host.services:
services.append({
ATTR_NAME: unit.name,
ATTR_DESCRIPTON: unit.description,
ATTR_STATE: unit.state,
})
return {
ATTR_SERVICES: services
}
@api_process
def service_start(self, request):
"""Start a service."""
unit = request.match_info.get(SERVICE)
return asyncio.shield(self.sys_host.services.start(unit))
@api_process
def service_stop(self, request):
"""Stop a service."""
unit = request.match_info.get(SERVICE)
return asyncio.shield(self.sys_host.services.stop(unit))
@api_process
def service_reload(self, request):
"""Reload a service."""
unit = request.match_info.get(SERVICE)
return asyncio.shield(self.sys_host.services.reload(unit))
@api_process
def service_restart(self, request):
"""Restart a service."""
unit = request.match_info.get(SERVICE)
return asyncio.shield(self.sys_host.services.restart(unit))

27
hassio/api/info.py Normal file
View File

@ -0,0 +1,27 @@
"""Init file for Hass.io info RESTful API."""
import logging
from .utils import api_process
from ..const import (
ATTR_HOMEASSISTANT, ATTR_SUPERVISOR, ATTR_MACHINE, ATTR_ARCH, ATTR_HASSOS,
ATTR_CHANNEL, ATTR_HOSTNAME)
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
class APIInfo(CoreSysAttributes):
"""Handle RESTful API for info functions."""
@api_process
async def info(self, request):
"""Show system info."""
return {
ATTR_SUPERVISOR: self.sys_supervisor.version,
ATTR_HOMEASSISTANT: self.sys_homeassistant.version,
ATTR_HASSOS: self.sys_hassos.version,
ATTR_HOSTNAME: self.sys_host.info.hostname,
ATTR_MACHINE: self.sys_machine,
ATTR_ARCH: self.sys_arch,
ATTR_CHANNEL: self.sys_updater.channel,
}

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,820 @@
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
/**
@license
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/

Binary file not shown.

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long

@@ -0,0 +1,2 @@
(window.webpackJsonp=window.webpackJsonp||[]).push([[4],{100:function(n,r,t){"use strict";t.r(r),t.d(r,"marked",function(){return a}),t.d(r,"filterXSS",function(){return c});var e=t(89),i=t.n(e),o=t(91),u=t.n(o),a=i.a,c=u.a}}]);
//# sourceMappingURL=chunk.9e3883f96f68b3ce89f5.js.map
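
For readability, an unminified sketch of this two-line chunk follows. It only re-exports the `marked` and `xss` packages under stable names; the original source is embedded verbatim in the source map shown a few files below, so the identifiers here are recovered from that map rather than assumed.

// Sketch: the original source of chunk.9e3883f96f68b3ce89f5.js,
// as recorded in its source map.
import marked_ from "marked";
import filterXSS_ from "xss";

export const marked = marked_;
export const filterXSS = filterXSS_;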

Binary file not shown.

@@ -0,0 +1 @@
{"version":3,"sources":["webpack:///../src/resources/load_markdown.js"],"names":["__webpack_require__","r","__webpack_exports__","d","marked","filterXSS","marked__WEBPACK_IMPORTED_MODULE_0__","marked__WEBPACK_IMPORTED_MODULE_0___default","n","xss__WEBPACK_IMPORTED_MODULE_1__","xss__WEBPACK_IMPORTED_MODULE_1___default","marked_","filterXSS_"],"mappings":"0FAAAA,EAAAC,EAAAC,GAAAF,EAAAG,EAAAD,EAAA,2BAAAE,IAAAJ,EAAAG,EAAAD,EAAA,8BAAAG,IAAA,IAAAC,EAAAN,EAAA,IAAAO,EAAAP,EAAAQ,EAAAF,GAAAG,EAAAT,EAAA,IAAAU,EAAAV,EAAAQ,EAAAC,GAGaL,EAASO,IACTN,EAAYO","file":"chunk.9e3883f96f68b3ce89f5.js","sourcesContent":["import marked_ from \"marked\";\nimport filterXSS_ from \"xss\";\n\nexport const marked = marked_;\nexport const filterXSS = filterXSS_;\n"],"sourceRoot":""}

File diff suppressed because one or more lines are too long

@@ -0,0 +1,471 @@
/**
@license
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/

Binary file not shown.

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long

@@ -0,0 +1,2 @@
!function(e){function n(n){for(var t,o,i=n[0],u=n[1],a=0,f=[];a<i.length;a++)o=i[a],r[o]&&f.push(r[o][0]),r[o]=0;for(t in u)Object.prototype.hasOwnProperty.call(u,t)&&(e[t]=u[t]);for(c&&c(n);f.length;)f.shift()()}var t={},r={1:0};function o(n){if(t[n])return t[n].exports;var r=t[n]={i:n,l:!1,exports:{}};return e[n].call(r.exports,r,r.exports,o),r.l=!0,r.exports}o.e=function(e){var n=[],t=r[e];if(0!==t)if(t)n.push(t[2]);else{var i=new Promise(function(n,o){t=r[e]=[n,o]});n.push(t[2]=i);var u,a=document.getElementsByTagName("head")[0],c=document.createElement("script");c.charset="utf-8",c.timeout=120,o.nc&&c.setAttribute("nonce",o.nc),c.src=function(e){return o.p+"chunk."+{0:"a11f1ea4fdc02a5118f5",2:"8c049a124b9397e54c16",3:"c46dad04f4e2184b0607",4:"9e3883f96f68b3ce89f5",5:"0cb8b788b03dcc48da14",6:"762ffcdfd97d94779103",7:"0853908528652fbc5d4f"}[e]+".js"}(e),u=function(n){c.onerror=c.onload=null,clearTimeout(f);var t=r[e];if(0!==t){if(t){var o=n&&("load"===n.type?"missing":n.type),i=n&&n.target&&n.target.src,u=new Error("Loading chunk "+e+" failed.\n("+o+": "+i+")");u.type=o,u.request=i,t[1](u)}r[e]=void 0}};var f=setTimeout(function(){u({type:"timeout",target:c})},12e4);c.onerror=c.onload=u,a.appendChild(c)}return Promise.all(n)},o.m=e,o.c=t,o.d=function(e,n,t){o.o(e,n)||Object.defineProperty(e,n,{enumerable:!0,get:t})},o.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.t=function(e,n){if(1&n&&(e=o(e)),8&n)return e;if(4&n&&"object"==typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(o.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&n&&"string"!=typeof e)for(var r in e)o.d(t,r,function(n){return e[n]}.bind(null,r));return t},o.n=function(e){var n=e&&e.__esModule?function(){return e.default}:function(){return e};return o.d(n,"a",n),n},o.o=function(e,n){return Object.prototype.hasOwnProperty.call(e,n)},o.p="/api/hassio/app/",o.oe=function(e){throw console.error(e),e};var i=window.webpackJsonp=window.webpackJsonp||[],u=i.push.bind(i);i.push=n,i=i.slice();for(var a=0;a<i.length;a++)n(i[a]);var c=u;o(o.s=0)}([function(e,n,t){window.loadES5Adapter().then(function(){Promise.all([t.e(0),t.e(2)]).then(t.bind(null,2)),Promise.all([t.e(0),t.e(6),t.e(3)]).then(t.bind(null,1))})}]);
//# sourceMappingURL=entrypoint.js.map
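
The minified entrypoint above is webpack's JSONP runtime (a chunk registry, lazy script injection with a 120-second timeout, and the `chunk.<hash>.js` name map served from `/api/hassio/app/`) followed by one small bootstrap module. Below is a rough de-minified sketch of that bootstrap, with webpack's conventional helper names spelled out (`__webpack_require__.e` loads a chunk, `.bind(null, id)` requires a module afterwards); the chunk and module ids are read directly off the minified code.

// Sketch of module 0, the entry point of the bundle.
window.loadES5Adapter().then(function () {
  // Load chunks 0 and 2, then require module 2.
  Promise.all([__webpack_require__.e(0), __webpack_require__.e(2)])
    .then(__webpack_require__.bind(null, 2));
  // Load chunks 0, 6 and 3, then require module 1.
  Promise.all([__webpack_require__.e(0), __webpack_require__.e(6), __webpack_require__.e(3)])
    .then(__webpack_require__.bind(null, 1));
});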

Binary file not shown.

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.

@@ -0,0 +1,38 @@
<!doctype html>
<html>
<head>
  <meta charset="utf-8">
  <title>Hass.io</title>
  <meta name='viewport' content='width=device-width, user-scalable=no'>
  <style>
    body {
      height: 100vh;
      margin: 0;
      padding: 0;
    }
  </style>
  <script src='/frontend_es5/custom-elements-es5-adapter.js'></script>
</head>
<body>
  <hassio-app></hassio-app>
  <script>
    function addScript(src) {
      var e = document.createElement('script');
      e.src = src;
      document.write(e.outerHTML);
    }
    var webComponentsSupported = (
      'customElements' in window &&
      'import' in document.createElement('link') &&
      'content' in document.createElement('template'));
    if (!webComponentsSupported) {
      addScript('/static/webcomponents-lite.js');
    }
  </script>
  <!--
    Disabled while we make Home Assistant able to serve the right files.
    <script src="./app.js"></script>
  -->
  <link rel='import' href='./hassio-app.html'>
</body>
</html>
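
One detail worth noting in the page above: `addScript` injects the polyfill with `document.write(e.outerHTML)`, which is parser-blocking, so `webcomponents-lite.js` is guaranteed to run before the `<link rel='import'>` below it is processed. The call boils down to this sketch:

// Equivalent of addScript('/static/webcomponents-lite.js'):
document.write('<script src="/static/webcomponents-lite.js"><\/script>');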

Binary file not shown.

255
hassio/api/proxy.py Normal file
@@ -0,0 +1,255 @@
"""Utils for Home Assistant Proxy."""
import asyncio
from contextlib import asynccontextmanager
import logging
import aiohttp
from aiohttp import web
from aiohttp.web_exceptions import HTTPBadGateway, HTTPUnauthorized
from aiohttp.client_exceptions import ClientConnectorError
from aiohttp.hdrs import CONTENT_TYPE, AUTHORIZATION
import async_timeout
from ..const import HEADER_HA_ACCESS
from ..coresys import CoreSysAttributes
from ..exceptions import (
HomeAssistantAuthError, HomeAssistantAPIError, APIError)
_LOGGER = logging.getLogger(__name__)
class APIProxy(CoreSysAttributes):
"""API Proxy for Home Assistant."""
def _check_access(self, request):
"""Check the Hass.io token."""
if AUTHORIZATION in request.headers:
bearer = request.headers[AUTHORIZATION]
hassio_token = bearer.split(' ')[-1]
else:
hassio_token = request.headers.get(HEADER_HA_ACCESS)
addon = self.sys_addons.from_token(hassio_token)
if not addon:
_LOGGER.warning("Unknown Home Assistant API access!")
elif not addon.access_homeassistant_api:
_LOGGER.warning("Not permitted API access: %s", addon.slug)
else:
_LOGGER.info("%s access from %s", request.path, addon.slug)
return
raise HTTPUnauthorized()
@asynccontextmanager
async def _api_client(self, request, path, timeout=300):
"""Return a client request with proxy origin for Home Assistant."""
try:
# read data
with async_timeout.timeout(30):
data = await request.read()
if data:
content_type = request.content_type
else:
content_type = None
async with self.sys_homeassistant.make_request(
request.method.lower(), f'api/{path}',
content_type=content_type,
data=data,
timeout=timeout,
) as resp:
yield resp
return
except HomeAssistantAuthError:
_LOGGER.error("Authenticate error on API for request %s", path)
except HomeAssistantAPIError:
_LOGGER.error("Error on API for request %s", path)
except aiohttp.ClientError as err:
_LOGGER.error("Client error on API %s request %s", path, err)
except asyncio.TimeoutError:
_LOGGER.error("Client timeout error on API request %s", path)
raise HTTPBadGateway()
async def stream(self, request):
"""Proxy HomeAssistant EventStream Requests."""
self._check_access(request)
_LOGGER.info("Home Assistant EventStream start")
async with self._api_client(request, 'stream', timeout=None) as client:
response = web.StreamResponse()
response.content_type = request.headers.get(CONTENT_TYPE)
try:
await response.prepare(request)
async for data in client.content:
await response.write(data)
except (aiohttp.ClientError, aiohttp.ClientPayloadError):
pass
_LOGGER.info("Home Assistant EventStream close")
return response
async def api(self, request):
"""Proxy Home Assistant API Requests."""
self._check_access(request)
# Normal request
path = request.match_info.get('path', '')
async with self._api_client(request, path) as client:
data = await client.read()
return web.Response(
body=data,
status=client.status,
content_type=client.content_type
)
async def _websocket_client(self):
"""Initialize a WebSocket API connection."""
url = f"{self.sys_homeassistant.api_url}/api/websocket"
try:
client = await self.sys_websession_ssl.ws_connect(
url, heartbeat=30, verify_ssl=False)
# Handle authentication
data = await client.receive_json()
if data.get('type') == 'auth_ok':
return client
if data.get('type') != 'auth_required':
# Invalid protocol
_LOGGER.error(
"Got unexpected response from HA WebSocket: %s", data)
raise APIError()
if self.sys_homeassistant.refresh_token:
await self.sys_homeassistant.ensure_access_token()
await client.send_json({
'type': 'auth',
'access_token': self.sys_homeassistant.access_token,
})
else:
await client.send_json({
'type': 'auth',
'api_password': self.sys_homeassistant.api_password,
})
data = await client.receive_json()
if data.get('type') == 'auth_ok':
return client
# Renew the token if it is invalid
if (data.get('type') == 'invalid_auth' and
self.sys_homeassistant.refresh_token):
self.sys_homeassistant.access_token = None
return await self._websocket_client()
raise HomeAssistantAuthError()
except (RuntimeError, ValueError, ClientConnectorError) as err:
_LOGGER.error("Client error on WebSocket API %s.", err)
except HomeAssistantAuthError:
_LOGGER.error("Failed authentication to Home Assistant WebSocket")
raise APIError()
async def websocket(self, request):
"""Initialize a WebSocket API connection."""
_LOGGER.info("Home Assistant WebSocket API request initialize")
# init server
server = web.WebSocketResponse(heartbeat=30)
await server.prepare(request)
# handle authentication
try:
await server.send_json({
'type': 'auth_required',
'ha_version': self.sys_homeassistant.version,
})
# Check API access
response = await server.receive_json()
hassio_token = (response.get('api_password') or
response.get('access_token'))
addon = self.sys_addons.from_token(hassio_token)
if not addon or not addon.access_homeassistant_api:
_LOGGER.warning("Unauthorized WebSocket access!")
await server.send_json({
'type': 'auth_invalid',
'message': 'Invalid access',
})
return server
_LOGGER.info("WebSocket access from %s", addon.slug)
await server.send_json({
'type': 'auth_ok',
'ha_version': self.sys_homeassistant.version,
})
except (RuntimeError, ValueError) as err:
_LOGGER.error("Can't initialize handshake: %s", err)
return server
# init connection to hass
try:
client = await self._websocket_client()
except APIError:
return server
_LOGGER.info("Home Assistant WebSocket API request running")
try:
client_read = None
server_read = None
while not server.closed and not client.closed:
if not client_read:
client_read = self.sys_create_task(
client.receive_str())
if not server_read:
server_read = self.sys_create_task(
server.receive_str())
# wait until data need to be processed
await asyncio.wait(
[client_read, server_read],
return_when=asyncio.FIRST_COMPLETED
)
# server
if server_read.done() and not client.closed:
server_read.exception()
await client.send_str(server_read.result())
server_read = None
# client
if client_read.done() and not server.closed:
client_read.exception()
await server.send_str(client_read.result())
client_read = None
except asyncio.CancelledError:
pass
except (RuntimeError, ConnectionError, TypeError) as err:
_LOGGER.info("Home Assistant WebSocket API error: %s", err)
finally:
if client_read:
client_read.cancel()
if server_read:
server_read.cancel()
# close connections
if not client.closed:
await client.close()
if not server.closed:
await server.close()
_LOGGER.info("Home Assistant WebSocket API connection is closed")
return server
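
Usage sketch (not part of this diff): an add-on granted Home Assistant API access can reach these proxy handlers through the Supervisor. The http://hassio host name and the HASSIO_TOKEN environment variable are assumptions about the add-on runtime of this era, not something proxy.py itself defines; the bearer header, however, is exactly what _check_access() looks for first.

"""Sketch: calling the proxied Home Assistant API from an add-on."""
import asyncio
import os

import aiohttp

SUPERVISOR = "http://hassio"  # assumed internal host name of the Supervisor
TOKEN = os.environ.get("HASSIO_TOKEN", "")  # assumed to be injected at add-on start


async def fetch_states():
    # Bearer token is the first thing APIProxy._check_access() inspects.
    headers = {"Authorization": f"Bearer {TOKEN}"}
    async with aiohttp.ClientSession(headers=headers) as session:
        # Served by APIProxy.api(), which forwards to /api/states upstream.
        async with session.get(f"{SUPERVISOR}/homeassistant/api/states") as resp:
            print(resp.status, await resp.json())


asyncio.get_event_loop().run_until_complete(fetch_states())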

140
hassio/api/security.py Normal file
View File

@@ -0,0 +1,140 @@
"""Handle security part of this API."""
import logging
import re
from aiohttp.web import middleware
from aiohttp.web_exceptions import HTTPUnauthorized, HTTPForbidden
from ..const import (
HEADER_TOKEN, REQUEST_FROM, ROLE_ADMIN, ROLE_DEFAULT, ROLE_HOMEASSISTANT,
ROLE_MANAGER, ROLE_BACKUP)
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
# Block Anytime
BLACKLIST = re.compile(
r"^(?:"
r"|/homeassistant/api/hassio/.*"
r")$"
)
# Free to call or has its own security concept
NO_SECURITY_CHECK = re.compile(
r"^(?:"
r"|/homeassistant/api/.*"
r"|/homeassistant/websocket"
r"|/supervisor/ping"
r")$"
)
# Can be called by every add-on
ADDONS_API_BYPASS = re.compile(
r"^(?:"
r"|/addons/self/(?!security|update)[^/]+"
r"|/info"
r"|/services.*"
r"|/discovery.*"
r"|/auth"
r")$"
)
# Policy for role-based add-on API access
ADDONS_ROLE_ACCESS = {
ROLE_DEFAULT: re.compile(
r"^(?:"
r"|/[^/]+/info"
r"|/addons"
r")$"
),
ROLE_HOMEASSISTANT: re.compile(
r"^(?:"
r"|/homeassistant/.+"
r")$"
),
ROLE_BACKUP: re.compile(
r"^(?:"
r"|/snapshots.*"
r")$"
),
ROLE_MANAGER: re.compile(
r"^(?:"
r"|/homeassistant/.+"
r"|/host/.+"
r"|/hardware/.+"
r"|/hassos/.+"
r"|/supervisor/.+"
r"|/addons(?:/[^/]+/(?!security).+)?"
r"|/snapshots.*"
r")$"
),
ROLE_ADMIN: re.compile(
r".*"
),
}
class SecurityMiddleware(CoreSysAttributes):
"""Security middleware functions."""
def __init__(self, coresys):
"""Initialize security middleware."""
self.coresys = coresys
@middleware
async def token_validation(self, request, handler):
"""Check security access of this layer."""
request_from = None
hassio_token = request.headers.get(HEADER_TOKEN)
# Blacklist
if BLACKLIST.match(request.path):
_LOGGER.warning("%s is blacklisted!", request.path)
raise HTTPForbidden()
# Ignore security check
if NO_SECURITY_CHECK.match(request.path):
_LOGGER.debug("Passthrough %s", request.path)
return await handler(request)
# No token
if not hassio_token:
_LOGGER.warning("No API token provided for %s", request.path)
raise HTTPUnauthorized()
# Home Assistant
# UUID check can be removed with version 131
if hassio_token in (self.sys_homeassistant.uuid,
self.sys_homeassistant.hassio_token):
_LOGGER.debug("%s access from Home Assistant", request.path)
request_from = self.sys_homeassistant
# Host
if hassio_token == self.sys_machine_id:
_LOGGER.debug("%s access from Host", request.path)
request_from = self.sys_host
# Add-on
addon = None
if hassio_token and not request_from:
addon = self.sys_addons.from_token(hassio_token)
# Check Add-on API access
if addon and ADDONS_API_BYPASS.match(request.path):
_LOGGER.debug("Passthrough %s from %s", request.path, addon.slug)
request_from = addon
elif addon and addon.access_hassio_api:
# Check Role
if ADDONS_ROLE_ACCESS[addon.hassio_role].match(request.path):
_LOGGER.info("%s access from %s", request.path, addon.slug)
request_from = addon
else:
_LOGGER.warning("%s no role for %s", request.path, addon.slug)
if request_from:
request[REQUEST_FROM] = request_from
return await handler(request)
_LOGGER.error("Invalid token for access %s", request.path)
raise HTTPForbidden()
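
The role table above is easiest to read with concrete probes. A minimal self-contained sketch (the pattern is copied verbatim from ROLE_MANAGER in ADDONS_ROLE_ACCESS; it needs nothing beyond the standard library):

import re

# Copy of the ROLE_MANAGER pattern from ADDONS_ROLE_ACCESS above.
MANAGER = re.compile(
    r"^(?:"
    r"|/homeassistant/.+"
    r"|/host/.+"
    r"|/hardware/.+"
    r"|/hassos/.+"
    r"|/supervisor/.+"
    r"|/addons(?:/[^/]+/(?!security).+)?"
    r"|/snapshots.*"
    r")$"
)

# A manager-role add-on may restart Home Assistant...
assert MANAGER.match("/homeassistant/restart")
# ...and read another add-on's info...
assert MANAGER.match("/addons/core_ssh/info")
# ...but the negative lookahead keeps security options off limits.
assert not MANAGER.match("/addons/core_ssh/security")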

View File

@@ -1,16 +1,11 @@
-"""Init file for Supervisor network RESTful API."""
+"""Init file for Hass.io network RESTful API."""
-from ..const import (
-    ATTR_AVAILABLE,
-    ATTR_PROVIDERS,
-    ATTR_SERVICES,
-    ATTR_SLUG,
-    PROVIDE_SERVICE,
-    REQUEST_FROM,
-)
-from ..coresys import CoreSysAttributes
-from ..exceptions import APIError, APIForbidden, APINotFound
 from .utils import api_process, api_validate
+from ..const import (
+    ATTR_AVAILABLE, ATTR_PROVIDERS, ATTR_SLUG, ATTR_SERVICES, REQUEST_FROM,
+    PROVIDE_SERVICE)
+from ..coresys import CoreSysAttributes
+from ..exceptions import APIError, APIForbidden
 class APIServices(CoreSysAttributes):
@@ -18,24 +13,22 @@ class APIServices(CoreSysAttributes):
     def _extract_service(self, request):
         """Return service, throw an exception if it doesn't exist."""
-        service = self.sys_services.get(request.match_info.get("service"))
+        service = self.sys_services.get(request.match_info.get('service'))
         if not service:
-            raise APINotFound("Service does not exist")
+            raise APIError("Service does not exist")
         return service
     @api_process
-    async def list_services(self, request):
+    async def list(self, request):
         """Show register services."""
         services = []
         for service in self.sys_services.list_services:
-            services.append(
-                {
-                    ATTR_SLUG: service.slug,
-                    ATTR_AVAILABLE: service.enabled,
-                    ATTR_PROVIDERS: service.providers,
-                }
-            )
+            services.append({
+                ATTR_SLUG: service.slug,
+                ATTR_AVAILABLE: service.enabled,
+                ATTR_PROVIDERS: service.providers,
+            })
         return {ATTR_SERVICES: services}
@@ -47,7 +40,7 @@ class APIServices(CoreSysAttributes):
         addon = request[REQUEST_FROM]
         _check_access(request, service.slug)
-        await service.set_service_data(addon, body)
+        service.set_service_data(addon, body)
     @api_process
     async def get_service(self, request):
@@ -69,7 +62,7 @@ class APIServices(CoreSysAttributes):
         # Access
         _check_access(request, service.slug, True)
-        await service.del_service_data(addon)
+        service.del_service_data(addon)
 def _check_access(request, service, provide=False):
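
For context, a provider add-on would drive this API roughly as below. This is a sketch: the mqtt service slug and its payload fields are illustrative, the X-HASSIO-KEY header name is assumed from HEADER_TOKEN in security.py, and the host/token assumptions match the earlier proxy example.

"""Sketch: an add-on publishing the 'mqtt' service via this API."""
import asyncio
import os

import aiohttp

SUPERVISOR = "http://hassio"  # assumed internal host name
TOKEN = os.environ.get("HASSIO_TOKEN", "")  # assumed add-on token


async def register_mqtt():
    headers = {"X-HASSIO-KEY": TOKEN}  # header name assumed from HEADER_TOKEN
    payload = {"host": "172.30.32.1", "port": 1883}  # illustrative values only
    async with aiohttp.ClientSession(headers=headers) as session:
        # Handled by the set-service handler, which calls
        # service.set_service_data(addon, body) as shown in the diff above.
        async with session.post(f"{SUPERVISOR}/services/mqtt", json=payload) as resp:
            print(await resp.json())


asyncio.get_event_loop().run_until_complete(register_mqtt())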

187
hassio/api/snapshots.py Normal file
View File

@@ -0,0 +1,187 @@
"""Init file for Hass.io snapshot RESTful API."""
import asyncio
import logging
from pathlib import Path
from tempfile import TemporaryDirectory
from aiohttp import web
import voluptuous as vol
from .utils import api_process, api_validate
from ..snapshots.validate import ALL_FOLDERS
from ..const import (
ATTR_NAME, ATTR_SLUG, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
ATTR_HOMEASSISTANT, ATTR_VERSION, ATTR_SIZE, ATTR_FOLDERS, ATTR_TYPE,
ATTR_SNAPSHOTS, ATTR_PASSWORD, ATTR_PROTECTED, CONTENT_TYPE_TAR)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
_LOGGER = logging.getLogger(__name__)
# pylint: disable=no-value-for-parameter
SCHEMA_RESTORE_PARTIAL = vol.Schema({
vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
vol.Optional(ATTR_ADDONS):
vol.All([vol.Coerce(str)], vol.Unique()),
vol.Optional(ATTR_FOLDERS):
vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
})
SCHEMA_RESTORE_FULL = vol.Schema({
vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
})
SCHEMA_SNAPSHOT_FULL = vol.Schema({
vol.Optional(ATTR_NAME): vol.Coerce(str),
vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
})
SCHEMA_SNAPSHOT_PARTIAL = SCHEMA_SNAPSHOT_FULL.extend({
vol.Optional(ATTR_ADDONS):
vol.All([vol.Coerce(str)], vol.Unique()),
vol.Optional(ATTR_FOLDERS):
vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
})
class APISnapshots(CoreSysAttributes):
"""Handle RESTful API for snapshot functions."""
def _extract_snapshot(self, request):
"""Return snapshot, throw an exception if it doesn't exist."""
snapshot = self.sys_snapshots.get(request.match_info.get('snapshot'))
if not snapshot:
raise APIError("Snapshot does not exist")
return snapshot
@api_process
async def list(self, request):
"""Return snapshot list."""
data_snapshots = []
for snapshot in self.sys_snapshots.list_snapshots:
data_snapshots.append({
ATTR_SLUG: snapshot.slug,
ATTR_NAME: snapshot.name,
ATTR_DATE: snapshot.date,
ATTR_TYPE: snapshot.sys_type,
ATTR_PROTECTED: snapshot.protected,
})
return {
ATTR_SNAPSHOTS: data_snapshots,
}
@api_process
async def reload(self, request):
"""Reload snapshot list."""
await asyncio.shield(self.sys_snapshots.reload())
return True
@api_process
async def info(self, request):
"""Return snapshot info."""
snapshot = self._extract_snapshot(request)
data_addons = []
for addon_data in snapshot.addons:
data_addons.append({
ATTR_SLUG: addon_data[ATTR_SLUG],
ATTR_NAME: addon_data[ATTR_NAME],
ATTR_VERSION: addon_data[ATTR_VERSION],
ATTR_SIZE: addon_data[ATTR_SIZE],
})
return {
ATTR_SLUG: snapshot.slug,
ATTR_TYPE: snapshot.sys_type,
ATTR_NAME: snapshot.name,
ATTR_DATE: snapshot.date,
ATTR_SIZE: snapshot.size,
ATTR_PROTECTED: snapshot.protected,
ATTR_HOMEASSISTANT: snapshot.homeassistant_version,
ATTR_ADDONS: data_addons,
ATTR_REPOSITORIES: snapshot.repositories,
ATTR_FOLDERS: snapshot.folders,
}
@api_process
async def snapshot_full(self, request):
"""Full-Snapshot a snapshot."""
body = await api_validate(SCHEMA_SNAPSHOT_FULL, request)
snapshot = await asyncio.shield(
self.sys_snapshots.do_snapshot_full(**body))
if snapshot:
return {ATTR_SLUG: snapshot.slug}
return False
@api_process
async def snapshot_partial(self, request):
"""Partial-Snapshot a snapshot."""
body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
snapshot = await asyncio.shield(
self.sys_snapshots.do_snapshot_partial(**body))
if snapshot:
return {ATTR_SLUG: snapshot.slug}
return False
@api_process
async def restore_full(self, request):
"""Full-Restore a snapshot."""
snapshot = self._extract_snapshot(request)
body = await api_validate(SCHEMA_RESTORE_FULL, request)
return await asyncio.shield(
self.sys_snapshots.do_restore_full(snapshot, **body))
@api_process
async def restore_partial(self, request):
"""Partial-Restore a snapshot."""
snapshot = self._extract_snapshot(request)
body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
return await asyncio.shield(
self.sys_snapshots.do_restore_partial(snapshot, **body))
@api_process
async def remove(self, request):
"""Remove a snapshot."""
snapshot = self._extract_snapshot(request)
return self.sys_snapshots.remove(snapshot)
async def download(self, request):
"""Download a snapshot file."""
snapshot = self._extract_snapshot(request)
_LOGGER.info("Download snapshot %s", snapshot.slug)
response = web.FileResponse(snapshot.tarfile)
response.content_type = CONTENT_TYPE_TAR
return response
@api_process
async def upload(self, request):
"""Upload a snapshot file."""
with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp_dir:
tar_file = Path(temp_dir, f"snapshot.tar")
try:
with tar_file.open('wb') as snapshot:
async for data in request.content.iter_any():
snapshot.write(data)
except OSError as err:
_LOGGER.error("Can't write new snapshot file: %s", err)
return False
except asyncio.CancelledError:
return False
snapshot = await asyncio.shield(
self.sys_snapshots.import_snapshot(tar_file))
if snapshot:
return {ATTR_SLUG: snapshot.slug}
return False
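
The upload handler above streams the raw request body straight into a tar file, so a client simply POSTs the archive bytes. A sketch, with the endpoint path and header name assumed from this era's API map rather than stated in this file:

"""Sketch: uploading a snapshot tar to the Supervisor."""
import asyncio
import os

import aiohttp

SUPERVISOR = "http://hassio"  # assumed internal host name
TOKEN = os.environ.get("HASSIO_TOKEN", "")  # assumed add-on token


async def upload_snapshot(path):
    headers = {"X-HASSIO-KEY": TOKEN}  # assumed value of HEADER_TOKEN
    async with aiohttp.ClientSession(headers=headers) as session:
        with open(path, "rb") as tar:
            # upload() iterates request.content, so a bare body is enough.
            async with session.post(
                    f"{SUPERVISOR}/snapshots/new/upload", data=tar) as resp:
                print(await resp.json())


asyncio.get_event_loop().run_until_complete(upload_snapshot("my-snapshot.tar"))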

140
hassio/api/supervisor.py Normal file
View File

@@ -0,0 +1,140 @@
"""Init file for Hass.io Supervisor RESTful API."""
import asyncio
import logging
import voluptuous as vol
from .utils import api_process, api_process_raw, api_validate
from ..const import (
ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_CHANNEL, ATTR_ARCH,
HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_LOGO, ATTR_REPOSITORY,
ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_TIMEZONE,
ATTR_STATE, ATTR_WAIT_BOOT, ATTR_CPU_PERCENT, ATTR_MEMORY_USAGE,
ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX, ATTR_BLK_READ,
ATTR_BLK_WRITE, CONTENT_TYPE_BINARY, ATTR_ICON)
from ..coresys import CoreSysAttributes
from ..validate import WAIT_BOOT, REPOSITORIES, CHANNELS
from ..exceptions import APIError
from ..utils.validate import validate_timezone
_LOGGER = logging.getLogger(__name__)
SCHEMA_OPTIONS = vol.Schema({
vol.Optional(ATTR_CHANNEL): CHANNELS,
vol.Optional(ATTR_ADDONS_REPOSITORIES): REPOSITORIES,
vol.Optional(ATTR_TIMEZONE): validate_timezone,
vol.Optional(ATTR_WAIT_BOOT): WAIT_BOOT,
})
SCHEMA_VERSION = vol.Schema({
vol.Optional(ATTR_VERSION): vol.Coerce(str),
})
class APISupervisor(CoreSysAttributes):
"""Handle RESTful API for Supervisor functions."""
@api_process
async def ping(self, request):
"""Return ok for signal that the API is ready."""
return True
@api_process
async def info(self, request):
"""Return host information."""
list_addons = []
for addon in self.sys_addons.list_addons:
if addon.is_installed:
list_addons.append({
ATTR_NAME: addon.name,
ATTR_SLUG: addon.slug,
ATTR_DESCRIPTON: addon.description,
ATTR_STATE: await addon.state(),
ATTR_VERSION: addon.last_version,
ATTR_INSTALLED: addon.version_installed,
ATTR_REPOSITORY: addon.repository,
ATTR_ICON: addon.with_icon,
ATTR_LOGO: addon.with_logo,
})
return {
ATTR_VERSION: HASSIO_VERSION,
ATTR_LAST_VERSION: self.sys_updater.version_hassio,
ATTR_CHANNEL: self.sys_updater.channel,
ATTR_ARCH: self.sys_arch,
ATTR_WAIT_BOOT: self.sys_config.wait_boot,
ATTR_TIMEZONE: self.sys_config.timezone,
ATTR_ADDONS: list_addons,
ATTR_ADDONS_REPOSITORIES: self.sys_config.addons_repositories,
}
@api_process
async def options(self, request):
"""Set Supervisor options."""
body = await api_validate(SCHEMA_OPTIONS, request)
if ATTR_CHANNEL in body:
self.sys_updater.channel = body[ATTR_CHANNEL]
if ATTR_TIMEZONE in body:
self.sys_config.timezone = body[ATTR_TIMEZONE]
if ATTR_WAIT_BOOT in body:
self.sys_config.wait_boot = body[ATTR_WAIT_BOOT]
if ATTR_ADDONS_REPOSITORIES in body:
new = set(body[ATTR_ADDONS_REPOSITORIES])
await asyncio.shield(self.sys_addons.load_repositories(new))
self.sys_updater.save_data()
self.sys_config.save_data()
return True
@api_process
async def stats(self, request):
"""Return resource information."""
stats = await self.sys_supervisor.stats()
if not stats:
raise APIError("No stats available")
return {
ATTR_CPU_PERCENT: stats.cpu_percent,
ATTR_MEMORY_USAGE: stats.memory_usage,
ATTR_MEMORY_LIMIT: stats.memory_limit,
ATTR_NETWORK_RX: stats.network_rx,
ATTR_NETWORK_TX: stats.network_tx,
ATTR_BLK_READ: stats.blk_read,
ATTR_BLK_WRITE: stats.blk_write,
}
@api_process
async def update(self, request):
"""Update Supervisor OS."""
body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self.sys_updater.version_hassio)
if version == self.sys_supervisor.version:
raise APIError("Version {} is already in use".format(version))
return await asyncio.shield(
self.sys_supervisor.update(version))
@api_process
async def reload(self, request):
"""Reload add-ons, configuration, etc."""
tasks = [
self.sys_updater.reload(),
]
results, _ = await asyncio.shield(
asyncio.wait(tasks))
for result in results:
if result.exception() is not None:
raise APIError("Some reload task fails!")
return True
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request):
"""Return supervisor Docker logs."""
return self.sys_supervisor.logs()
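
Tying the handlers together, a short client sketch that switches the update channel and then requests an update. The paths mirror the handlers above; the host, token, and header assumptions are the same as in the earlier examples, and "beta" is just an illustrative channel value accepted by the CHANNELS validator.

"""Sketch: driving the Supervisor options/update endpoints."""
import asyncio
import os

import aiohttp

SUPERVISOR = "http://hassio"  # assumed internal host name
TOKEN = os.environ.get("HASSIO_TOKEN", "")  # assumed add-on token


async def switch_channel_and_update():
    headers = {"X-HASSIO-KEY": TOKEN}  # assumed value of HEADER_TOKEN
    async with aiohttp.ClientSession(headers=headers) as session:
        # Body is validated against SCHEMA_OPTIONS in APISupervisor.options().
        async with session.post(
                f"{SUPERVISOR}/supervisor/options",
                json={"channel": "beta"}) as resp:
            print(await resp.json())
        # No explicit version -> update() falls back to version_hassio.
        async with session.post(f"{SUPERVISOR}/supervisor/update", json={}) as resp:
            print(await resp.json())


asyncio.get_event_loop().run_until_complete(switch_channel_and_update())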

Some files were not shown because too many files have changed in this diff.