Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-08-04 23:07:42 +00:00)

Compare commits
No commits in common. "main" and "197" have entirely different histories.
43  .devcontainer/Dockerfile  Normal file
@@ -0,0 +1,43 @@
FROM python:3.7

WORKDIR /workspaces

# Install Node/Yarn for Frontend
RUN apt-get update && apt-get install -y --no-install-recommends \
        curl \
        git \
        apt-utils \
        apt-transport-https \
    && curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \
    && echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list \
    && apt-get update && apt-get install -y --no-install-recommends \
        nodejs \
        yarn \
    && curl -o - https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | bash \
    && rm -rf /var/lib/apt/lists/*
ENV NVM_DIR /root/.nvm

# Install docker
# https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/
RUN apt-get update && apt-get install -y --no-install-recommends \
        apt-transport-https \
        ca-certificates \
        curl \
        software-properties-common \
        gpg-agent \
    && curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add - \
    && add-apt-repository "deb https://download.docker.com/linux/debian $(lsb_release -cs) stable" \
    && apt-get update && apt-get install -y --no-install-recommends \
        docker-ce \
        docker-ce-cli \
        containerd.io \
    && rm -rf /var/lib/apt/lists/*

# Install Python dependencies from requirements.txt if it exists
COPY requirements.txt requirements_tests.txt ./
RUN pip3 install -r requirements.txt -r requirements_tests.txt \
    && pip3 install tox \
    && rm -f requirements.txt requirements_tests.txt

# Set the default shell to bash instead of sh
ENV SHELL /bin/bash
.devcontainer/devcontainer.json
@@ -1,51 +1,31 @@
 // See https://aka.ms/vscode-remote/devcontainer.json for format details.
 {
-    "name": "Supervisor dev",
-    "image": "ghcr.io/home-assistant/devcontainer:2-supervisor",
-    "containerEnv": {
-        "WORKSPACE_DIRECTORY": "${containerWorkspaceFolder}"
-    },
-    "remoteEnv": {
-        "PATH": "${containerEnv:VIRTUAL_ENV}/bin:${containerEnv:PATH}"
-    },
-    "appPort": ["9123:8123", "7357:4357"],
-    "postCreateCommand": "bash devcontainer_setup",
-    "postStartCommand": "bash devcontainer_bootstrap",
-    "runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
-    "customizations": {
-        "vscode": {
-            "extensions": [
-                "charliermarsh.ruff",
-                "ms-python.pylint",
-                "ms-python.vscode-pylance",
-                "visualstudioexptteam.vscodeintellicode",
-                "redhat.vscode-yaml",
-                "esbenp.prettier-vscode",
-                "GitHub.vscode-pull-request-github"
-            ],
-            "settings": {
-                "python.defaultInterpreterPath": "/home/vscode/.local/ha-venv/bin/python",
-                "python.pythonPath": "/home/vscode/.local/ha-venv/bin/python",
-                "python.terminal.activateEnvInCurrentTerminal": true,
-                "python.testing.pytestArgs": ["--no-cov"],
-                "pylint.importStrategy": "fromEnvironment",
-                "editor.formatOnPaste": false,
-                "editor.formatOnSave": true,
-                "editor.formatOnType": true,
-                "files.trimTrailingWhitespace": true,
-                "terminal.integrated.profiles.linux": {
-                    "zsh": {
-                        "path": "/usr/bin/zsh"
-                    }
-                },
-                "terminal.integrated.defaultProfile.linux": "zsh",
-                "[python]": {
-                    "editor.defaultFormatter": "charliermarsh.ruff"
-                }
-            }
-        }
-    },
-    "mounts": [
-        "type=volume,target=/var/lib/docker",
-        "type=volume,target=/mnt/supervisor"
-    ]
-}
+    "name": "Hass.io dev",
+    "context": "..",
+    "dockerFile": "Dockerfile",
+    "appPort": "9123:8123",
+    "runArgs": [
+        "-e",
+        "GIT_EDITOR=code --wait",
+        "--privileged"
+    ],
+    "extensions": [
+        "ms-python.python",
+        "visualstudioexptteam.vscodeintellicode",
+        "esbenp.prettier-vscode"
+    ],
+    "settings": {
+        "python.pythonPath": "/usr/local/bin/python",
+        "python.linting.pylintEnabled": true,
+        "python.linting.enabled": true,
+        "python.formatting.provider": "black",
+        "python.formatting.blackArgs": [
+            "--target-version",
+            "py37"
+        ],
+        "editor.formatOnPaste": false,
+        "editor.formatOnSave": true,
+        "editor.formatOnType": true,
+        "files.trimTrailingWhitespace": true
+    }
+}
@@ -14,10 +14,10 @@
 # virtualenv
 venv/

-# Data
-home-assistant-polymer/
-script/
-tests/
+# HA
+home-assistant-polymer/*
+misc/*
+script/*

 # Test ENV
 data/
29  .github/ISSUE_TEMPLATE.md  vendored  Normal file
@@ -0,0 +1,29 @@
<!-- READ THIS FIRST:
- If you need additional help with this template please refer to https://www.home-assistant.io/help/reporting_issues/
- Make sure you are running the latest version of Home Assistant before reporting an issue: https://github.com/home-assistant/home-assistant/releases
- Do not report issues for components here, please refer to https://github.com/home-assistant/home-assistant/issues
- This is for bugs only. Feature and enhancement requests should go in our community forum: https://community.home-assistant.io/c/feature-requests
- Provide as many details as possible. Paste logs, configuration sample and code into the backticks. Do not delete any text from this template!
- If you have a problem with an add-on, open an issue on its repository.
-->

**Home Assistant release with the issue:**
<!--
- Frontend -> Developer tools -> Info
- Or use this command: hass --version
-->

**Operating environment (HassOS/Generic):**
<!--
Please provide details about your environment.
-->

**Supervisor logs:**
<!--
- Frontend -> Hass.io -> System
- Or use this command: hassio su logs
-->


**Description of problem:**
96  .github/ISSUE_TEMPLATE/bug_report.yml  vendored
@@ -1,96 +0,0 @@
name: Report an issue with Home Assistant Supervisor
description: Report an issue related to the Home Assistant Supervisor.
body:
  - type: markdown
    attributes:
      value: |
        This issue form is for reporting bugs with **supported** setups only!

        If you have a feature or enhancement request, please use the [feature request][fr] section of our [Community Forum][fr].

        [fr]: https://github.com/orgs/home-assistant/discussions
  - type: textarea
    validations:
      required: true
    attributes:
      label: Describe the issue you are experiencing
      description: Provide a clear and concise description of what the bug is.
  - type: markdown
    attributes:
      value: |
        ## Environment
  - type: dropdown
    validations:
      required: true
    attributes:
      label: What type of installation are you running?
      description: >
        If you don't know, it can be found in [Settings -> System -> Repairs -> (three dot menu) -> System Information](https://my.home-assistant.io/redirect/system_health/).
        It is listed as the `Installation Type` value.
      options:
        - Home Assistant OS
        - Home Assistant Supervised
  - type: dropdown
    validations:
      required: true
    attributes:
      label: Which operating system are you running on?
      options:
        - Home Assistant Operating System
        - Debian
        - Other (e.g., Raspbian/Raspberry Pi OS/Fedora)
  - type: markdown
    attributes:
      value: |
        # Details
  - type: textarea
    validations:
      required: true
    attributes:
      label: Steps to reproduce the issue
      description: |
        Please tell us exactly how to reproduce your issue.
        Provide clear and concise step by step instructions and add code snippets if needed.
      value: |
        1.
        2.
        3.
        ...
  - type: textarea
    validations:
      required: true
    attributes:
      label: Anything in the Supervisor logs that might be useful for us?
      description: >
        Supervisor Logs can be found in [Settings -> System -> Logs](https://my.home-assistant.io/redirect/logs/)
        then choose `Supervisor` in the top right.

        [](https://my.home-assistant.io/redirect/supervisor_logs/)
      render: txt
  - type: textarea
    validations:
      required: true
    attributes:
      label: System information
      description: >
        The System information can be found in [Settings -> System -> Repairs -> (three dot menu) -> System Information](https://my.home-assistant.io/redirect/system_health/).
        Click the copy button at the bottom of the pop-up and paste it here.

        [](https://my.home-assistant.io/redirect/system_health/)
  - type: textarea
    attributes:
      label: Supervisor diagnostics
      placeholder: "drag-and-drop the diagnostics data file here (do not copy-and-paste the content)"
      description: >-
        Supervisor diagnostics can be found in [Settings -> Devices & services](https://my.home-assistant.io/redirect/integrations/).
        Find the card that says `Home Assistant Supervisor`, open it, and select the three dot menu of the Supervisor integration entry
        and select 'Download diagnostics'.

        **Please drag-and-drop the downloaded file into the textbox below. Do not copy and paste its contents.**
  - type: textarea
    attributes:
      label: Additional information
      description: >
        If you have any additional information for us, use the field below.
        Please note, you can attach screenshots or screen recordings here, by
        dragging and dropping files in the field below.
25  .github/ISSUE_TEMPLATE/config.yml  vendored
@@ -1,25 +0,0 @@
blank_issues_enabled: false
contact_links:
  - name: Report a bug/issues with an unsupported Supervisor
    url: https://community.home-assistant.io
    about: The Community guide can help or was updated to solve your issue

  - name: Report a bug for the Supervisor panel
    url: https://github.com/home-assistant/frontend/issues
    about: The Supervisor panel is a part of the Home Assistant frontend

  - name: Report incorrect or missing information on our developer documentation
    url: https://github.com/home-assistant/developers.home-assistant.io/issues
    about: Our documentation has its own issue tracker. Please report issues with the website there.

  - name: Request a feature for the Supervisor
    url: https://github.com/orgs/home-assistant/discussions
    about: Request a new feature for the Supervisor.

  - name: I have a question or need support
    url: https://www.home-assistant.io/help
    about: We use GitHub for tracking bugs, check our website for resources on getting help.

  - name: I'm unsure where to go?
    url: https://www.home-assistant.io/join-chat
    about: If you are unsure where to go, then joining our chat is recommended; just ask!
53  .github/ISSUE_TEMPLATE/task.yml  vendored
@@ -1,53 +0,0 @@
name: Task
description: For staff only - Create a task
type: Task
body:
  - type: markdown
    attributes:
      value: |
        ## ⚠️ RESTRICTED ACCESS

        **This form is restricted to Open Home Foundation staff and authorized contributors only.**

        If you are a community member wanting to contribute, please:
        - For bug reports: Use the [bug report form](https://github.com/home-assistant/supervisor/issues/new?template=bug_report.yml)
        - For feature requests: Submit to [Feature Requests](https://github.com/orgs/home-assistant/discussions)

        ---

        ### For authorized contributors

        Use this form to create tasks for development work, improvements, or other actionable items that need to be tracked.
  - type: textarea
    id: description
    attributes:
      label: Description
      description: |
        Provide a clear and detailed description of the task that needs to be accomplished.

        Be specific about what needs to be done, why it's important, and any constraints or requirements.
      placeholder: |
        Describe the task, including:
        - What needs to be done
        - Why this task is needed
        - Expected outcome
        - Any constraints or requirements
    validations:
      required: true
  - type: textarea
    id: additional_context
    attributes:
      label: Additional context
      description: |
        Any additional information, links, research, or context that would be helpful.

        Include links to related issues, research, prototypes, roadmap opportunities etc.
      placeholder: |
        - Roadmap opportunity: [link]
        - Epic: [link]
        - Feature request: [link]
        - Technical design documents: [link]
        - Prototype/mockup: [link]
        - Dependencies: [links]
    validations:
      required: false
74  .github/PULL_REQUEST_TEMPLATE.md  vendored
@@ -1,74 +0,0 @@
<!--
  You are amazing! Thanks for contributing to our project!
  Please, DO NOT DELETE ANY TEXT from this template! (unless instructed).
-->

## Proposed change

<!--
  Describe the big picture of your changes here to communicate to the
  maintainers why we should accept this pull request. If it fixes a bug
  or resolves a feature request, be sure to link to that issue in the
  additional information section.
-->

## Type of change

<!--
  What type of change does your PR introduce to Home Assistant?
  NOTE: Please, check only 1! box!
  If your PR requires multiple boxes to be checked, you'll most likely need to
  split it into multiple PRs. This makes things easier and faster to code review.
-->

- [ ] Dependency upgrade
- [ ] Bugfix (non-breaking change which fixes an issue)
- [ ] New feature (which adds functionality to the supervisor)
- [ ] Breaking change (fix/feature causing existing functionality to break)
- [ ] Code quality improvements to existing code or addition of tests

## Additional information

<!--
  Details are important, and help maintainers process your PR.
  Please be sure to fill out additional details, if applicable.
-->

- This PR fixes or closes issue: fixes #
- This PR is related to issue:
- Link to documentation pull request:
- Link to cli pull request:
- Link to client library pull request:

## Checklist

<!--
  Put an `x` in the boxes that apply. You can also fill these out after
  creating the PR. If you're unsure about any of them, don't hesitate to ask.
  We're here to help! This is simply a reminder of what we are going to look
  for before merging your code.
-->

- [ ] The code change is tested and works locally.
- [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
- [ ] There is no commented out code in this PR.
- [ ] I have followed the [development checklist][dev-checklist]
- [ ] The code has been formatted using Ruff (`ruff format supervisor tests`)
- [ ] Tests have been added to verify that the new code works.

If API endpoints or add-on configuration are added/changed:

- [ ] Documentation added/updated for [developers.home-assistant.io][docs-repository]
- [ ] [CLI][cli-repository] updated (if necessary)
- [ ] [Client library][client-library-repository] updated (if necessary)

<!--
  Thank you for contributing <3

  Below, some useful links you could explore:
-->

[dev-checklist]: https://developers.home-assistant.io/docs/en/development_checklist.html
[docs-repository]: https://github.com/home-assistant/developers.home-assistant
[cli-repository]: https://github.com/home-assistant/cli
[client-library-repository]: https://github.com/home-assistant-libs/python-supervisor-client/
288  .github/copilot-instructions.md  vendored
@@ -1,288 +0,0 @@
# GitHub Copilot & Claude Code Instructions

This repository contains the Home Assistant Supervisor, a Python 3 based container
orchestration and management system for Home Assistant.

## Supervisor Capabilities & Features

### Architecture Overview

Home Assistant Supervisor is a Python-based container orchestration system that
communicates with the Docker daemon to manage containerized components. It is tightly
integrated with the underlying Operating System and core Operating System components
through D-Bus.

**Managed Components:**
- **Home Assistant Core**: The main home automation application running in its own
  container (also provides the web interface)
- **Add-ons**: Third-party applications and services (each add-on runs in its own
  container)
- **Plugins**: Built-in system services like DNS, Audio, CLI, Multicast, and Observer
- **Host System Integration**: OS-level operations and hardware access via D-Bus
- **Container Networking**: Internal Docker network management and external
  connectivity
- **Storage & Backup**: Data persistence and backup management across all containers

**Key Dependencies:**
- **Docker Engine**: Required for all container operations
- **D-Bus**: System-level communication with the host OS
- **systemd**: Service management for host system operations
- **NetworkManager**: Network configuration and management

### Add-on System

**Add-on Architecture**: Add-ons are containerized applications available through
add-on stores. Each store contains multiple add-ons, and each add-on includes metadata
that tells Supervisor the version, startup configuration (permissions), and available
user configurable options. Add-on metadata typically references a container image that
Supervisor fetches during installation. If not, the Supervisor builds the container
image from a Dockerfile.

**Built-in Stores**: Supervisor comes with several pre-configured stores:
- **Core Add-ons**: Official add-ons maintained by the Home Assistant team
- **Community Add-ons**: Popular third-party add-ons repository
- **ESPHome**: Add-ons for ESPHome ecosystem integration
- **Music Assistant**: Audio and music-related add-ons
- **Local Development**: Local folder for testing custom add-ons during development

**Store Management**: Stores are Git-based repositories that are periodically updated.
When updates are available, users receive notifications.

**Add-on Lifecycle**:
- **Installation**: Supervisor fetches or builds container images based on add-on
  metadata
- **Configuration**: Schema-validated options with integrated UI management
- **Runtime**: Full container lifecycle management, health monitoring
- **Updates**: Automatic or manual version management

### Update System

**Core Components**: Supervisor, Home Assistant Core, HAOS, and built-in plugins
receive version information from a central JSON file fetched from
`https://version.home-assistant.io/{channel}.json`. The `Updater` class handles
fetching this data, validating signatures, and updating internal version tracking.

**Update Channels**: Three channels (`stable`/`beta`/`dev`) determine which version
JSON file is fetched, allowing users to opt into different release streams.

**Add-on Updates**: Add-on version information comes from store repository updates, not
the central JSON file. When repositories are refreshed via the store system, add-ons
compare their local versions against repository versions to determine update
availability.
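To make the update flow concrete, here is a minimal sketch of fetching the channel feed described above. Only the URL pattern comes from this document; the use of `aiohttp` and the `supervisor` top-level key are assumptions, and signature validation is omitted:

```python
import asyncio

import aiohttp


async def fetch_versions(channel: str = "stable") -> dict:
    """Fetch the central version JSON for an update channel."""
    url = f"https://version.home-assistant.io/{channel}.json"
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            resp.raise_for_status()
            return await resp.json()


# Example: print the Supervisor version published on the stable channel
# ("supervisor" as a top-level key is an assumption about the feed schema).
print(asyncio.run(fetch_versions())["supervisor"])
```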
### Backup & Recovery System

**Backup Capabilities**:
- **Full Backups**: Complete system state capture including all add-ons,
  configuration, and data
- **Partial Backups**: Selective backup of specific components (Home Assistant,
  add-ons, folders)
- **Encrypted Backups**: Optional backup encryption with user-provided passwords
- **Multiple Storage Locations**: Local storage and remote backup destinations

**Recovery Features**:
- **One-click Restore**: Simple restoration from backup files
- **Selective Restore**: Choose specific components to restore
- **Automatic Recovery**: Self-healing for common system issues

---

## Supervisor Development

### Python Requirements

- **Compatibility**: Python 3.13+
- **Language Features**: Use modern Python features:
  - Type hints with `typing` module
  - f-strings (preferred over `%` or `.format()`)
  - Dataclasses and enum classes
  - Async/await patterns
  - Pattern matching where appropriate

### Code Quality Standards

- **Formatting**: Ruff
- **Linting**: PyLint and Ruff
- **Type Checking**: MyPy
- **Testing**: pytest with asyncio support
- **Language**: American English for all code, comments, and documentation

### Code Organization

**Core Structure**:
```
supervisor/
├── __init__.py       # Package initialization
├── const.py          # Constants and enums
├── coresys.py        # Core system management
├── bootstrap.py      # System initialization
├── exceptions.py     # Custom exception classes
├── api/              # REST API endpoints
├── addons/           # Add-on management
├── backups/          # Backup system
├── docker/           # Docker integration
├── host/             # Host system interface
├── homeassistant/    # Home Assistant Core management
├── dbus/             # D-Bus system integration
├── hardware/         # Hardware detection and management
├── plugins/          # Plugin system
├── resolution/       # Issue detection and resolution
├── security/         # Security management
├── services/         # Service discovery and management
├── store/            # Add-on store management
└── utils/            # Utility functions
```

**Shared Constants**: Use constants from `supervisor/const.py` instead of hardcoding
values. Define new constants following existing patterns and group related constants
together.

### Supervisor Architecture Patterns

**CoreSysAttributes Inheritance Pattern**: Nearly all major classes in Supervisor
inherit from `CoreSysAttributes`, providing access to the centralized system state
via `self.coresys` and convenient `sys_*` properties.

```python
# Standard Supervisor class pattern
class MyManager(CoreSysAttributes):
    """Manage my functionality."""

    def __init__(self, coresys: CoreSys):
        """Initialize manager."""
        self.coresys: CoreSys = coresys
        self._component: MyComponent = MyComponent(coresys)

    @property
    def component(self) -> MyComponent:
        """Return component handler."""
        return self._component

    # Access system components via inherited properties
    async def do_something(self):
        await self.sys_docker.containers.get("my_container")
        self.sys_bus.fire_event(BusEvent.MY_EVENT, {"data": "value"})
```

**Key Inherited Properties from CoreSysAttributes**:
- `self.sys_docker` - Docker API access
- `self.sys_run_in_executor()` - Execute blocking operations
- `self.sys_create_task()` - Create async tasks
- `self.sys_bus` - Event bus for system events
- `self.sys_config` - System configuration
- `self.sys_homeassistant` - Home Assistant Core management
- `self.sys_addons` - Add-on management
- `self.sys_host` - Host system access
- `self.sys_dbus` - D-Bus system interface

**Load Pattern**: Many components implement a `load()` method which effectively
initializes the component from external sources (containers, files, D-Bus services).
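A minimal sketch of that `load()` pattern, reusing the hypothetical `MyManager` from the example above (`_read_state_file` is an invented helper):

```python
class MyManager(CoreSysAttributes):
    """Manage my functionality."""

    def __init__(self, coresys: CoreSys):
        """Initialize manager (synchronous setup only)."""
        self.coresys: CoreSys = coresys
        self._state: dict | None = None

    async def load(self) -> None:
        """Initialize the component from external sources."""
        # Blocking file I/O belongs in the executor, not on the event loop.
        self._state = await self.sys_run_in_executor(self._read_state_file)
```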
### API Development

**REST API Structure**:
- **Base Path**: `/api/` for all endpoints
- **Authentication**: Bearer token authentication
- **Consistent Response Format**: `{"result": "ok", "data": {...}}` or
  `{"result": "error", "message": "..."}`
- **Validation**: Use voluptuous schemas with `api_validate()`

**Use `@api_process` Decorator**: This decorator handles all standard error handling
and response formatting automatically. The decorator catches `APIError`, `HassioError`,
and other exceptions, returning appropriate HTTP responses.

```python
from ..api.utils import api_process, api_validate

@api_process
async def backup_full(self, request: web.Request) -> dict[str, Any]:
    """Create full backup."""
    body = await api_validate(SCHEMA_BACKUP_FULL, request)
    job = await self.sys_backups.do_backup_full(**body)
    return {ATTR_JOB_ID: job.uuid}
```

### Docker Integration

- **Container Management**: Use Supervisor's Docker manager instead of direct
  Docker API
- **Networking**: Supervisor manages internal Docker networks with predefined IP
  ranges
- **Security**: AppArmor profiles, capability restrictions, and user namespace
  isolation
- **Health Checks**: Implement health monitoring for all managed containers

### D-Bus Integration

- **Use dbus-fast**: Async D-Bus library for system integration
- **Service Management**: systemd, NetworkManager, hostname management
- **Error Handling**: Wrap D-Bus exceptions in Supervisor-specific exceptions
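As a sketch of what dbus-fast usage looks like here, reading the static hostname from `systemd-hostnamed` on the system bus; `HostError` stands in for whichever exception from `exceptions.py` applies:

```python
from dbus_fast import BusType, DBusError
from dbus_fast.aio import MessageBus


async def read_static_hostname() -> str:
    """Read StaticHostname from org.freedesktop.hostname1."""
    bus = await MessageBus(bus_type=BusType.SYSTEM).connect()
    try:
        introspection = await bus.introspect(
            "org.freedesktop.hostname1", "/org/freedesktop/hostname1"
        )
        proxy = bus.get_proxy_object(
            "org.freedesktop.hostname1", "/org/freedesktop/hostname1", introspection
        )
        props = proxy.get_interface("org.freedesktop.DBus.Properties")
        variant = await props.call_get("org.freedesktop.hostname1", "StaticHostname")
        return variant.value
    except DBusError as err:
        # Wrap the raw D-Bus failure in a Supervisor-specific exception
        # (HostError is a stand-in here).
        raise HostError("Can't read hostname") from err
    finally:
        bus.disconnect()
```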
### Async Programming

- **All I/O operations must be async**: File operations, network calls, subprocess
  execution
- **Use asyncio patterns**: Prefer `asyncio.gather()` over sequential awaits
- **Executor jobs**: Use `self.sys_run_in_executor()` for blocking operations
- **Two-phase initialization**: `__init__` for sync setup, `post_init()` for async
  initialization
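A short sketch combining these rules (`refresh()` and `_hash_file` are hypothetical helpers):

```python
import asyncio


class MyUpdater(CoreSysAttributes):
    """Hypothetical component illustrating the async rules above."""

    async def refresh_all(self, addons: list) -> None:
        """Refresh independent add-ons concurrently, not sequentially."""
        await asyncio.gather(*(addon.refresh() for addon in addons))

    async def checksum(self, path: str) -> str:
        """Hash a file without blocking the event loop."""
        return await self.sys_run_in_executor(self._hash_file, path)
```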
### Testing

- **Location**: `tests/` directory with module mirroring
- **Fixtures**: Extensive use of pytest fixtures for CoreSys setup
- **Mocking**: Mock external dependencies (Docker, D-Bus, network calls)
- **Coverage**: Minimum 90% test coverage, 100% for security-sensitive code
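A minimal sketch of such a test, assuming the shared `coresys` fixture and that async tests are collected automatically (`MyUpdater` is the hypothetical example from above):

```python
async def test_refresh_all_empty(coresys):
    """Refreshing an empty add-on list completes without errors."""
    updater = MyUpdater(coresys)
    await updater.refresh_all([])
```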
### Error Handling

- **Custom Exceptions**: Defined in `exceptions.py` with clear inheritance hierarchy
- **Error Propagation**: Use `from` clause for exception chaining
- **API Errors**: Use `APIError` with appropriate HTTP status codes
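A sketch of the chaining convention; `DockerError` stands in for whichever exception from `exceptions.py` fits the failure:

```python
from docker.errors import DockerException

from ..exceptions import DockerError


async def start_container(self, name: str) -> None:
    """Look up a managed container, preserving the low-level cause."""
    try:
        await self.sys_docker.containers.get(name)
    except DockerException as err:
        # `from err` keeps the original traceback attached for debugging.
        raise DockerError(f"Can't access {name}") from err
```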
### Security Considerations

- **Container Security**: AppArmor profiles mandatory for add-ons, minimal
  capabilities
- **Authentication**: Token-based API authentication with role-based access
- **Data Protection**: Backup encryption, secure secret management, comprehensive
  input validation

### Development Commands

```bash
# Run tests, adjust paths as necessary
pytest -qsx tests/

# Linting and formatting
ruff check supervisor/
ruff format supervisor/

# Type checking
mypy --ignore-missing-imports supervisor/

# Pre-commit hooks
pre-commit run --all-files
```

Always run the pre-commit hooks at the end of code editing.

### Common Patterns to Follow

**✅ Use These Patterns**:
- Inherit from `CoreSysAttributes` for system access
- Use `@api_process` decorator for API endpoints
- Use `self.sys_run_in_executor()` for blocking operations
- Access Docker via `self.sys_docker` not direct Docker API
- Use constants from `const.py` instead of hardcoding
- Store types in (per-module) `const.py` (e.g. supervisor/store/const.py)

**❌ Avoid These Patterns**:
- Direct Docker API usage - use Supervisor's Docker manager
- Blocking operations in async context (use asyncio alternatives)
- Hardcoded values - use constants from `const.py`
- Manual error handling in API endpoints - let `@api_process` handle it

This guide provides the foundation for contributing to Home Assistant Supervisor.
Follow these patterns and guidelines to ensure code quality, security, and
maintainability.
14  .github/dependabot.yml  vendored
@@ -1,14 +0,0 @@
version: 2
updates:
  - package-ecosystem: pip
    directory: "/"
    schedule:
      interval: daily
      time: "06:00"
    open-pull-requests-limit: 10
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: daily
      time: "06:00"
    open-pull-requests-limit: 10
27  .github/lock.yml  vendored  Normal file
@@ -0,0 +1,27 @@
# Configuration for Lock Threads - https://github.com/dessant/lock-threads

# Number of days of inactivity before a closed issue or pull request is locked
daysUntilLock: 1

# Skip issues and pull requests created before a given timestamp. Timestamp must
# follow ISO 8601 (`YYYY-MM-DD`). Set to `false` to disable
skipCreatedBefore: 2020-01-01

# Issues and pull requests with these labels will be ignored. Set to `[]` to disable
exemptLabels: []

# Label to add before locking, such as `outdated`. Set to `false` to disable
lockLabel: false

# Comment to post before locking. Set to `false` to disable
lockComment: false

# Assign `resolved` as the reason for locking. Set to `false` to disable
setLockReason: false

# Limit to only `issues` or `pulls`
only: pulls

# Optionally, specify configuration settings just for `issues` or `pulls`
issues:
  daysUntilLock: 30
48  .github/release-drafter.yml  vendored
@@ -1,50 +1,4 @@
|
|||||||
change-template: "- #$NUMBER $TITLE @$AUTHOR"
|
|
||||||
sort-direction: ascending
|
|
||||||
|
|
||||||
categories:
|
|
||||||
- title: ":boom: Breaking Changes"
|
|
||||||
label: "breaking-change"
|
|
||||||
|
|
||||||
- title: ":wrench: Build"
|
|
||||||
label: "build"
|
|
||||||
|
|
||||||
- title: ":boar: Chore"
|
|
||||||
label: "chore"
|
|
||||||
|
|
||||||
- title: ":sparkles: New Features"
|
|
||||||
label: "new-feature"
|
|
||||||
|
|
||||||
- title: ":zap: Performance"
|
|
||||||
label: "performance"
|
|
||||||
|
|
||||||
- title: ":recycle: Refactor"
|
|
||||||
label: "refactor"
|
|
||||||
|
|
||||||
- title: ":green_heart: CI"
|
|
||||||
label: "ci"
|
|
||||||
|
|
||||||
- title: ":bug: Bug Fixes"
|
|
||||||
label: "bugfix"
|
|
||||||
|
|
||||||
- title: ":white_check_mark: Test"
|
|
||||||
label: "test"
|
|
||||||
|
|
||||||
- title: ":arrow_up: Dependency Updates"
|
|
||||||
label: "dependencies"
|
|
||||||
collapse-after: 1
|
|
||||||
|
|
||||||
include-labels:
|
|
||||||
- "breaking-change"
|
|
||||||
- "build"
|
|
||||||
- "chore"
|
|
||||||
- "performance"
|
|
||||||
- "refactor"
|
|
||||||
- "new-feature"
|
|
||||||
- "bugfix"
|
|
||||||
- "dependencies"
|
|
||||||
- "test"
|
|
||||||
- "ci"
|
|
||||||
|
|
||||||
template: |
|
template: |
|
||||||
|
## What's Changed
|
||||||
|
|
||||||
$CHANGES
|
$CHANGES
|
||||||
|
17  .github/stale.yml  vendored  Normal file
@@ -0,0 +1,17 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 60
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
# Issues with these labels will never be considered stale
exemptLabels:
  - pinned
  - security
# Label to use when marking an issue as stale
staleLabel: wontfix
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
  This issue has been automatically marked as stale because it has not had
  recent activity. It will be closed if no further activity occurs. Thank you
  for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false
380  .github/workflows/builder.yml  vendored
@@ -1,380 +0,0 @@
name: Build supervisor

on:
  workflow_dispatch:
    inputs:
      channel:
        description: "Channel"
        required: true
        default: "dev"
      version:
        description: "Version"
        required: true
      publish:
        description: "Publish"
        required: true
        default: "false"
      stable:
        description: "Stable"
        required: true
        default: "false"
  pull_request:
    branches: ["main"]
  release:
    types: ["published"]
  push:
    branches: ["main"]
    paths:
      - "rootfs/**"
      - "supervisor/**"
      - build.yaml
      - Dockerfile
      - requirements.txt
      - setup.py

env:
  DEFAULT_PYTHON: "3.13"
  BUILD_NAME: supervisor
  BUILD_TYPE: supervisor

concurrency:
  group: "${{ github.workflow }}-${{ github.ref }}"
  cancel-in-progress: true

jobs:
  init:
    name: Initialize build
    runs-on: ubuntu-latest
    outputs:
      architectures: ${{ steps.info.outputs.architectures }}
      version: ${{ steps.version.outputs.version }}
      channel: ${{ steps.version.outputs.channel }}
      publish: ${{ steps.version.outputs.publish }}
      requirements: ${{ steps.requirements.outputs.changed }}
    steps:
      - name: Checkout the repository
        uses: actions/checkout@v4.2.2
        with:
          fetch-depth: 0

      - name: Get information
        id: info
        uses: home-assistant/actions/helpers/info@master

      - name: Get version
        id: version
        uses: home-assistant/actions/helpers/version@master
        with:
          type: ${{ env.BUILD_TYPE }}

      - name: Get changed files
        id: changed_files
        if: steps.version.outputs.publish == 'false'
        uses: masesgroup/retrieve-changed-files@v3.0.0

      - name: Check if requirements files changed
        id: requirements
        run: |
          if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.yaml) ]]; then
            echo "changed=true" >> "$GITHUB_OUTPUT"
          fi

  build:
    name: Build ${{ matrix.arch }} supervisor
    needs: init
    runs-on: ubuntu-latest
    permissions:
      contents: read
      id-token: write
      packages: write
    strategy:
      matrix:
        arch: ${{ fromJson(needs.init.outputs.architectures) }}
    steps:
      - name: Checkout the repository
        uses: actions/checkout@v4.2.2
        with:
          fetch-depth: 0

      - name: Write env-file
        if: needs.init.outputs.requirements == 'true'
        run: |
          (
            # Fix out of memory issues with rust
            echo "CARGO_NET_GIT_FETCH_WITH_CLI=true"
          ) > .env_file

      - name: Build wheels
        if: needs.init.outputs.requirements == 'true'
        uses: home-assistant/wheels@2025.07.0
        with:
          abi: cp313
          tag: musllinux_1_2
          arch: ${{ matrix.arch }}
          wheels-key: ${{ secrets.WHEELS_KEY }}
          apk: "libffi-dev;openssl-dev;yaml-dev"
          skip-binary: aiohttp
          env-file: true
          requirements: "requirements.txt"

      - name: Set version
        if: needs.init.outputs.publish == 'true'
        uses: home-assistant/actions/helpers/version@master
        with:
          type: ${{ env.BUILD_TYPE }}

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        if: needs.init.outputs.publish == 'true'
        uses: actions/setup-python@v5.6.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}

      - name: Install Cosign
        if: needs.init.outputs.publish == 'true'
        uses: sigstore/cosign-installer@v3.9.2
        with:
          cosign-release: "v2.4.3"

      - name: Install dirhash and calc hash
        if: needs.init.outputs.publish == 'true'
        run: |
          pip3 install setuptools dirhash
          dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")"
          echo "${dir_hash}" > rootfs/supervisor.sha256

      - name: Sign supervisor SHA256
        if: needs.init.outputs.publish == 'true'
        run: |
          cosign sign-blob --yes rootfs/supervisor.sha256 --bundle rootfs/supervisor.sha256.sig

      - name: Login to GitHub Container Registry
        if: needs.init.outputs.publish == 'true'
        uses: docker/login-action@v3.4.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Set build arguments
        if: needs.init.outputs.publish == 'false'
        run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV

      - name: Build supervisor
        uses: home-assistant/builder@2025.03.0
        with:
          args: |
            $BUILD_ARGS \
            --${{ matrix.arch }} \
            --target /data \
            --cosign \
            --generic ${{ needs.init.outputs.version }}
        env:
          CAS_API_KEY: ${{ secrets.CAS_TOKEN }}

  version:
    name: Update version
    needs: ["init", "run_supervisor"]
    runs-on: ubuntu-latest
    steps:
      - name: Checkout the repository
        if: needs.init.outputs.publish == 'true'
        uses: actions/checkout@v4.2.2

      - name: Initialize git
        if: needs.init.outputs.publish == 'true'
        uses: home-assistant/actions/helpers/git-init@master
        with:
          name: ${{ secrets.GIT_NAME }}
          email: ${{ secrets.GIT_EMAIL }}
          token: ${{ secrets.GIT_TOKEN }}

      - name: Update version file
        if: needs.init.outputs.publish == 'true'
        uses: home-assistant/actions/helpers/version-push@master
        with:
          key: ${{ env.BUILD_NAME }}
          version: ${{ needs.init.outputs.version }}
          channel: ${{ needs.init.outputs.channel }}

  run_supervisor:
    runs-on: ubuntu-latest
    name: Run the Supervisor
    needs: ["build", "init"]
    timeout-minutes: 60
    steps:
      - name: Checkout the repository
        uses: actions/checkout@v4.2.2

      - name: Build the Supervisor
        if: needs.init.outputs.publish != 'true'
        uses: home-assistant/builder@2025.03.0
        with:
          args: |
            --test \
            --amd64 \
            --target /data \
            --generic runner

      - name: Pull Supervisor
        if: needs.init.outputs.publish == 'true'
        run: |
          docker pull ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }}
          docker tag ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }} ghcr.io/home-assistant/amd64-hassio-supervisor:runner

      - name: Create the Supervisor
        run: |
          mkdir -p /tmp/supervisor/data
          docker create --name hassio_supervisor \
            --privileged \
            --security-opt seccomp=unconfined \
            --security-opt apparmor=unconfined \
            -v /run/docker.sock:/run/docker.sock \
            -v /run/dbus:/run/dbus \
            -v /tmp/supervisor/data:/data \
            -v /etc/machine-id:/etc/machine-id:ro \
            -e SUPERVISOR_SHARE="/tmp/supervisor/data" \
            -e SUPERVISOR_NAME=hassio_supervisor \
            -e SUPERVISOR_DEV=1 \
            -e SUPERVISOR_MACHINE="qemux86-64" \
            ghcr.io/home-assistant/amd64-hassio-supervisor:runner

      - name: Start the Supervisor
        run: docker start hassio_supervisor

      - name: Wait for Supervisor to come up
        run: |
          SUPERVISOR=$(docker inspect --format='{{.NetworkSettings.IPAddress}}' hassio_supervisor)
          ping="error"
          while [ "$ping" != "ok" ]; do
            ping=$(curl -sSL "http://$SUPERVISOR/supervisor/ping" | jq -r '.result')
            sleep 5
          done

      - name: Check the Supervisor
        run: |
          echo "Checking supervisor info"
          test=$(docker exec hassio_cli ha supervisor info --no-progress --raw-json | jq -r '.result')
          if [ "$test" != "ok" ]; then
            exit 1
          fi

          echo "Checking supervisor network info"
          test=$(docker exec hassio_cli ha network info --no-progress --raw-json | jq -r '.result')
          if [ "$test" != "ok" ]; then
            exit 1
          fi

      - name: Check the Store / Addon
        run: |
          echo "Install Core SSH Add-on"
          test=$(docker exec hassio_cli ha addons install core_ssh --no-progress --raw-json | jq -r '.result')
          if [ "$test" != "ok" ]; then
            exit 1
          fi

          # Make sure it actually installed
          test=$(docker exec hassio_cli ha addons info core_ssh --no-progress --raw-json | jq -r '.data.version')
          if [[ "$test" == "null" ]]; then
            exit 1
          fi

          echo "Start Core SSH Add-on"
          test=$(docker exec hassio_cli ha addons start core_ssh --no-progress --raw-json | jq -r '.result')
          if [ "$test" != "ok" ]; then
            exit 1
          fi

          # Make sure its state is started
          test="$(docker exec hassio_cli ha addons info core_ssh --no-progress --raw-json | jq -r '.data.state')"
          if [ "$test" != "started" ]; then
            exit 1
          fi

      - name: Check the Supervisor code sign
        if: needs.init.outputs.publish == 'true'
        run: |
          echo "Enable Content-Trust"
          test=$(docker exec hassio_cli ha security options --content-trust=true --no-progress --raw-json | jq -r '.result')
          if [ "$test" != "ok" ]; then
            exit 1
          fi

          echo "Run supervisor health check"
          test=$(docker exec hassio_cli ha resolution healthcheck --no-progress --raw-json | jq -r '.result')
          if [ "$test" != "ok" ]; then
            exit 1
          fi

          echo "Check supervisor unhealthy"
          test=$(docker exec hassio_cli ha resolution info --no-progress --raw-json | jq -r '.data.unhealthy[]')
          if [ "$test" != "" ]; then
            exit 1
          fi

          echo "Check supervisor supported"
          test=$(docker exec hassio_cli ha resolution info --no-progress --raw-json | jq -r '.data.unsupported[]')
          if [[ "$test" =~ source_mods ]]; then
            exit 1
          fi

      - name: Create full backup
        id: backup
        run: |
          test=$(docker exec hassio_cli ha backups new --no-progress --raw-json)
          if [ "$(echo $test | jq -r '.result')" != "ok" ]; then
            exit 1
          fi
          echo "slug=$(echo $test | jq -r '.data.slug')" >> "$GITHUB_OUTPUT"

      - name: Uninstall SSH add-on
        run: |
          test=$(docker exec hassio_cli ha addons uninstall core_ssh --no-progress --raw-json | jq -r '.result')
          if [ "$test" != "ok" ]; then
            exit 1
          fi

      - name: Restart supervisor
        run: |
          test=$(docker exec hassio_cli ha supervisor restart --no-progress --raw-json | jq -r '.result')
          if [ "$test" != "ok" ]; then
            exit 1
          fi

      - name: Wait for Supervisor to come up
        run: |
          SUPERVISOR=$(docker inspect --format='{{.NetworkSettings.IPAddress}}' hassio_supervisor)
          ping="error"
          while [ "$ping" != "ok" ]; do
            ping=$(curl -sSL "http://$SUPERVISOR/supervisor/ping" | jq -r '.result')
            sleep 5
          done

      - name: Restore SSH add-on from backup
        run: |
          test=$(docker exec hassio_cli ha backups restore ${{ steps.backup.outputs.slug }} --addons core_ssh --no-progress --raw-json | jq -r '.result')
          if [ "$test" != "ok" ]; then
            exit 1
          fi

          # Make sure it actually installed
          test=$(docker exec hassio_cli ha addons info core_ssh --no-progress --raw-json | jq -r '.data.version')
          if [[ "$test" == "null" ]]; then
            exit 1
          fi

          # Make sure its state is started
          test="$(docker exec hassio_cli ha addons info core_ssh --no-progress --raw-json | jq -r '.data.state')"
          if [ "$test" != "started" ]; then
            exit 1
          fi

      - name: Restore SSL directory from backup
        run: |
          test=$(docker exec hassio_cli ha backups restore ${{ steps.backup.outputs.slug }} --folders ssl --no-progress --raw-json | jq -r '.result')
          if [ "$test" != "ok" ]; then
            exit 1
          fi

      - name: Get supervisor logs on failure
        if: ${{ cancelled() || failure() }}
        run: docker logs hassio_supervisor
19  .github/workflows/check_pr_labels.yml  vendored
@@ -1,19 +0,0 @@
name: Check PR

on:
  pull_request:
    branches: ["main"]
    types: [labeled, unlabeled, synchronize]

jobs:
  init:
    name: Check labels
    runs-on: ubuntu-latest
    steps:
      - name: Check labels
        run: |
          labels=$(jq -r '.pull_request.labels[] | .name' ${{ github.event_path }})
          echo "$labels"
          if [ "$labels" == "cla-signed" ]; then
            exit 1
          fi
428  .github/workflows/ci.yaml  vendored
@@ -1,428 +0,0 @@
name: CI

# yamllint disable-line rule:truthy
on:
  push:
    branches:
      - main
  pull_request: ~

env:
  DEFAULT_PYTHON: "3.13"
  PRE_COMMIT_CACHE: ~/.cache/pre-commit
  MYPY_CACHE_VERSION: 1

concurrency:
  group: "${{ github.workflow }}-${{ github.ref }}"
  cancel-in-progress: true

jobs:
  # Separate job to pre-populate the base dependency cache
  # This prevents upcoming jobs from doing the same individually
  prepare:
    runs-on: ubuntu-latest
    outputs:
      python-version: ${{ steps.python.outputs.python-version }}
    name: Prepare Python dependencies
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.2.2
      - name: Set up Python
        id: python
        uses: actions/setup-python@v5.6.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v4.2.3
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Create Python virtual environment
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          python -m venv venv
          . venv/bin/activate
          pip install -U pip setuptools
          pip install -r requirements.txt -r requirements_tests.txt
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v4.2.3
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          lookup-only: true
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
          restore-keys: |
            ${{ runner.os }}-pre-commit-
      - name: Install pre-commit dependencies
        if: steps.cache-precommit.outputs.cache-hit != 'true'
        run: |
          . venv/bin/activate
          pre-commit install-hooks

  lint-ruff-format:
    name: Check ruff-format
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
        uses: actions/setup-python@v5.6.0
        id: python
        with:
          python-version: ${{ needs.prepare.outputs.python-version }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v4.2.3
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v4.2.3
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
      - name: Fail job if cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Run ruff-format
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure
        env:
          RUFF_OUTPUT_FORMAT: github

  lint-ruff:
    name: Check ruff
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
        uses: actions/setup-python@v5.6.0
        id: python
        with:
          python-version: ${{ needs.prepare.outputs.python-version }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v4.2.3
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v4.2.3
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
      - name: Fail job if cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Run ruff
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
        env:
          RUFF_OUTPUT_FORMAT: github

  lint-dockerfile:
    name: Check Dockerfile
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.2.2
      - name: Register hadolint problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/hadolint.json"
      - name: Check Dockerfile
        uses: docker://hadolint/hadolint:v1.18.0
        with:
          args: hadolint Dockerfile

  lint-executable-shebangs:
    name: Check executables
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
        uses: actions/setup-python@v5.6.0
        id: python
        with:
          python-version: ${{ needs.prepare.outputs.python-version }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v4.2.3
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v4.2.3
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
      - name: Fail job if cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Register check executables problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
      - name: Run executables check
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual check-executables-have-shebangs --all-files

  lint-json:
    name: Check JSON
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
        uses: actions/setup-python@v5.6.0
        id: python
        with:
          python-version: ${{ needs.prepare.outputs.python-version }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v4.2.3
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
|
||||||
- name: Fail job if Python cache restore failed
|
|
||||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
|
||||||
run: |
|
|
||||||
echo "Failed to restore Python virtual environment from cache"
|
|
||||||
exit 1
|
|
||||||
- name: Restore pre-commit environment from cache
|
|
||||||
id: cache-precommit
|
|
||||||
uses: actions/cache@v4.2.3
|
|
||||||
with:
|
|
||||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
|
||||||
key: |
|
|
||||||
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
|
||||||
- name: Fail job if cache restore failed
|
|
||||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
|
||||||
run: |
|
|
||||||
echo "Failed to restore Python virtual environment from cache"
|
|
||||||
exit 1
|
|
||||||
- name: Register check-json problem matcher
|
|
||||||
run: |
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/check-json.json"
|
|
||||||
- name: Run check-json
|
|
||||||
run: |
|
|
||||||
. venv/bin/activate
|
|
||||||
pre-commit run --hook-stage manual check-json --all-files
|
|
||||||
|
|
||||||
lint-pylint:
|
|
||||||
name: Check pylint
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: prepare
|
|
||||||
steps:
|
|
||||||
- name: Check out code from GitHub
|
|
||||||
uses: actions/checkout@v4.2.2
|
|
||||||
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
|
||||||
uses: actions/setup-python@v5.6.0
|
|
||||||
id: python
|
|
||||||
with:
|
|
||||||
python-version: ${{ needs.prepare.outputs.python-version }}
|
|
||||||
- name: Restore Python virtual environment
|
|
||||||
id: cache-venv
|
|
||||||
uses: actions/cache@v4.2.3
|
|
||||||
with:
|
|
||||||
path: venv
|
|
||||||
key: |
|
|
||||||
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
|
||||||
- name: Fail job if Python cache restore failed
|
|
||||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
|
||||||
run: |
|
|
||||||
echo "Failed to restore Python virtual environment from cache"
|
|
||||||
exit 1
|
|
||||||
- name: Install additional system dependencies
|
|
||||||
run: |
|
|
||||||
sudo apt-get update
|
|
||||||
sudo apt-get install -y --no-install-recommends libpulse0
|
|
||||||
- name: Register pylint problem matcher
|
|
||||||
run: |
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/pylint.json"
|
|
||||||
- name: Run pylint
|
|
||||||
run: |
|
|
||||||
. venv/bin/activate
|
|
||||||
pylint supervisor tests
|
|
||||||
|
|
||||||
mypy:
|
|
||||||
name: Check mypy
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: prepare
|
|
||||||
steps:
|
|
||||||
- name: Check out code from GitHub
|
|
||||||
uses: actions/checkout@v4.2.2
|
|
||||||
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
|
||||||
uses: actions/setup-python@v5.6.0
|
|
||||||
id: python
|
|
||||||
with:
|
|
||||||
python-version: ${{ needs.prepare.outputs.python-version }}
|
|
||||||
- name: Generate partial mypy restore key
|
|
||||||
id: generate-mypy-key
|
|
||||||
run: |
|
|
||||||
mypy_version=$(cat requirements_test.txt | grep mypy | cut -d '=' -f 3)
|
|
||||||
echo "version=$mypy_version" >> $GITHUB_OUTPUT
|
|
||||||
echo "key=mypy-${{ env.MYPY_CACHE_VERSION }}-$mypy_version-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
|
|
||||||
- name: Restore Python virtual environment
|
|
||||||
id: cache-venv
|
|
||||||
uses: actions/cache@v4.2.3
|
|
||||||
with:
|
|
||||||
path: venv
|
|
||||||
key: >-
|
|
||||||
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
|
||||||
- name: Fail job if Python cache restore failed
|
|
||||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
|
||||||
run: |
|
|
||||||
echo "Failed to restore Python virtual environment from cache"
|
|
||||||
exit 1
|
|
||||||
- name: Restore mypy cache
|
|
||||||
uses: actions/cache@v4.2.3
|
|
||||||
with:
|
|
||||||
path: .mypy_cache
|
|
||||||
key: >-
|
|
||||||
${{ runner.os }}-mypy-${{ needs.prepare.outputs.python-version }}-${{ steps.generate-mypy-key.outputs.key }}
|
|
||||||
restore-keys: >-
|
|
||||||
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-mypy-${{ env.MYPY_CACHE_VERSION }}-${{ steps.generate-mypy-key.outputs.version }}
|
|
||||||
- name: Register mypy problem matcher
|
|
||||||
run: |
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/mypy.json"
|
|
||||||
- name: Run mypy
|
|
||||||
run: |
|
|
||||||
. venv/bin/activate
|
|
||||||
mypy --ignore-missing-imports supervisor
|
|
||||||
|
|
||||||
pytest:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: prepare
|
|
||||||
name: Run tests Python ${{ needs.prepare.outputs.python-version }}
|
|
||||||
steps:
|
|
||||||
- name: Check out code from GitHub
|
|
||||||
uses: actions/checkout@v4.2.2
|
|
||||||
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
|
||||||
uses: actions/setup-python@v5.6.0
|
|
||||||
id: python
|
|
||||||
with:
|
|
||||||
python-version: ${{ needs.prepare.outputs.python-version }}
|
|
||||||
- name: Install Cosign
|
|
||||||
uses: sigstore/cosign-installer@v3.9.2
|
|
||||||
with:
|
|
||||||
cosign-release: "v2.4.3"
|
|
||||||
- name: Restore Python virtual environment
|
|
||||||
id: cache-venv
|
|
||||||
uses: actions/cache@v4.2.3
|
|
||||||
with:
|
|
||||||
path: venv
|
|
||||||
key: |
|
|
||||||
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
|
||||||
- name: Fail job if Python cache restore failed
|
|
||||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
|
||||||
run: |
|
|
||||||
echo "Failed to restore Python virtual environment from cache"
|
|
||||||
exit 1
|
|
||||||
- name: Install additional system dependencies
|
|
||||||
run: |
|
|
||||||
sudo apt-get update
|
|
||||||
sudo apt-get install -y --no-install-recommends libpulse0 libudev1 dbus-daemon
|
|
||||||
- name: Register Python problem matcher
|
|
||||||
run: |
|
|
||||||
echo "::add-matcher::.github/workflows/matchers/python.json"
|
|
||||||
- name: Install Pytest Annotation plugin
|
|
||||||
run: |
|
|
||||||
. venv/bin/activate
|
|
||||||
# Ideally this should be part of our dependencies
|
|
||||||
# However this plugin is fairly new and doesn't run correctly
|
|
||||||
# on a non-GitHub environment.
|
|
||||||
pip install pytest-github-actions-annotate-failures
|
|
||||||
- name: Run pytest
|
|
||||||
run: |
|
|
||||||
. venv/bin/activate
|
|
||||||
pytest \
|
|
||||||
-qq \
|
|
||||||
--timeout=10 \
|
|
||||||
--durations=10 \
|
|
||||||
--cov supervisor \
|
|
||||||
-o console_output_style=count \
|
|
||||||
tests
|
|
||||||
- name: Upload coverage artifact
|
|
||||||
uses: actions/upload-artifact@v4.6.2
|
|
||||||
with:
|
|
||||||
name: coverage-${{ matrix.python-version }}
|
|
||||||
path: .coverage
|
|
||||||
include-hidden-files: true
|
|
||||||
|
|
||||||
coverage:
|
|
||||||
name: Process test coverage
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: ["pytest", "prepare"]
|
|
||||||
steps:
|
|
||||||
- name: Check out code from GitHub
|
|
||||||
uses: actions/checkout@v4.2.2
|
|
||||||
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
|
|
||||||
uses: actions/setup-python@v5.6.0
|
|
||||||
id: python
|
|
||||||
with:
|
|
||||||
python-version: ${{ needs.prepare.outputs.python-version }}
|
|
||||||
- name: Restore Python virtual environment
|
|
||||||
id: cache-venv
|
|
||||||
uses: actions/cache@v4.2.3
|
|
||||||
with:
|
|
||||||
path: venv
|
|
||||||
key: |
|
|
||||||
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
|
||||||
- name: Fail job if Python cache restore failed
|
|
||||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
|
||||||
run: |
|
|
||||||
echo "Failed to restore Python virtual environment from cache"
|
|
||||||
exit 1
|
|
||||||
- name: Download all coverage artifacts
|
|
||||||
uses: actions/download-artifact@v4.3.0
|
|
||||||
- name: Combine coverage results
|
|
||||||
run: |
|
|
||||||
. venv/bin/activate
|
|
||||||
coverage combine coverage*/.coverage*
|
|
||||||
coverage report
|
|
||||||
coverage xml
|
|
||||||
- name: Upload coverage to Codecov
|
|
||||||
uses: codecov/codecov-action@v5.4.3
|
|
.github/workflows/lock.yml (vendored)
@@ -1,20 +0,0 @@
name: Lock

# yamllint disable-line rule:truthy
on:
  schedule:
    - cron: "0 0 * * *"

jobs:
  lock:
    runs-on: ubuntu-latest
    steps:
      - uses: dessant/lock-threads@v5.0.1
        with:
          github-token: ${{ github.token }}
          issue-inactive-days: "30"
          exclude-issue-created-before: "2020-10-01T00:00:00Z"
          issue-lock-reason: ""
          pr-inactive-days: "1"
          exclude-pr-created-before: "2020-11-01T00:00:00Z"
          pr-lock-reason: ""
.github/workflows/matchers/check-executables-have-shebangs.json (vendored)
@@ -1,14 +0,0 @@
{
  "problemMatcher": [
    {
      "owner": "check-executables-have-shebangs",
      "pattern": [
        {
          "regexp": "^(.+):\\s(.+)$",
          "file": 1,
          "message": 2
        }
      ]
    }
  ]
}
.github/workflows/matchers/check-json.json (vendored)
@@ -1,16 +0,0 @@
{
  "problemMatcher": [
    {
      "owner": "check-json",
      "pattern": [
        {
          "regexp": "^(.+):\\s(.+\\sline\\s(\\d+)\\scolumn\\s(\\d+).+)$",
          "file": 1,
          "message": 2,
          "line": 3,
          "column": 4
        }
      ]
    }
  ]
}
.github/workflows/matchers/hadolint.json (vendored)
@@ -1,16 +0,0 @@
{
  "problemMatcher": [
    {
      "owner": "hadolint",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+)\\s+((DL\\d{4}).+)$",
          "file": 1,
          "line": 2,
          "message": 3,
          "code": 4
        }
      ]
    }
  ]
}
.github/workflows/matchers/mypy.json (vendored)
@@ -1,16 +0,0 @@
{
  "problemMatcher": [
    {
      "owner": "mypy",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+):\\s(error|warning):\\s(.+)$",
          "file": 1,
          "line": 2,
          "severity": 3,
          "message": 4
        }
      ]
    }
  ]
}
.github/workflows/matchers/pylint.json (vendored)
@@ -1,32 +0,0 @@
{
  "problemMatcher": [
    {
      "owner": "pylint-error",
      "severity": "error",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+):(\\d+):\\s(([EF]\\d{4}):\\s.+)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4,
          "code": 5
        }
      ]
    },
    {
      "owner": "pylint-warning",
      "severity": "warning",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+):(\\d+):\\s(([CRW]\\d{4}):\\s.+)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4,
          "code": 5
        }
      ]
    }
  ]
}
.github/workflows/matchers/python.json (vendored)
@@ -1,18 +0,0 @@
{
  "problemMatcher": [
    {
      "owner": "python",
      "pattern": [
        {
          "regexp": "^\\s*File\\s\\\"(.*)\\\",\\sline\\s(\\d+),\\sin\\s(.*)$",
          "file": 1,
          "line": 2
        },
        {
          "regexp": "^\\s*raise\\s(.*)\\(\\'(.*)\\'\\)$",
          "message": 2
        }
      ]
    }
  ]
}
.github/workflows/release-drafter.yml (vendored)
@@ -1,44 +0,0 @@
name: Release Drafter

on:
  push:
    branches:
      - main

jobs:
  update_release_draft:
    runs-on: ubuntu-latest
    name: Release Drafter
    steps:
      - name: Checkout the repository
        uses: actions/checkout@v4.2.2
        with:
          fetch-depth: 0

      - name: Find Next Version
        id: version
        run: |
          declare -i newpost
          latest=$(git describe --tags $(git rev-list --tags --max-count=1))
          latestpre=$(echo "$latest" | awk '{split($0,a,"."); print a[1] "." a[2]}')
          datepre=$(date --utc '+%Y.%m')

          if [[ "$latestpre" == "$datepre" ]]; then
            latestpost=$(echo "$latest" | awk '{split($0,a,"."); print a[3]}')
            newpost=$latestpost+1
          else
            newpost=0
          fi

          echo Current version: $latest
          echo New target version: $datepre.$newpost
          echo "version=$datepre.$newpost" >> "$GITHUB_OUTPUT"

      - name: Run Release Drafter
        uses: release-drafter/release-drafter@v6.1.0
        with:
          tag: ${{ steps.version.outputs.version }}
          name: ${{ steps.version.outputs.version }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/restrict-task-creation.yml (vendored)
@@ -1,58 +0,0 @@
name: Restrict task creation

# yamllint disable-line rule:truthy
on:
  issues:
    types: [opened]

jobs:
  check-authorization:
    runs-on: ubuntu-latest
    # Only run if this is a Task issue type (from the issue form)
    if: github.event.issue.issue_type == 'Task'
    steps:
      - name: Check if user is authorized
        uses: actions/github-script@v7
        with:
          script: |
            const issueAuthor = context.payload.issue.user.login;

            // Check if user is an organization member
            try {
              await github.rest.orgs.checkMembershipForUser({
                org: 'home-assistant',
                username: issueAuthor
              });
              console.log(`✅ ${issueAuthor} is an organization member`);
              return; // Authorized
            } catch (error) {
              console.log(`❌ ${issueAuthor} is not authorized to create Task issues`);
            }

            // Close the issue with a comment
            await github.rest.issues.createComment({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
              body: `Hi @${issueAuthor}, thank you for your contribution!\n\n` +
                    `Task issues are restricted to Open Home Foundation staff and authorized contributors.\n\n` +
                    `If you would like to:\n` +
                    `- Report a bug: Please use the [bug report form](https://github.com/home-assistant/supervisor/issues/new?template=bug_report.yml)\n` +
                    `- Request a feature: Please submit to [Feature Requests](https://github.com/orgs/home-assistant/discussions)\n\n` +
                    `If you believe you should have access to create Task issues, please contact the maintainers.`
            });

            await github.rest.issues.update({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
              state: 'closed'
            });

            // Add a label to indicate this was auto-closed
            await github.rest.issues.addLabels({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
              labels: ['auto-closed']
            });
.github/workflows/sentry.yaml (vendored)
@@ -1,21 +0,0 @@
name: Sentry Release

# yamllint disable-line rule:truthy
on:
  release:
    types: [published, prereleased]

jobs:
  createSentryRelease:
    runs-on: ubuntu-latest
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.2.2
      - name: Sentry Release
        uses: getsentry/action-release@v3.2.0
        env:
          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
          SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
          SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
        with:
          environment: production
.github/workflows/stale.yml (vendored)
@@ -1,39 +0,0 @@
name: Stale

# yamllint disable-line rule:truthy
on:
  schedule:
    - cron: "0 * * * *"

jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/stale@v9.1.0
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          days-before-stale: 30
          days-before-close: 7
          stale-issue-label: "stale"
          exempt-issue-labels: "no-stale,Help%20wanted,help-wanted,pinned,rfc,security"
          stale-issue-message: >
            There hasn't been any activity on this issue recently. Due to the
            high number of incoming GitHub notifications, we have to clean some
            of the old issues, as many of them have already been resolved with
            the latest updates.

            Please make sure to update to the latest version and check if that
            solves the issue. Let us know if that works for you by
            adding a comment 👍

            This issue has now been marked as stale and will be closed if no
            further activity occurs. Thank you for your contributions.

          stale-pr-label: "stale"
          exempt-pr-labels: "no-stale,pinned,rfc,security"
          stale-pr-message: >
            There hasn't been any activity on this pull request recently. This
            pull request has been automatically marked as stale because of that
            and will be closed if no further activity occurs within 7 days.

            Thank you for your contributions.
.github/workflows/update_frontend.yml (vendored)
@@ -1,82 +0,0 @@
name: Update frontend

on:
  schedule: # once a day
    - cron: "0 0 * * *"
  workflow_dispatch:

jobs:
  check-version:
    runs-on: ubuntu-latest
    outputs:
      skip: ${{ steps.check_version.outputs.skip || steps.check_existing_pr.outputs.skip }}
      current_version: ${{ steps.check_version.outputs.current_version }}
      latest_version: ${{ steps.latest_frontend_version.outputs.latest_tag }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Get latest frontend release
        id: latest_frontend_version
        uses: abatilo/release-info-action@v1.3.3
        with:
          owner: home-assistant
          repo: frontend
      - name: Check if version is up to date
        id: check_version
        run: |
          current_version="$(cat .ha-frontend-version)"
          latest_version="${{ steps.latest_frontend_version.outputs.latest_tag }}"
          echo "current_version=${current_version}" >> $GITHUB_OUTPUT
          echo "LATEST_VERSION=${latest_version}" >> $GITHUB_ENV
          if [[ ! "$current_version" < "$latest_version" ]]; then
            echo "Frontend version is up to date"
            echo "skip=true" >> $GITHUB_OUTPUT
          fi
      - name: Check if there is no open PR with this version
        if: steps.check_version.outputs.skip != 'true'
        id: check_existing_pr
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          PR=$(gh pr list --state open --base main --json title --search "Update frontend to version $LATEST_VERSION")
          if [[ "$PR" != "[]" ]]; then
            echo "Skipping - There is already a PR open for version $LATEST_VERSION"
            echo "skip=true" >> $GITHUB_OUTPUT
          fi
  create-pr:
    runs-on: ubuntu-latest
    needs: check-version
    if: needs.check-version.outputs.skip != 'true'
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Clear www folder
        run: |
          rm -rf supervisor/api/panel/*
      - name: Update version file
        run: |
          echo "${{ needs.check-version.outputs.latest_version }}" > .ha-frontend-version
      - name: Download release assets
        uses: robinraju/release-downloader@v1
        with:
          repository: 'home-assistant/frontend'
          tag: ${{ needs.check-version.outputs.latest_version }}
          fileName: home_assistant_frontend_supervisor-${{ needs.check-version.outputs.latest_version }}.tar.gz
          extract: true
          out-file-path: supervisor/api/panel/
      - name: Remove release assets archive
        run: |
          rm -f supervisor/api/panel/home_assistant_frontend_supervisor-*.tar.gz
      - name: Create PR
        uses: peter-evans/create-pull-request@v7
        with:
          commit-message: "Update frontend to version ${{ needs.check-version.outputs.latest_version }}"
          branch: autoupdate-frontend
          base: main
          draft: true
          sign-commits: true
          title: "Update frontend to version ${{ needs.check-version.outputs.latest_version }}"
          body: >
            Update frontend from ${{ needs.check-version.outputs.current_version }} to
            [${{ needs.check-version.outputs.latest_version }}](https://github.com/home-assistant/frontend/releases/tag/${{ needs.check-version.outputs.latest_version }})
.gitignore (vendored)
@@ -95,8 +95,3 @@ ENV/
 .vscode/*
 !.vscode/cSpell.json
 !.vscode/tasks.json
-!.vscode/launch.json
-
-# mypy
-/.mypy_cache/*
-/.dmypy.json
.gitmodules (vendored, new file)
@@ -0,0 +1,4 @@
[submodule "home-assistant-polymer"]
    path = home-assistant-polymer
    url = https://github.com/home-assistant/home-assistant-polymer
    branch = dev

.ha-frontend-version
@@ -1 +0,0 @@
20250401.0
.hadolint.yaml
@@ -1,7 +1,5 @@
 ignored:
-  - DL3003
+  - DL3018
   - DL3006
   - DL3013
-  - DL3018
-  - DL3042
   - SC2155
.pre-commit-config.yaml
@@ -1,27 +0,0 @@
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.11.10
    hooks:
      - id: ruff
        args:
          - --fix
      - id: ruff-format
        files: ^((supervisor|tests)/.+)?[^/]+\.py$
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v5.0.0
    hooks:
      - id: check-executables-have-shebangs
        stages: [manual]
      - id: check-json
  - repo: local
    hooks:
      # Run mypy through our wrapper script in order to get the possible
      # pyenv and/or virtualenv activated; it may not have been e.g. if
      # committing from a GUI tool that was not launched from an activated
      # shell.
      - id: mypy
        name: mypy
        entry: script/run-in-env.sh mypy --ignore-missing-imports
        language: script
        types_or: [python, pyi]
        files: ^supervisor/.+\.(py|pyi)$
.vcnignore
@@ -1,21 +0,0 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# Distribution / packaging
*.egg-info/

# General files
.git
.github
.devcontainer
.vscode
.tox

# Data
home-assistant-polymer/
script/
tests/
data/
venv/
.vscode/launch.json (vendored)
@@ -1,25 +0,0 @@
{
  "version": "0.2.0",
  "configurations": [
    {
      "name": "Supervisor remote debug",
      "type": "python",
      "request": "attach",
      "port": 33333,
      "host": "172.30.32.2",
      "pathMappings": [
        {
          "localRoot": "${workspaceFolder}",
          "remoteRoot": "/usr/src/supervisor"
        }
      ]
    },
    {
      "name": "Debug Tests",
      "type": "python",
      "request": "test",
      "console": "internalConsole",
      "justMyCode": false
    }
  ]
}
.vscode/tasks.json (vendored)
@@ -1,111 +1,92 @@
 {
   "version": "2.0.0",
   "tasks": [
     {
-      "label": "Run Supervisor",
+      "label": "Run Testenv",
       "type": "shell",
-      "command": "supervisor_run",
+      "command": "./scripts/test_env.sh",
       "group": {
         "kind": "test",
-        "isDefault": true
+        "isDefault": true,
       },
       "presentation": {
         "reveal": "always",
         "panel": "new"
       },
       "problemMatcher": []
     },
     {
-      "label": "Run Supervisor CLI",
+      "label": "Run Testenv CLI",
       "type": "shell",
-      "command": "docker exec -ti hassio_cli /usr/bin/cli.sh",
+      "command": "docker run --rm -ti -v /etc/machine-id:/etc/machine-id --network=hassio --add-host hassio:172.30.32.2 homeassistant/amd64-hassio-cli:dev",
       "group": {
         "kind": "test",
-        "isDefault": true
+        "isDefault": true,
       },
       "presentation": {
         "reveal": "always",
         "panel": "new"
       },
       "problemMatcher": []
     },
     {
-      "label": "Update Supervisor Panel",
+      "label": "Update UI",
       "type": "shell",
-      "command": "LOKALISE_TOKEN='${input:localiseToken}' ./scripts/update-frontend.sh",
+      "command": "./scripts/update-frontend.sh",
       "group": {
         "kind": "build",
         "isDefault": true
       },
       "presentation": {
         "reveal": "always",
         "panel": "new"
       },
       "problemMatcher": []
     },
     {
       "label": "Pytest",
       "type": "shell",
       "command": "pytest --timeout=10 tests",
       "group": {
         "kind": "test",
-        "isDefault": true
+        "isDefault": true,
       },
       "presentation": {
         "reveal": "always",
         "panel": "new"
       },
       "problemMatcher": []
     },
     {
-      "label": "Ruff Check",
+      "label": "Flake8",
       "type": "shell",
-      "command": "ruff check --fix supervisor tests",
+      "command": "flake8 hassio tests",
       "group": {
         "kind": "test",
-        "isDefault": true
+        "isDefault": true,
       },
       "presentation": {
         "reveal": "always",
         "panel": "new"
       },
       "problemMatcher": []
     },
     {
-      "label": "Ruff Format",
-      "type": "shell",
-      "command": "ruff format supervisor tests",
-      "group": {
-        "kind": "test",
-        "isDefault": true
-      },
-      "presentation": {
-        "reveal": "always",
-        "panel": "new"
-      },
-      "problemMatcher": []
-    },
-    {
       "label": "Pylint",
       "type": "shell",
-      "command": "pylint supervisor",
-      "dependsOn": ["Install all Requirements"],
+      "command": "pylint hassio",
+      "dependsOn": [
+        "Install all Requirements"
+      ],
       "group": {
         "kind": "test",
-        "isDefault": true
+        "isDefault": true,
       },
       "presentation": {
         "reveal": "always",
         "panel": "new"
       },
       "problemMatcher": []
     }
-  ],
-  "inputs": [
-    {
-      "id": "localiseToken",
-      "type": "promptString",
-      "description": "Paste your lokalise token to download frontend translations"
-    }
-  ]
+  ]
 }
API.md (new file)
@@ -0,0 +1,874 @@
# Hass.io

## Hass.io RESTful API

Interface for Home Assistant to control things from the Supervisor.

On error / Code 400:

```json
{
  "result": "error",
  "message": ""
}
```

On success / Code 200:

```json
{
  "result": "ok",
  "data": {}
}
```

To access the API you need to set the `X-HASSIO-KEY` header; the token is available to add-ons/Home Assistant via the `HASSIO_TOKEN` environment variable.
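As a minimal sketch of such a request (not part of the original document): it assumes the injected `HASSIO_TOKEN` variable and the `hassio` hostname that other tooling in this repository maps to 172.30.32.2; adjust both for your environment.

```bash
# Hedged sketch: query Supervisor info from inside an add-on.
# HASSIO_TOKEN and the "hassio" hostname are assumptions, not guarantees.
curl -sSL \
  -H "X-HASSIO-KEY: ${HASSIO_TOKEN}" \
  http://hassio/supervisor/info
```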
### Hass.io

- GET `/supervisor/ping`

This API call doesn't need a token.

- GET `/supervisor/info`

The add-ons listed in `addons` are only the installed ones.

```json
{
  "version": "INSTALL_VERSION",
  "last_version": "LAST_VERSION",
  "arch": "armhf|aarch64|i386|amd64",
  "channel": "stable|beta|dev",
  "timezone": "TIMEZONE",
  "logging": "debug|info|warning|error|critical",
  "ip_address": "ip address",
  "wait_boot": "int",
  "debug": "bool",
  "debug_block": "bool",
  "addons": [
    {
      "name": "xy bla",
      "slug": "xy",
      "description": "description",
      "repository": "12345678|null",
      "version": "LAST_VERSION",
      "installed": "INSTALL_VERSION",
      "icon": "bool",
      "logo": "bool",
      "state": "started|stopped"
    }
  ],
  "addons_repositories": ["REPO_URL"]
}
```

- POST `/supervisor/update`

Optional:

```json
{
  "version": "VERSION"
}
```

- POST `/supervisor/options`

```json
{
  "channel": "stable|beta|dev",
  "timezone": "TIMEZONE",
  "wait_boot": "int",
  "debug": "bool",
  "debug_block": "bool",
  "logging": "debug|info|warning|error|critical",
  "addons_repositories": ["REPO_URL"]
}
```

- POST `/supervisor/reload`

Reload add-ons/version.

- GET `/supervisor/logs`

Output is the raw Docker log.

- GET `/supervisor/stats`

```json
{
  "cpu_percent": 0.0,
  "memory_usage": 283123,
  "memory_limit": 329392,
  "memory_percent": 1.4,
  "network_tx": 0,
  "network_rx": 0,
  "blk_read": 0,
  "blk_write": 0
}
```

- GET `/supervisor/repair`

Repair overlayfs issues and restore lost images.

### Snapshot

- GET `/snapshots`

```json
{
  "snapshots": [
    {
      "slug": "SLUG",
      "date": "ISO",
      "name": "Custom name",
      "type": "full|partial",
      "protected": "bool"
    }
  ]
}
```

- POST `/snapshots/reload`

- POST `/snapshots/new/upload`

return:

```json
{
  "slug": ""
}
```

- POST `/snapshots/new/full`

```json
{
  "name": "Optional",
  "password": "Optional"
}
```

return:

```json
{
  "slug": ""
}
```

- POST `/snapshots/new/partial`

```json
{
  "name": "Optional",
  "addons": ["ADDON_SLUG"],
  "folders": ["FOLDER_NAME"],
  "password": "Optional"
}
```

return:

```json
{
  "slug": ""
}
```

- GET `/snapshots/{slug}/info`

```json
{
  "slug": "SNAPSHOT ID",
  "type": "full|partial",
  "name": "custom snapshot name / description",
  "date": "ISO",
  "size": "SIZE_IN_MB",
  "protected": "bool",
  "homeassistant": "version",
  "addons": [
    {
      "slug": "ADDON_SLUG",
      "name": "NAME",
      "version": "INSTALLED_VERSION",
      "size": "SIZE_IN_MB"
    }
  ],
  "repositories": ["URL"],
  "folders": ["NAME"]
}
```

- POST `/snapshots/{slug}/remove`

- GET `/snapshots/{slug}/download`

- POST `/snapshots/{slug}/restore/full`

```json
{
  "password": "Optional"
}
```

- POST `/snapshots/{slug}/restore/partial`

```json
{
  "homeassistant": "bool",
  "addons": ["ADDON_SLUG"],
  "folders": ["FOLDER_NAME"],
  "password": "Optional"
}
```

### Host

- POST `/host/reload`

- POST `/host/shutdown`

- POST `/host/reboot`

- GET `/host/info`

```json
{
  "hostname": "hostname|null",
  "features": ["shutdown", "reboot", "hostname", "services", "hassos"],
  "operating_system": "HassOS XY|Ubuntu 16.4|null",
  "kernel": "4.15.7|null",
  "chassis": "specific|null",
  "deployment": "stable|beta|dev|null",
  "cpe": "xy|null"
}
```

- POST `/host/options`

```json
{
  "hostname": ""
}
```

#### Services

- GET `/host/services`

```json
{
  "services": [
    {
      "name": "xy.service",
      "description": "XY ...",
      "state": "active|"
    }
  ]
}
```

- POST `/host/service/{unit}/stop`

- POST `/host/service/{unit}/start`

- POST `/host/service/{unit}/reload`

### HassOS

- GET `/hassos/info`

```json
{
  "version": "2.3",
  "version_cli": "7",
  "version_latest": "2.4",
  "version_cli_latest": "8",
  "board": "ova|rpi",
  "boot": "rauc boot slot"
}
```

- POST `/hassos/update`

```json
{
  "version": "optional"
}
```

- POST `/hassos/update/cli`

```json
{
  "version": "optional"
}
```

- POST `/hassos/config/sync`

Load host configs from a USB stick.

### Hardware

- GET `/hardware/info`

```json
{
  "serial": ["/dev/xy"],
  "input": ["Input device name"],
  "disk": ["/dev/sdax"],
  "gpio": ["gpiochip0", "gpiochip100"],
  "audio": {
    "CARD_ID": {
      "name": "xy",
      "type": "microphone",
      "devices": [
        {
          "chan_id": "channel ID",
          "chan_type": "type of device"
        }
      ]
    }
  }
}
```

- GET `/hardware/audio`

```json
{
  "audio": {
    "input": {
      "0,0": "Mic"
    },
    "output": {
      "1,0": "Jack",
      "1,1": "HDMI"
    }
  }
}
```

- POST `/hardware/trigger`

Trigger a udev reload.

### Home Assistant

- GET `/homeassistant/info`

```json
{
  "version": "INSTALL_VERSION",
  "last_version": "LAST_VERSION",
  "arch": "arch",
  "machine": "Image machine type",
  "ip_address": "ip address",
  "image": "str",
  "custom": "bool -> if custom image",
  "boot": "bool",
  "port": 8123,
  "ssl": "bool",
  "watchdog": "bool",
  "wait_boot": 600
}
```

- POST `/homeassistant/update`

Optional:

```json
{
  "version": "VERSION"
}
```

- GET `/homeassistant/logs`

Output is the raw Docker log.

- POST `/homeassistant/restart`
- POST `/homeassistant/check`
- POST `/homeassistant/start`
- POST `/homeassistant/stop`
- POST `/homeassistant/rebuild`

- POST `/homeassistant/options`

```json
{
  "image": "Optional|null",
  "last_version": "Optional for custom image|null",
  "port": "port for access hass",
  "ssl": "bool",
  "refresh_token": "",
  "watchdog": "bool",
  "wait_boot": 600
}
```

Setting `image` and `last_version` to `null` resets these options.

- POST/GET `/homeassistant/api`

Proxy to the real Home Assistant instance.

- GET `/homeassistant/websocket`

Proxy to the real websocket instance.

- GET `/homeassistant/stats`

```json
{
  "cpu_percent": 0.0,
  "memory_usage": 283123,
  "memory_limit": 329392,
  "memory_percent": 1.4,
  "network_tx": 0,
  "network_rx": 0,
  "blk_read": 0,
  "blk_write": 0
}
```

### RESTful for API addons

If an add-on wants to call itself, it can use `/addons/self/...`.

- GET `/addons`

Get all available add-ons.

```json
{
  "addons": [
    {
      "name": "xy bla",
      "slug": "xy",
      "description": "description",
      "advanced": "bool",
      "stage": "stable|experimental|deprecated",
      "repository": "core|local|REP_ID",
      "version": "LAST_VERSION",
      "installed": "none|INSTALL_VERSION",
      "detached": "bool",
      "available": "bool",
      "build": "bool",
      "url": "null|url",
      "icon": "bool",
      "logo": "bool"
    }
  ],
  "repositories": [
    {
      "slug": "12345678",
      "name": "Repository Name|unknown",
      "source": "URL_OF_REPOSITORY",
      "url": "WEBSITE|REPOSITORY",
      "maintainer": "BLA BLU <fla@dld.ch>|unknown"
    }
  ]
}
```

- POST `/addons/reload`
- GET `/addons/{addon}/info`

```json
{
  "name": "xy bla",
  "slug": "xdssd_xybla",
  "hostname": "xdssd-xybla",
  "dns": [],
  "description": "description",
  "long_description": "null|markdown",
  "auto_update": "bool",
  "url": "null|url of addon",
  "detached": "bool",
  "available": "bool",
  "advanced": "bool",
  "stage": "stable|experimental|deprecated",
  "arch": ["armhf", "aarch64", "i386", "amd64"],
  "machine": "[raspberrypi2, tinker]",
  "homeassistant": "null|min Home Assistant version",
  "repository": "12345678|null",
  "version": "null|VERSION_INSTALLED",
  "last_version": "LAST_VERSION",
  "state": "none|started|stopped",
  "boot": "auto|manual",
  "build": "bool",
  "options": "{}",
  "schema": "{}|null",
  "network": "{}|null",
  "network_description": "{}|null",
  "host_network": "bool",
  "host_pid": "bool",
  "host_ipc": "bool",
  "host_dbus": "bool",
  "privileged": ["NET_ADMIN", "SYS_ADMIN"],
  "apparmor": "disable|default|profile",
  "devices": ["/dev/xy"],
  "udev": "bool",
  "auto_uart": "bool",
  "icon": "bool",
  "logo": "bool",
  "changelog": "bool",
  "documentation": "bool",
  "hassio_api": "bool",
  "hassio_role": "default|homeassistant|manager|admin",
  "homeassistant_api": "bool",
  "auth_api": "bool",
  "full_access": "bool",
  "protected": "bool",
  "rating": "1-6",
  "stdin": "bool",
  "webui": "null|http(s)://[HOST]:port/xy/zx",
  "gpio": "bool",
  "kernel_modules": "bool",
  "devicetree": "bool",
  "docker_api": "bool",
  "audio": "bool",
  "audio_input": "null|0,0",
  "audio_output": "null|0,0",
  "services_role": "['service:access']",
  "discovery": "['service']",
  "ip_address": "ip address",
  "ingress": "bool",
  "ingress_entry": "null|/api/hassio_ingress/slug",
  "ingress_url": "null|/api/hassio_ingress/slug/entry.html",
  "ingress_port": "null|int",
  "ingress_panel": "null|bool"
}
```

- GET `/addons/{addon}/icon`

- GET `/addons/{addon}/logo`

- GET `/addons/{addon}/changelog`

- GET `/addons/{addon}/documentation`

- POST `/addons/{addon}/options`

```json
{
  "boot": "auto|manual",
  "auto_update": "bool",
  "network": {
    "CONTAINER": "port|[ip, port]"
  },
  "options": {},
  "audio_output": "null|0,0",
  "audio_input": "null|0,0",
  "ingress_panel": "bool"
}
```

To reset custom network/audio/options, set them to `null`.

- POST `/addons/{addon}/security`

This function is not callable by the add-on itself.

```json
{
  "protected": "bool"
}
```

- POST `/addons/{addon}/start`

- POST `/addons/{addon}/stop`

- POST `/addons/{addon}/install`

- POST `/addons/{addon}/uninstall`

- POST `/addons/{addon}/update`

- GET `/addons/{addon}/logs`

Output is the raw Docker log.

- POST `/addons/{addon}/restart`

- POST `/addons/{addon}/rebuild`

Only supported for locally built add-ons.

- POST `/addons/{addon}/stdin`

Write data to the add-on's stdin.

- GET `/addons/{addon}/stats`

```json
{
  "cpu_percent": 0.0,
  "memory_usage": 283123,
  "memory_limit": 329392,
  "memory_percent": 1.4,
  "network_tx": 0,
  "network_rx": 0,
  "blk_read": 0,
  "blk_write": 0
}
```

### ingress

- POST `/ingress/session`

Create a new session for access to the ingress service.

```json
{
  "session": "token"
}
```

- GET `/ingress/panels`

Return a list of enabled panels.

```json
{
  "panels": {
    "addon_slug": {
      "enable": "boolean",
      "icon": "mdi:...",
      "title": "title",
      "admin": "boolean"
    }
  }
}
```

- VIEW `/ingress/{token}`

Ingress WebUI for this add-on. The add-on needs to support HASS Auth!
Needs an ingress session as a cookie.
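A hedged sketch of that flow follows: the `.data.session` path matches the success envelope shown at the top of this file, but the cookie name `ingress_session`, the `jq` dependency, and the `ADDON_INGRESS_TOKEN` placeholder are assumptions not confirmed by this document.

```bash
# Create an ingress session, then pass it as a cookie when opening the
# add-on's ingress view. ADDON_INGRESS_TOKEN is a hypothetical placeholder.
session=$(curl -sSL -X POST -H "X-HASSIO-KEY: ${HASSIO_TOKEN}" \
  http://hassio/ingress/session | jq -r '.data.session')
curl -sSL -H "Cookie: ingress_session=${session}" \
  "http://hassio/ingress/${ADDON_INGRESS_TOKEN}"
```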
### discovery

- GET `/discovery`

```json
{
  "discovery": [
    {
      "addon": "slug",
      "service": "name",
      "uuid": "uuid",
      "config": {}
    }
  ]
}
```

- GET `/discovery/{UUID}`

```json
{
  "addon": "slug",
  "service": "name",
  "uuid": "uuid",
  "config": {}
}
```

- POST `/discovery`

```json
{
  "service": "name",
  "config": {}
}
```

return:

```json
{
  "uuid": "uuid"
}
```

- DEL `/discovery/{UUID}`

### Services

- GET `/services`

```json
{
  "services": [
    {
      "slug": "name",
      "available": "bool",
      "providers": "list"
    }
  ]
}
```

#### MQTT

- GET `/services/mqtt`

```json
{
  "addon": "name",
  "host": "xy",
  "port": "8883",
  "ssl": "bool",
  "username": "optional",
  "password": "optional",
  "protocol": "3.1.1"
}
```

- POST `/services/mqtt`

```json
{
  "host": "xy",
  "port": "8883",
  "ssl": "bool|optional",
  "username": "optional",
  "password": "optional",
  "protocol": "3.1.1"
}
```

- DEL `/services/mqtt`

#### MySQL

- GET `/services/mysql`

```json
{
  "addon": "name",
  "host": "xy",
  "port": "8883",
  "username": "optional",
  "password": "optional"
}
```

- POST `/services/mysql`

```json
{
  "host": "xy",
  "port": "8883",
  "username": "optional",
  "password": "optional"
}
```

- DEL `/services/mysql`

### Misc

- GET `/info`

```json
{
  "supervisor": "version",
  "homeassistant": "version",
  "hassos": "null|version",
  "hostname": "name",
  "machine": "type",
  "arch": "arch",
  "supported_arch": ["arch1", "arch2"],
  "channel": "stable|beta|dev",
  "logging": "debug|info|warning|error|critical",
  "timezone": "Europe/Zurich"
}
```

### DNS

- GET `/dns/info`

```json
{
  "host": "ip-address",
  "version": "1",
  "latest_version": "2",
  "servers": ["dns://8.8.8.8"],
  "locals": ["dns://xy"]
}
```

- POST `/dns/options`

```json
{
  "servers": ["dns://8.8.8.8"]
}
```

- POST `/dns/update`

```json
{
  "version": "VERSION"
}
```

- POST `/dns/restart`

- POST `/dns/reset`

- GET `/dns/logs`

- GET `/dns/stats`

```json
{
  "cpu_percent": 0.0,
  "memory_usage": 283123,
  "memory_limit": 329392,
  "memory_percent": 1.4,
  "network_tx": 0,
  "network_rx": 0,
  "blk_read": 0,
  "blk_write": 0
}
```

### Auth / SSO API

You can use Home Assistant's user system; the Supervisor handles this auth
system.

You can call POST `/auth`.

We support:

- JSON: `{ "user|name": "...", "password": "..." }`
- application/x-www-form-urlencoded: `user|name=...&password=...`
- BasicAuth
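For illustration, a sketch of the JSON variant (assuming the same `HASSIO_TOKEN`/`http://hassio` conventions as in the earlier example; the credentials are placeholders):

```bash
# Authenticate a Home Assistant user through the Supervisor auth backend.
curl -sSL -X POST \
  -H "X-HASSIO-KEY: ${HASSIO_TOKEN}" \
  -H "Content-Type: application/json" \
  -d '{"username": "xy", "password": "example-password"}' \
  http://hassio/auth
```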
- POST `/auth/reset`

```json
{
  "username": "xy",
  "password": "new-password"
}
```
Dockerfile
@@ -1,53 +1,37 @@
 ARG BUILD_FROM
-FROM ${BUILD_FROM}
-
-ENV \
-    S6_SERVICES_GRACETIME=10000 \
-    SUPERVISOR_API=http://localhost \
-    CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1 \
-    UV_SYSTEM_PYTHON=true
-
-ARG \
-    COSIGN_VERSION \
-    BUILD_ARCH \
-    QEMU_CPU
+FROM $BUILD_FROM
 
 # Install base
+RUN apk add --no-cache \
+    openssl \
+    libffi \
+    musl \
+    git \
+    socat \
+    glib \
+    eudev \
+    eudev-libs
+
+ARG BUILD_ARCH
 WORKDIR /usr/src
-RUN \
-    set -x \
-    && apk add --no-cache \
-        findutils \
-        eudev \
-        eudev-libs \
-        git \
-        libffi \
-        libpulse \
-        musl \
-        openssl \
-        yaml \
-    \
-    && curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \
-    && chmod a+x /usr/bin/cosign \
-    && pip3 install uv==0.6.17
 
 # Install requirements
 COPY requirements.txt .
-RUN \
-    if [ "${BUILD_ARCH}" = "i386" ]; then \
-        setarch="linux32"; \
-    else \
-        setarch=""; \
-    fi \
-    && ${setarch} uv pip install --compile-bytecode --no-cache --no-build -r requirements.txt \
+RUN export MAKEFLAGS="-j$(nproc)" \
+    && pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links \
+        "https://wheels.home-assistant.io/alpine-$(cut -d '.' -f 1-2 < /etc/alpine-release)/${BUILD_ARCH}/" \
+        -r ./requirements.txt \
     && rm -f requirements.txt
 
-# Install Home Assistant Supervisor
-COPY . supervisor
-RUN \
-    uv pip install --no-cache -e ./supervisor \
-    && python3 -m compileall ./supervisor/supervisor
+# Install HassIO
+COPY . hassio
+RUN pip3 install --no-cache-dir -e ./hassio \
+    && python3 -m compileall ./hassio/hassio
+
+# Initialize udev daemon, handle CMD
+COPY entry.sh /bin/
+ENTRYPOINT ["/bin/entry.sh"]
 
 WORKDIR /
-COPY rootfs /
+CMD [ "python3", "-m", "hassio" ]
4  LICENSE
@@ -178,7 +178,7 @@
    APPENDIX: How to apply the Apache License to your work.
 
       To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
+      boilerplate notice, with the fields enclosed by brackets "{}"
       replaced with your own identifying information. (Don't include
       the brackets!)  The text should be enclosed in the appropriate
       comment syntax for the file format. We also recommend that a
@@ -186,7 +186,7 @@
    same "printed page" as the copyright notice for easier
    identification within third-party archives.
 
-   Copyright [yyyy] [name of copyright owner]
+   Copyright 2017 Pascal Vizeli
 
    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
MANIFEST.in
@@ -1,3 +1,3 @@
 include LICENSE.md
-graft supervisor
+graft hassio
 recursive-exclude * *.py[co]
42  README.md
@@ -1,34 +1,30 @@
-# Home Assistant Supervisor
+[](https://dev.azure.com/home-assistant/Hass.io/_build/latest?definitionId=2&branchName=dev)
+
+# Hass.io
 
 ## First private cloud solution for home automation
 
-Home Assistant (former Hass.io) is a container-based system for managing your
-Home Assistant Core installation and related applications. The system is
-controlled via Home Assistant which communicates with the Supervisor. The
-Supervisor provides an API to manage the installation. This includes changing
-network settings or installing and updating software.
+Hass.io is a Docker-based system for managing your Home Assistant installation
+and related applications. The system is controlled via Home Assistant which
+communicates with the Supervisor. The Supervisor provides an API to manage the
+installation. This includes changing network settings or installing
+and updating software.
 
 ## Installation
 
-Installation instructions can be found at https://home-assistant.io/getting-started.
+Installation instructions can be found at <https://home-assistant.io/hassio>.
 
 ## Development
 
-For small changes and bugfixes you can just follow this, but for significant changes open a RFC first.
-Development instructions can be found [here][development].
+The development of the supervisor is a bit tricky. Not difficult but tricky.
 
-## Release
-
-Releases are done in 3 stages (channels) with this structure:
-
-1. Pull requests are merged to the `main` branch.
-2. A new build is pushed to the `dev` stage.
-3. Releases are published.
-4. A new build is pushed to the `beta` stage.
-5. The [`stable.json`][stable] file is updated.
-6. The build that was pushed to `beta` will now be pushed to `stable`.
-
-[development]: https://developers.home-assistant.io/docs/supervisor/development
-[stable]: https://github.com/home-assistant/version/blob/master/stable.json
-
-[](https://www.openhomefoundation.org/)
+- You can use the builder to build your supervisor: https://github.com/home-assistant/hassio-builder
+- Go into a HassOS device or VM and pull your supervisor.
+- Set the developer modus with cli `hassio supervisor options --channel=dev`
+- Tag it as `homeassistant/xy-hassio-supervisor:latest`
+- Restart the service like `systemctl restart hassos-supervisor | journalctl -fu hassos-supervisor`
+- Test your changes
+
+Small Bugfix or improvements, make a PR. Significant change makes first an RFC.
48  azure-pipelines-ci.yml  Normal file
@@ -0,0 +1,48 @@
# https://dev.azure.com/home-assistant

trigger:
  batch: true
  branches:
    include:
      - master
      - dev
pr:
  - dev
variables:
  - name: versionHadolint
    value: "v1.16.3"

jobs:
  - job: "Tox"
    pool:
      vmImage: "ubuntu-latest"
    steps:
      - task: UsePythonVersion@0
        displayName: "Use Python 3.7"
        inputs:
          versionSpec: "3.7"
      - script: pip install tox
        displayName: "Install Tox"
      - script: tox
        displayName: "Run Tox"
  - job: "JQ"
    pool:
      vmImage: "ubuntu-latest"
    steps:
      - script: sudo apt-get install -y jq
        displayName: "Install JQ"
      - bash: |
          shopt -s globstar
          cat **/*.json | jq '.'
        displayName: "Run JQ"
  - job: "Hadolint"
    pool:
      vmImage: "ubuntu-latest"
    steps:
      - script: sudo docker pull hadolint/hadolint:$(versionHadolint)
        displayName: "Install Hadolint"
      - script: |
          sudo docker run --rm -i \
            -v $(pwd)/.hadolint.yaml:/.hadolint.yaml:ro \
            hadolint/hadolint:$(versionHadolint) < Dockerfile
        displayName: "Run Hadolint"
56  azure-pipelines-release.yml  Normal file
@@ -0,0 +1,56 @@
# https://dev.azure.com/home-assistant

trigger:
  batch: true
  branches:
    exclude:
      - "*"
  tags:
    include:
      - "*"
pr: none
variables:
  - name: basePythonTag
    value: "3.7-alpine3.11"
  - name: versionBuilder
    value: "6.9"
  - group: docker

jobs:
  - job: "VersionValidate"
    pool:
      vmImage: "ubuntu-latest"
    steps:
      - task: UsePythonVersion@0
        displayName: "Use Python 3.7"
        inputs:
          versionSpec: "3.7"
      - script: |
          setup_version="$(python setup.py -V)"
          branch_version="$(Build.SourceBranchName)"

          if [ "${branch_version}" == "dev" ]; then
            exit 0
          elif [ "${setup_version}" != "${branch_version}" ]; then
            echo "Version of tag ${branch_version} don't match with ${setup_version}!"
            exit 1
          fi
        displayName: "Check version of branch/tag"
  - job: "Release"
    dependsOn:
      - "VersionValidate"
    pool:
      vmImage: "ubuntu-latest"
    steps:
      - script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
        displayName: "Docker hub login"
      - script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
        displayName: "Install Builder"
      - script: |
          sudo docker run --rm --privileged \
            -v ~/.docker:/root/.docker \
            -v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
            homeassistant/amd64-builder:$(versionBuilder) \
            --supervisor $(basePythonTag) --version $(Build.SourceBranchName) \
            --all -t /data --docker-hub homeassistant
        displayName: "Build Release"
60  azure-pipelines-wheels.yml  Normal file
@@ -0,0 +1,60 @@
# https://dev.azure.com/home-assistant

trigger:
  batch: true
  branches:
    include:
      - dev
pr: none
variables:
  - name: versionWheels
    value: "1.6-3.7-alpine3.11"
  - group: wheels

jobs:
  - job: "Wheels"
    timeoutInMinutes: 360
    pool:
      vmImage: "ubuntu-latest"
    strategy:
      maxParallel: 5
      matrix:
        amd64:
          buildArch: "amd64"
        i386:
          buildArch: "i386"
        armhf:
          buildArch: "armhf"
        armv7:
          buildArch: "armv7"
        aarch64:
          buildArch: "aarch64"
    steps:
      - script: |
          sudo apt-get update
          sudo apt-get install -y --no-install-recommends \
            qemu-user-static \
            binfmt-support \
            curl

          sudo mount binfmt_misc -t binfmt_misc /proc/sys/fs/binfmt_misc
          sudo update-binfmts --enable qemu-arm
          sudo update-binfmts --enable qemu-aarch64
        displayName: "Initial cross build"
      - script: |
          mkdir -p .ssh
          echo -e "-----BEGIN RSA PRIVATE KEY-----\n$(wheelsSSH)\n-----END RSA PRIVATE KEY-----" >> .ssh/id_rsa
          ssh-keyscan -H $(wheelsHost) >> .ssh/known_hosts
          chmod 600 .ssh/*
        displayName: "Install ssh key"
      - script: sudo docker pull homeassistant/$(buildArch)-wheels:$(versionWheels)
        displayName: "Install wheels builder"
      - script: |
          sudo docker run --rm -v $(pwd):/data:ro -v $(pwd)/.ssh:/root/.ssh:rw \
            homeassistant/$(buildArch)-wheels:$(versionWheels) \
            --apk "build-base;libffi-dev;openssl-dev" \
            --index $(wheelsIndex) \
            --requirement requirements.txt \
            --upload rsync \
            --remote wheels@$(wheelsHost):/opt/wheels
        displayName: "Run wheels build"
24  build.yaml
@@ -1,24 +0,0 @@
image: ghcr.io/home-assistant/{arch}-hassio-supervisor
build_from:
  aarch64: ghcr.io/home-assistant/aarch64-base-python:3.13-alpine3.22
  armhf: ghcr.io/home-assistant/armhf-base-python:3.13-alpine3.22
  armv7: ghcr.io/home-assistant/armv7-base-python:3.13-alpine3.22
  amd64: ghcr.io/home-assistant/amd64-base-python:3.13-alpine3.22
  i386: ghcr.io/home-assistant/i386-base-python:3.13-alpine3.22
codenotary:
  signer: notary@home-assistant.io
  base_image: notary@home-assistant.io
cosign:
  base_identity: https://github.com/home-assistant/docker-base/.*
  identity: https://github.com/home-assistant/supervisor/.*
args:
  COSIGN_VERSION: 2.4.3
labels:
  io.hass.type: supervisor
  org.opencontainers.image.title: Home Assistant Supervisor
  org.opencontainers.image.description: Container-based system for managing Home Assistant Core installation
  org.opencontainers.image.source: https://github.com/home-assistant/supervisor
  org.opencontainers.image.authors: The Home Assistant Authors
  org.opencontainers.image.url: https://www.home-assistant.io/
  org.opencontainers.image.documentation: https://www.home-assistant.io/docs/
  org.opencontainers.image.licenses: Apache License 2.0
11  codecov.yaml
@@ -1,11 +0,0 @@
codecov:
  branch: dev
coverage:
  status:
    project:
      default:
        target: 40
        threshold: 0.09
comment: false
github_checks:
  annotations: false
13  entry.sh  Executable file
@@ -0,0 +1,13 @@
#!/bin/bash
set -e

udevd --daemon
udevadm trigger

if CMD="$(command -v "$1")"; then
    shift
    exec "$CMD" "$@"
else
    echo "Command not found: $1"
    exit 1
fi
1  hassio/__init__.py  Normal file
@@ -0,0 +1 @@
"""Init file for Hass.io."""
63  hassio/__main__.py  Normal file
@@ -0,0 +1,63 @@
"""Main file for Hass.io."""
import asyncio
from concurrent.futures import ThreadPoolExecutor
import logging
import sys

from hassio import bootstrap

_LOGGER: logging.Logger = logging.getLogger(__name__)


def initialize_event_loop():
    """Attempt to use uvloop."""
    try:
        # pylint: disable=import-outside-toplevel
        import uvloop

        uvloop.install()
    except ImportError:
        pass

    return asyncio.get_event_loop()


# pylint: disable=invalid-name
if __name__ == "__main__":
    bootstrap.initialize_logging()

    # Init async event loop
    loop = initialize_event_loop()

    # Check if all information are available to setup Hass.io
    if not bootstrap.check_environment():
        sys.exit(1)

    # init executor pool
    executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
    loop.set_default_executor(executor)

    _LOGGER.info("Initialize Hass.io setup")
    coresys = loop.run_until_complete(bootstrap.initialize_coresys())
    loop.run_until_complete(coresys.core.connect())

    bootstrap.supervisor_debugger(coresys)
    bootstrap.migrate_system_env(coresys)

    _LOGGER.info("Setup HassIO")
    loop.run_until_complete(coresys.core.setup())

    loop.call_soon_threadsafe(loop.create_task, coresys.core.start())
    loop.call_soon_threadsafe(bootstrap.reg_signal, loop)

    try:
        _LOGGER.info("Run Hass.io")
        loop.run_forever()
    finally:
        _LOGGER.info("Stopping Hass.io")
        loop.run_until_complete(coresys.core.stop())
        executor.shutdown(wait=False)
        loop.close()

    _LOGGER.info("Close Hass.io")
    sys.exit(0)
331  hassio/addons/__init__.py  Normal file
@@ -0,0 +1,331 @@
"""Init file for Hass.io add-ons."""
import asyncio
from contextlib import suppress
import logging
import tarfile
from typing import Dict, List, Optional, Union

from ..const import BOOT_AUTO, STATE_STARTED
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import (
    AddonsError,
    AddonsNotSupportedError,
    CoreDNSError,
    DockerAPIError,
    HomeAssistantAPIError,
    HostAppArmorError,
)
from ..store.addon import AddonStore
from .addon import Addon
from .data import AddonsData

_LOGGER: logging.Logger = logging.getLogger(__name__)

AnyAddon = Union[Addon, AddonStore]


class AddonManager(CoreSysAttributes):
    """Manage add-ons inside Hass.io."""

    def __init__(self, coresys: CoreSys):
        """Initialize Docker base wrapper."""
        self.coresys: CoreSys = coresys
        self.data: AddonsData = AddonsData(coresys)
        self.local: Dict[str, Addon] = {}
        self.store: Dict[str, AddonStore] = {}

    @property
    def all(self) -> List[AnyAddon]:
        """Return a list of all add-ons."""
        addons = {**self.store, **self.local}
        return list(addons.values())

    @property
    def installed(self) -> List[Addon]:
        """Return a list of all installed add-ons."""
        return list(self.local.values())

    def get(self, addon_slug: str) -> Optional[AnyAddon]:
        """Return an add-on from slug.

        Prio:
         1 - Local
         2 - Store
        """
        if addon_slug in self.local:
            return self.local[addon_slug]
        return self.store.get(addon_slug)

    def from_token(self, token: str) -> Optional[Addon]:
        """Return an add-on from Hass.io token."""
        for addon in self.installed:
            if token == addon.hassio_token:
                return addon
        return None

    async def load(self) -> None:
        """Start up add-on management."""
        tasks = []
        for slug in self.data.system:
            addon = self.local[slug] = Addon(self.coresys, slug)
            tasks.append(addon.load())

        # Run initial tasks
        _LOGGER.info("Found %d installed add-ons", len(tasks))
        if tasks:
            await asyncio.wait(tasks)

        # Sync DNS
        await self.sync_dns()

    async def boot(self, stage: str) -> None:
        """Boot add-ons with mode auto."""
        tasks = []
        for addon in self.installed:
            if addon.boot != BOOT_AUTO or addon.startup != stage:
                continue
            tasks.append(addon.start())

        _LOGGER.info("Phase '%s' start %d add-ons", stage, len(tasks))
        if tasks:
            await asyncio.wait(tasks)
            await asyncio.sleep(self.sys_config.wait_boot)

    async def shutdown(self, stage: str) -> None:
        """Shutdown addons."""
        tasks = []
        for addon in self.installed:
            if await addon.state() != STATE_STARTED or addon.startup != stage:
                continue
            tasks.append(addon.stop())

        _LOGGER.info("Phase '%s' stop %d add-ons", stage, len(tasks))
        if tasks:
            await asyncio.wait(tasks)

    async def install(self, slug: str) -> None:
        """Install an add-on."""
        if slug in self.local:
            _LOGGER.warning("Add-on %s is already installed", slug)
            return
        store = self.store.get(slug)

        if not store:
            _LOGGER.error("Add-on %s not exists", slug)
            raise AddonsError()

        if not store.available:
            _LOGGER.error("Add-on %s not supported on that platform", slug)
            raise AddonsNotSupportedError()

        self.data.install(store)
        addon = Addon(self.coresys, slug)

        if not addon.path_data.is_dir():
            _LOGGER.info("Create Home Assistant add-on data folder %s", addon.path_data)
            addon.path_data.mkdir()

        # Setup/Fix AppArmor profile
        await addon.install_apparmor()

        try:
            await addon.instance.install(store.version, store.image)
        except DockerAPIError:
            self.data.uninstall(addon)
            raise AddonsError() from None
        else:
            self.local[slug] = addon
            _LOGGER.info("Add-on '%s' successfully installed", slug)

    async def uninstall(self, slug: str) -> None:
        """Remove an add-on."""
        if slug not in self.local:
            _LOGGER.warning("Add-on %s is not installed", slug)
            return
        addon = self.local.get(slug)

        try:
            await addon.instance.remove()
        except DockerAPIError:
            raise AddonsError() from None

        await addon.remove_data()

        # Cleanup audio settings
        if addon.path_asound.exists():
            with suppress(OSError):
                addon.path_asound.unlink()

        # Cleanup AppArmor profile
        with suppress(HostAppArmorError):
            await addon.uninstall_apparmor()

        # Cleanup Ingress panel from sidebar
        if addon.ingress_panel:
            addon.ingress_panel = False
            with suppress(HomeAssistantAPIError):
                await self.sys_ingress.update_hass_panel(addon)

        # Cleanup discovery data
        for message in self.sys_discovery.list_messages:
            if message.addon != addon.slug:
                continue
            self.sys_discovery.remove(message)

        # Cleanup services data
        for service in self.sys_services.list_services:
            if addon.slug not in service.active:
                continue
            service.del_service_data(addon)

        self.data.uninstall(addon)
        self.local.pop(slug)

        _LOGGER.info("Add-on '%s' successfully removed", slug)

    async def update(self, slug: str) -> None:
        """Update add-on."""
        if slug not in self.local:
            _LOGGER.error("Add-on %s is not installed", slug)
            raise AddonsError()
        addon = self.local.get(slug)

        if addon.is_detached:
            _LOGGER.error("Add-on %s is not available inside store", slug)
            raise AddonsError()
        store = self.store.get(slug)

        if addon.version == store.version:
            _LOGGER.warning("No update available for add-on %s", slug)
            return

        # Check if available, Maybe something have changed
        if not store.available:
            _LOGGER.error("Add-on %s not supported on that platform", slug)
            raise AddonsNotSupportedError()

        # Update instance
        last_state = await addon.state()
        try:
            await addon.instance.update(store.version, store.image)

            # Cleanup
            with suppress(DockerAPIError):
                await addon.instance.cleanup()
        except DockerAPIError:
            raise AddonsError() from None
        else:
            self.data.update(store)
            _LOGGER.info("Add-on '%s' successfully updated", slug)

        # Setup/Fix AppArmor profile
        await addon.install_apparmor()

        # restore state
        if last_state == STATE_STARTED:
            await addon.start()

    async def rebuild(self, slug: str) -> None:
        """Perform a rebuild of local build add-on."""
        if slug not in self.local:
            _LOGGER.error("Add-on %s is not installed", slug)
            raise AddonsError()
        addon = self.local.get(slug)

        if addon.is_detached:
            _LOGGER.error("Add-on %s is not available inside store", slug)
            raise AddonsError()
        store = self.store.get(slug)

        # Check if a rebuild is possible now
        if addon.version != store.version:
            _LOGGER.error("Version changed, use Update instead Rebuild")
            raise AddonsError()
        if not addon.need_build:
            _LOGGER.error("Can't rebuild a image based add-on")
            raise AddonsNotSupportedError()

        # remove docker container but not addon config
        last_state = await addon.state()
        try:
            await addon.instance.remove()
            await addon.instance.install(addon.version)
        except DockerAPIError:
            raise AddonsError() from None
        else:
            self.data.update(store)
            _LOGGER.info("Add-on '%s' successfully rebuilt", slug)

        # restore state
        if last_state == STATE_STARTED:
            await addon.start()

    async def restore(self, slug: str, tar_file: tarfile.TarFile) -> None:
        """Restore state of an add-on."""
        if slug not in self.local:
            _LOGGER.debug("Add-on %s is not local available for restore", slug)
            addon = Addon(self.coresys, slug)
        else:
            _LOGGER.debug("Add-on %s is local available for restore", slug)
            addon = self.local[slug]

        await addon.restore(tar_file)

        # Check if new
        if slug in self.local:
            return

        _LOGGER.info("Detect new Add-on after restore %s", slug)
        self.local[slug] = addon

    async def repair(self) -> None:
        """Repair local add-ons."""
        needs_repair: List[Addon] = []

        # Evaluate Add-ons to repair
        for addon in self.installed:
            if await addon.instance.exists():
                continue
            needs_repair.append(addon)

        _LOGGER.info("Found %d add-ons to repair", len(needs_repair))
        if not needs_repair:
            return

        for addon in needs_repair:
            _LOGGER.info("Start repair for add-on: %s", addon.slug)
            await self.sys_run_in_executor(
                self.sys_docker.network.stale_cleanup, addon.instance.name
            )

            with suppress(DockerAPIError, KeyError):
                # Need pull a image again
                if not addon.need_build:
                    await addon.instance.install(addon.version, addon.image)
                    continue

                # Need local lookup
                if addon.need_build and not addon.is_detached:
                    store = self.store[addon.slug]
                    # If this add-on is available for rebuild
                    if addon.version == store.version:
                        await addon.instance.install(addon.version, addon.image)
                        continue

            _LOGGER.error("Can't repair %s", addon.slug)
            with suppress(AddonsError):
                await self.uninstall(addon.slug)

    async def sync_dns(self) -> None:
        """Sync add-ons DNS names."""
        # Update hosts
        for addon in self.installed:
            if not await addon.instance.is_running():
                continue
            self.sys_dns.add_host(
                ipv4=addon.ip_address, names=[addon.hostname], write=False
            )

        # Write hosts files
        with suppress(CoreDNSError):
            self.sys_dns.write_hosts()
hassio/addons/addon.py
Normal file
668
hassio/addons/addon.py
Normal file
@ -0,0 +1,668 @@
|
|||||||
|
"""Init file for Hass.io add-ons."""
|
||||||
|
from contextlib import suppress
|
||||||
|
from copy import deepcopy
|
||||||
|
from ipaddress import IPv4Address
|
||||||
|
import logging
|
||||||
|
from pathlib import Path, PurePath
|
||||||
|
import re
|
||||||
|
import secrets
|
||||||
|
import shutil
|
||||||
|
import tarfile
|
||||||
|
from tempfile import TemporaryDirectory
|
||||||
|
from typing import Any, Awaitable, Dict, List, Optional
|
||||||
|
|
||||||
|
import voluptuous as vol
|
||||||
|
from voluptuous.humanize import humanize_error
|
||||||
|
|
||||||
|
from ..const import (
|
||||||
|
ATTR_ACCESS_TOKEN,
|
||||||
|
ATTR_AUDIO_INPUT,
|
||||||
|
ATTR_AUDIO_OUTPUT,
|
||||||
|
ATTR_AUTO_UPDATE,
|
||||||
|
ATTR_BOOT,
|
||||||
|
ATTR_IMAGE,
|
||||||
|
ATTR_INGRESS_ENTRY,
|
||||||
|
ATTR_INGRESS_PANEL,
|
||||||
|
ATTR_INGRESS_PORT,
|
||||||
|
ATTR_INGRESS_TOKEN,
|
||||||
|
ATTR_NETWORK,
|
||||||
|
ATTR_OPTIONS,
|
||||||
|
ATTR_PORTS,
|
||||||
|
ATTR_PROTECTED,
|
||||||
|
ATTR_SCHEMA,
|
||||||
|
ATTR_STATE,
|
||||||
|
ATTR_SYSTEM,
|
||||||
|
ATTR_USER,
|
||||||
|
ATTR_UUID,
|
||||||
|
ATTR_VERSION,
|
||||||
|
DNS_SUFFIX,
|
||||||
|
STATE_STARTED,
|
||||||
|
STATE_STOPPED,
|
||||||
|
)
|
||||||
|
from ..coresys import CoreSys
|
||||||
|
from ..docker.addon import DockerAddon
|
||||||
|
from ..docker.stats import DockerStats
|
||||||
|
from ..exceptions import (
|
||||||
|
AddonsError,
|
||||||
|
AddonsNotSupportedError,
|
||||||
|
DockerAPIError,
|
||||||
|
HostAppArmorError,
|
||||||
|
JsonFileError,
|
||||||
|
)
|
||||||
|
from ..utils.apparmor import adjust_profile
|
||||||
|
from ..utils.json import read_json_file, write_json_file
|
||||||
|
from ..utils.tar import exclude_filter, secure_path
|
||||||
|
from .model import AddonModel, Data
|
||||||
|
from .utils import remove_data
|
||||||
|
from .validate import SCHEMA_ADDON_SNAPSHOT, validate_options
|
||||||
|
|
||||||
|
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
RE_WEBUI = re.compile(
|
||||||
|
r"^(?:(?P<s_prefix>https?)|\[PROTO:(?P<t_proto>\w+)\])"
|
||||||
|
r":\/\/\[HOST\]:\[PORT:(?P<t_port>\d+)\](?P<s_suffix>.*)$"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class Addon(AddonModel):
|
||||||
|
"""Hold data for add-on inside Hass.io."""
|
||||||
|
|
||||||
|
def __init__(self, coresys: CoreSys, slug: str):
|
||||||
|
"""Initialize data holder."""
|
||||||
|
self.coresys: CoreSys = coresys
|
||||||
|
self.instance: DockerAddon = DockerAddon(coresys, self)
|
||||||
|
self.slug: str = slug
|
||||||
|
|
||||||
|
async def load(self) -> None:
|
||||||
|
"""Async initialize of object."""
|
||||||
|
with suppress(DockerAPIError):
|
||||||
|
await self.instance.attach(tag=self.version)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def ip_address(self) -> IPv4Address:
|
||||||
|
"""Return IP of Add-on instance."""
|
||||||
|
return self.instance.ip_address
|
||||||
|
|
||||||
|
@property
|
||||||
|
def data(self) -> Data:
|
||||||
|
"""Return add-on data/config."""
|
||||||
|
return self.sys_addons.data.system[self.slug]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def data_store(self) -> Data:
|
||||||
|
"""Return add-on data from store."""
|
||||||
|
return self.sys_store.data.addons.get(self.slug, self.data)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def persist(self) -> Data:
|
||||||
|
"""Return add-on data/config."""
|
||||||
|
return self.sys_addons.data.user[self.slug]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_installed(self) -> bool:
|
||||||
|
"""Return True if an add-on is installed."""
|
||||||
|
return True
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_detached(self) -> bool:
|
||||||
|
"""Return True if add-on is detached."""
|
||||||
|
return self.slug not in self.sys_store.data.addons
|
||||||
|
|
||||||
|
@property
|
||||||
|
def available(self) -> bool:
|
||||||
|
"""Return True if this add-on is available on this platform."""
|
||||||
|
return self._available(self.data_store)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def version(self) -> Optional[str]:
|
||||||
|
"""Return installed version."""
|
||||||
|
return self.persist[ATTR_VERSION]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def dns(self) -> List[str]:
|
||||||
|
"""Return list of DNS name for that add-on."""
|
||||||
|
return [f"{self.hostname}.{DNS_SUFFIX}"]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def options(self) -> Dict[str, Any]:
|
||||||
|
"""Return options with local changes."""
|
||||||
|
return {**self.data[ATTR_OPTIONS], **self.persist[ATTR_OPTIONS]}
|
||||||
|
|
||||||
|
@options.setter
|
||||||
|
def options(self, value: Optional[Dict[str, Any]]):
|
||||||
|
"""Store user add-on options."""
|
||||||
|
if value is None:
|
||||||
|
self.persist[ATTR_OPTIONS] = {}
|
||||||
|
else:
|
||||||
|
self.persist[ATTR_OPTIONS] = deepcopy(value)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def boot(self) -> bool:
|
||||||
|
"""Return boot config with prio local settings."""
|
||||||
|
return self.persist.get(ATTR_BOOT, super().boot)
|
||||||
|
|
||||||
|
@boot.setter
|
||||||
|
def boot(self, value: bool):
|
||||||
|
"""Store user boot options."""
|
||||||
|
self.persist[ATTR_BOOT] = value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def auto_update(self) -> bool:
|
||||||
|
"""Return if auto update is enable."""
|
||||||
|
return self.persist.get(ATTR_AUTO_UPDATE, super().auto_update)
|
||||||
|
|
||||||
|
@auto_update.setter
|
||||||
|
def auto_update(self, value: bool):
|
||||||
|
"""Set auto update."""
|
||||||
|
self.persist[ATTR_AUTO_UPDATE] = value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def uuid(self) -> str:
|
||||||
|
"""Return an API token for this add-on."""
|
||||||
|
return self.persist[ATTR_UUID]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def hassio_token(self) -> Optional[str]:
|
||||||
|
"""Return access token for Hass.io API."""
|
||||||
|
return self.persist.get(ATTR_ACCESS_TOKEN)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def ingress_token(self) -> Optional[str]:
|
||||||
|
"""Return access token for Hass.io API."""
|
||||||
|
return self.persist.get(ATTR_INGRESS_TOKEN)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def ingress_entry(self) -> Optional[str]:
|
||||||
|
"""Return ingress external URL."""
|
||||||
|
if self.with_ingress:
|
||||||
|
return f"/api/hassio_ingress/{self.ingress_token}"
|
||||||
|
return None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def latest_version(self) -> str:
|
||||||
|
"""Return version of add-on."""
|
||||||
|
return self.data_store[ATTR_VERSION]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def protected(self) -> bool:
|
||||||
|
"""Return if add-on is in protected mode."""
|
||||||
|
return self.persist[ATTR_PROTECTED]
|
||||||
|
|
||||||
|
@protected.setter
|
||||||
|
def protected(self, value: bool):
|
||||||
|
"""Set add-on in protected mode."""
|
||||||
|
self.persist[ATTR_PROTECTED] = value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def ports(self) -> Optional[Dict[str, Optional[int]]]:
|
||||||
|
"""Return ports of add-on."""
|
||||||
|
return self.persist.get(ATTR_NETWORK, super().ports)
|
||||||
|
|
||||||
|
@ports.setter
|
||||||
|
def ports(self, value: Optional[Dict[str, Optional[int]]]):
|
||||||
|
"""Set custom ports of add-on."""
|
||||||
|
if value is None:
|
||||||
|
self.persist.pop(ATTR_NETWORK, None)
|
||||||
|
return
|
||||||
|
|
||||||
|
# Secure map ports to value
|
||||||
|
new_ports = {}
|
||||||
|
for container_port, host_port in value.items():
|
||||||
|
if container_port in self.data.get(ATTR_PORTS, {}):
|
||||||
|
new_ports[container_port] = host_port
|
||||||
|
|
||||||
|
self.persist[ATTR_NETWORK] = new_ports
|
||||||
|
|
||||||
|
@property
|
||||||
|
def ingress_url(self) -> Optional[str]:
|
||||||
|
"""Return URL to ingress url."""
|
||||||
|
if not self.with_ingress:
|
||||||
|
return None
|
||||||
|
|
||||||
|
url = f"/api/hassio_ingress/{self.ingress_token}/"
|
||||||
|
if ATTR_INGRESS_ENTRY in self.data:
|
||||||
|
return f"{url}{self.data[ATTR_INGRESS_ENTRY]}"
|
||||||
|
return url
|
||||||
|
|
||||||
|
@property
|
||||||
|
def webui(self) -> Optional[str]:
|
||||||
|
"""Return URL to webui or None."""
|
||||||
|
url = super().webui
|
||||||
|
if not url:
|
||||||
|
return None
|
||||||
|
webui = RE_WEBUI.match(url)
|
||||||
|
|
||||||
|
# extract arguments
|
||||||
|
t_port = webui.group("t_port")
|
||||||
|
t_proto = webui.group("t_proto")
|
||||||
|
s_prefix = webui.group("s_prefix") or ""
|
||||||
|
s_suffix = webui.group("s_suffix") or ""
|
||||||
|
|
||||||
|
# search host port for this docker port
|
||||||
|
if self.ports is None:
|
||||||
|
port = t_port
|
||||||
|
else:
|
||||||
|
port = self.ports.get(f"{t_port}/tcp", t_port)
|
||||||
|
|
||||||
|
# for interface config or port lists
|
||||||
|
if isinstance(port, (tuple, list)):
|
||||||
|
port = port[-1]
|
||||||
|
|
||||||
|
# lookup the correct protocol from config
|
||||||
|
if t_proto:
|
||||||
|
proto = "https" if self.options[t_proto] else "http"
|
||||||
|
else:
|
||||||
|
proto = s_prefix
|
||||||
|
|
||||||
|
return f"{proto}://[HOST]:{port}{s_suffix}"
|
||||||
|
|
||||||
|
@property
|
||||||
|
def ingress_port(self) -> Optional[int]:
|
||||||
|
"""Return Ingress port."""
|
||||||
|
if not self.with_ingress:
|
||||||
|
return None
|
||||||
|
|
||||||
|
port = self.data[ATTR_INGRESS_PORT]
|
||||||
|
if port == 0:
|
||||||
|
return self.sys_ingress.get_dynamic_port(self.slug)
|
||||||
|
return port
|
||||||
|
|
||||||
|
@property
|
||||||
|
def ingress_panel(self) -> Optional[bool]:
|
||||||
|
"""Return True if the add-on access support ingress."""
|
||||||
|
return self.persist[ATTR_INGRESS_PANEL]
|
||||||
|
|
||||||
|
@ingress_panel.setter
|
||||||
|
def ingress_panel(self, value: bool):
|
||||||
|
"""Return True if the add-on access support ingress."""
|
||||||
|
self.persist[ATTR_INGRESS_PANEL] = value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def audio_output(self) -> Optional[str]:
|
||||||
|
"""Return ALSA config for output or None."""
|
||||||
|
if not self.with_audio:
|
||||||
|
return None
|
||||||
|
return self.persist.get(ATTR_AUDIO_OUTPUT, self.sys_host.alsa.default.output)
|
||||||
|
|
||||||
|
@audio_output.setter
|
||||||
|
def audio_output(self, value: Optional[str]):
|
||||||
|
"""Set/reset audio output settings."""
|
||||||
|
if value is None:
|
||||||
|
self.persist.pop(ATTR_AUDIO_OUTPUT, None)
|
||||||
|
else:
|
||||||
|
self.persist[ATTR_AUDIO_OUTPUT] = value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def audio_input(self) -> Optional[str]:
|
||||||
|
"""Return ALSA config for input or None."""
|
||||||
|
if not self.with_audio:
|
||||||
|
return None
|
||||||
|
return self.persist.get(ATTR_AUDIO_INPUT, self.sys_host.alsa.default.input)
|
||||||
|
|
||||||
|
@audio_input.setter
|
||||||
|
def audio_input(self, value: Optional[str]):
|
||||||
|
"""Set/reset audio input settings."""
|
||||||
|
if value is None:
|
||||||
|
self.persist.pop(ATTR_AUDIO_INPUT, None)
|
||||||
|
else:
|
||||||
|
self.persist[ATTR_AUDIO_INPUT] = value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def image(self):
|
||||||
|
"""Return image name of add-on."""
|
||||||
|
return self.persist.get(ATTR_IMAGE)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def need_build(self):
|
||||||
|
"""Return True if this add-on need a local build."""
|
||||||
|
return ATTR_IMAGE not in self.data
|
||||||
|
|
||||||
|
@property
|
||||||
|
def path_data(self):
|
||||||
|
"""Return add-on data path inside Supervisor."""
|
||||||
|
return Path(self.sys_config.path_addons_data, self.slug)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def path_extern_data(self):
|
||||||
|
"""Return add-on data path external for Docker."""
|
||||||
|
return PurePath(self.sys_config.path_extern_addons_data, self.slug)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def path_options(self):
|
||||||
|
"""Return path to add-on options."""
|
||||||
|
return Path(self.path_data, "options.json")
|
||||||
|
|
||||||
|
@property
|
||||||
|
def path_asound(self):
|
||||||
|
"""Return path to asound config."""
|
||||||
|
return Path(self.sys_config.path_tmp, f"{self.slug}_asound")
|
||||||
|
|
||||||
|
@property
|
||||||
|
def path_extern_asound(self):
|
||||||
|
"""Return path to asound config for Docker."""
|
||||||
|
return Path(self.sys_config.path_extern_tmp, f"{self.slug}_asound")
|
||||||
|
|
||||||
|
def save_persist(self):
|
||||||
|
"""Save data of add-on."""
|
||||||
|
self.sys_addons.data.save_data()
|
||||||
|
|
||||||
|
async def write_options(self):
|
||||||
|
"""Return True if add-on options is written to data."""
|
||||||
|
schema = self.schema
|
||||||
|
options = self.options
|
||||||
|
|
||||||
|
# Update secrets for validation
|
||||||
|
await self.sys_secrets.reload()
|
||||||
|
|
||||||
|
try:
|
||||||
|
options = schema(options)
|
||||||
|
write_json_file(self.path_options, options)
|
||||||
|
except vol.Invalid as ex:
|
||||||
|
_LOGGER.error(
|
||||||
|
"Add-on %s have wrong options: %s",
|
||||||
|
self.slug,
|
||||||
|
humanize_error(options, ex),
|
||||||
|
)
|
||||||
|
except JsonFileError:
|
||||||
|
_LOGGER.error("Add-on %s can't write options", self.slug)
|
||||||
|
else:
|
||||||
|
_LOGGER.debug("Add-on %s write options: %s", self.slug, options)
|
||||||
|
return
|
||||||
|
|
||||||
|
raise AddonsError()
|
||||||
|
|
||||||
|
async def remove_data(self):
|
||||||
|
"""Remove add-on data."""
|
||||||
|
if not self.path_data.is_dir():
|
||||||
|
return
|
||||||
|
|
||||||
|
_LOGGER.info("Remove add-on data folder %s", self.path_data)
|
||||||
|
await remove_data(self.path_data)
|
||||||
|
|
||||||
|
def write_asound(self):
|
||||||
|
"""Write asound config to file and return True on success."""
|
||||||
|
asound_config = self.sys_host.alsa.asound(
|
||||||
|
alsa_input=self.audio_input, alsa_output=self.audio_output
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
with self.path_asound.open("w") as config_file:
|
||||||
|
config_file.write(asound_config)
|
||||||
|
except OSError as err:
|
||||||
|
_LOGGER.error("Add-on %s can't write asound: %s", self.slug, err)
|
||||||
|
raise AddonsError()
|
||||||
|
|
||||||
|
_LOGGER.debug("Add-on %s write asound: %s", self.slug, self.path_asound)
|
||||||
|
|
||||||
|
async def install_apparmor(self) -> None:
|
||||||
|
"""Install or Update AppArmor profile for Add-on."""
|
||||||
|
exists_local = self.sys_host.apparmor.exists(self.slug)
|
||||||
|
exists_addon = self.path_apparmor.exists()
|
||||||
|
|
||||||
|
# Nothing to do
|
||||||
|
if not exists_local and not exists_addon:
|
||||||
|
return
|
||||||
|
|
||||||
|
# Need removed
|
||||||
|
if exists_local and not exists_addon:
|
||||||
|
await self.sys_host.apparmor.remove_profile(self.slug)
|
||||||
|
return
|
||||||
|
|
||||||
|
# Need install/update
|
||||||
|
with TemporaryDirectory(dir=self.sys_config.path_tmp) as tmp_folder:
|
||||||
|
profile_file = Path(tmp_folder, "apparmor.txt")
|
||||||
|
|
||||||
|
adjust_profile(self.slug, self.path_apparmor, profile_file)
|
||||||
|
await self.sys_host.apparmor.load_profile(self.slug, profile_file)
|
||||||
|
|
||||||
|
async def uninstall_apparmor(self) -> None:
|
||||||
|
"""Remove AppArmor profile for Add-on."""
|
||||||
|
if not self.sys_host.apparmor.exists(self.slug):
|
||||||
|
return
|
||||||
|
await self.sys_host.apparmor.remove_profile(self.slug)
|
||||||
|
|
||||||
|
def test_update_schema(self) -> bool:
|
||||||
|
"""Check if the existing configuration is valid after update."""
|
||||||
|
# load next schema
|
||||||
|
new_raw_schema = self.data_store[ATTR_SCHEMA]
|
||||||
|
default_options = self.data_store[ATTR_OPTIONS]
|
||||||
|
|
||||||
|
# if disabled
|
||||||
|
if isinstance(new_raw_schema, bool):
|
||||||
|
return True
|
||||||
|
|
||||||
|
# merge options
|
||||||
|
options = {**self.persist[ATTR_OPTIONS], **default_options}
|
||||||
|
|
||||||
|
# create voluptuous
|
||||||
|
new_schema = vol.Schema(
|
||||||
|
vol.All(dict, validate_options(self.coresys, new_raw_schema))
|
||||||
|
)
|
||||||
|
|
||||||
|
# validate
|
||||||
|
try:
|
||||||
|
new_schema(options)
|
||||||
|
except vol.Invalid:
|
||||||
|
_LOGGER.warning("Add-on %s new schema is not compatible", self.slug)
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
async def state(self) -> str:
|
||||||
|
"""Return running state of add-on."""
|
||||||
|
if await self.instance.is_running():
|
||||||
|
return STATE_STARTED
|
||||||
|
return STATE_STOPPED
|
||||||
|
|
||||||
|
async def start(self) -> None:
|
||||||
|
"""Set options and start add-on."""
|
||||||
|
if await self.instance.is_running():
|
||||||
|
_LOGGER.warning("%s already running!", self.slug)
|
||||||
|
return
|
||||||
|
|
||||||
|
# Access Token
|
||||||
|
self.persist[ATTR_ACCESS_TOKEN] = secrets.token_hex(56)
|
||||||
|
self.save_persist()
|
||||||
|
|
||||||
|
# Options
|
||||||
|
await self.write_options()
|
||||||
|
|
||||||
|
# Sound
|
||||||
|
if self.with_audio:
|
||||||
|
self.write_asound()
|
||||||
|
|
||||||
|
# Start Add-on
|
||||||
|
try:
|
||||||
|
await self.instance.run()
|
||||||
|
except DockerAPIError:
|
||||||
|
raise AddonsError() from None
|
||||||
|
|
||||||
|
async def stop(self) -> None:
|
||||||
|
"""Stop add-on."""
|
||||||
|
try:
|
||||||
|
return await self.instance.stop()
|
||||||
|
except DockerAPIError:
|
||||||
|
raise AddonsError() from None
|
||||||
|
|
||||||
|
async def restart(self) -> None:
|
||||||
|
"""Restart add-on."""
|
||||||
|
with suppress(AddonsError):
|
||||||
|
await self.stop()
|
||||||
|
await self.start()
|
||||||
|
|
||||||
|
def logs(self) -> Awaitable[bytes]:
|
||||||
|
"""Return add-ons log output.
|
||||||
|
|
||||||
|
Return a coroutine.
|
||||||
|
"""
|
||||||
|
return self.instance.logs()
|
||||||
|
|
||||||
|
async def stats(self) -> DockerStats:
|
||||||
|
"""Return stats of container."""
|
||||||
|
try:
|
||||||
|
return await self.instance.stats()
|
||||||
|
except DockerAPIError:
|
||||||
|
raise AddonsError() from None
|
||||||
|
|
||||||
|
async def write_stdin(self, data):
|
||||||
|
"""Write data to add-on stdin.
|
||||||
|
|
||||||
|
Return a coroutine.
|
||||||
|
"""
|
||||||
|
if not self.with_stdin:
|
||||||
|
_LOGGER.error("Add-on don't support write to stdin!")
|
||||||
|
raise AddonsNotSupportedError()
|
||||||
|
|
||||||
|
try:
|
||||||
|
return await self.instance.write_stdin(data)
|
||||||
|
except DockerAPIError:
|
||||||
|
raise AddonsError() from None
|
||||||
|
|
||||||
|
async def snapshot(self, tar_file: tarfile.TarFile) -> None:
|
||||||
|
"""Snapshot state of an add-on."""
|
||||||
|
with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
|
||||||
|
# store local image
|
||||||
|
if self.need_build:
|
||||||
|
try:
|
||||||
|
await self.instance.export_image(Path(temp, "image.tar"))
|
||||||
|
except DockerAPIError:
|
||||||
|
raise AddonsError() from None
|
||||||
|
|
||||||
|
data = {
|
||||||
|
ATTR_USER: self.persist,
|
||||||
|
ATTR_SYSTEM: self.data,
|
||||||
|
ATTR_VERSION: self.version,
|
||||||
|
ATTR_STATE: await self.state(),
|
||||||
|
}
|
||||||
|
|
||||||
|
# Store local configs/state
|
||||||
|
try:
|
||||||
|
write_json_file(Path(temp, "addon.json"), data)
|
||||||
|
except JsonFileError:
|
||||||
|
_LOGGER.error("Can't save meta for %s", self.slug)
|
||||||
|
raise AddonsError() from None
|
||||||
|
|
||||||
|
# Store AppArmor Profile
|
||||||
|
if self.sys_host.apparmor.exists(self.slug):
|
||||||
|
profile = Path(temp, "apparmor.txt")
|
||||||
|
try:
|
||||||
|
self.sys_host.apparmor.backup_profile(self.slug, profile)
|
||||||
|
except HostAppArmorError:
|
||||||
|
_LOGGER.error("Can't backup AppArmor profile")
|
||||||
|
raise AddonsError() from None
|
||||||
|
|
||||||
|
# write into tarfile
|
||||||
|
def _write_tarfile():
|
||||||
|
"""Write tar inside loop."""
|
||||||
|
with tar_file as snapshot:
|
||||||
|
# Snapshot system
|
||||||
|
snapshot.add(temp, arcname=".")
|
||||||
|
|
||||||
|
# Snapshot data
|
||||||
|
snapshot.add(
|
||||||
|
self.path_data,
|
||||||
|
arcname="data",
|
||||||
|
filter=exclude_filter(self.snapshot_exclude),
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
_LOGGER.info("Build snapshot for add-on %s", self.slug)
|
||||||
|
await self.sys_run_in_executor(_write_tarfile)
|
||||||
|
except (tarfile.TarError, OSError) as err:
|
||||||
|
_LOGGER.error("Can't write tarfile %s: %s", tar_file, err)
|
||||||
|
raise AddonsError() from None
|
||||||
|
|
||||||
|
_LOGGER.info("Finish snapshot for addon %s", self.slug)
|
||||||
|
|
||||||
|
async def restore(self, tar_file: tarfile.TarFile) -> None:
|
||||||
|
"""Restore state of an add-on."""
|
||||||
|
with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
|
||||||
|
# extract snapshot
|
||||||
|
def _extract_tarfile():
|
||||||
|
"""Extract tar snapshot."""
|
||||||
|
with tar_file as snapshot:
|
||||||
|
snapshot.extractall(path=Path(temp), members=secure_path(snapshot))
|
||||||
|
|
||||||
|
try:
|
||||||
|
await self.sys_run_in_executor(_extract_tarfile)
|
||||||
|
except tarfile.TarError as err:
|
||||||
|
_LOGGER.error("Can't read tarfile %s: %s", tar_file, err)
|
||||||
|
raise AddonsError() from None
|
||||||
|
|
||||||
|
# Read snapshot data
|
||||||
|
try:
|
||||||
|
data = read_json_file(Path(temp, "addon.json"))
|
||||||
|
except JsonFileError:
|
||||||
|
raise AddonsError() from None
|
||||||
|
|
||||||
|
# Validate
|
||||||
|
try:
|
||||||
|
data = SCHEMA_ADDON_SNAPSHOT(data)
|
||||||
|
except vol.Invalid as err:
|
||||||
|
_LOGGER.error(
|
||||||
|
"Can't validate %s, snapshot data: %s",
|
||||||
|
self.slug,
|
||||||
|
humanize_error(data, err),
|
||||||
|
)
|
||||||
|
raise AddonsError() from None
|
||||||
|
|
||||||
|
# If available
|
||||||
|
if not self._available(data[ATTR_SYSTEM]):
|
||||||
|
_LOGGER.error("Add-on %s is not available for this Platform", self.slug)
|
||||||
|
raise AddonsNotSupportedError()
|
||||||
|
|
||||||
|
# Restore local add-on informations
|
||||||
|
_LOGGER.info("Restore config for addon %s", self.slug)
|
||||||
|
restore_image = self._image(data[ATTR_SYSTEM])
|
||||||
|
self.sys_addons.data.restore(
|
||||||
|
self.slug, data[ATTR_USER], data[ATTR_SYSTEM], restore_image
|
||||||
|
)
|
||||||
|
|
||||||
|
# Check version / restore image
|
||||||
|
version = data[ATTR_VERSION]
|
||||||
|
if not await self.instance.exists():
|
||||||
|
_LOGGER.info("Restore/Install image for addon %s", self.slug)
|
||||||
|
|
||||||
|
image_file = Path(temp, "image.tar")
|
||||||
|
if image_file.is_file():
|
||||||
|
with suppress(DockerAPIError):
|
||||||
|
await self.instance.import_image(image_file)
|
||||||
|
else:
|
||||||
|
with suppress(DockerAPIError):
|
||||||
|
await self.instance.install(version, restore_image)
|
||||||
|
await self.instance.cleanup()
|
||||||
|
elif self.instance.version != version or self.legacy:
|
||||||
|
_LOGGER.info("Restore/Update image for addon %s", self.slug)
|
||||||
|
with suppress(DockerAPIError):
|
||||||
|
await self.instance.update(version, restore_image)
|
||||||
|
else:
|
||||||
|
with suppress(DockerAPIError):
|
||||||
|
await self.instance.stop()
|
||||||
|
|
||||||
|
# Restore data
|
||||||
|
def _restore_data():
|
||||||
|
"""Restore data."""
|
||||||
|
shutil.copytree(Path(temp, "data"), self.path_data)
|
||||||
|
|
||||||
|
_LOGGER.info("Restore data for addon %s", self.slug)
|
||||||
|
if self.path_data.is_dir():
|
||||||
|
await remove_data(self.path_data)
|
||||||
|
try:
|
||||||
|
await self.sys_run_in_executor(_restore_data)
|
||||||
|
except shutil.Error as err:
|
||||||
|
_LOGGER.error("Can't restore origin data: %s", err)
|
||||||
|
raise AddonsError() from None
|
||||||
|
|
||||||
|
# Restore AppArmor
|
||||||
|
profile_file = Path(temp, "apparmor.txt")
|
||||||
|
if profile_file.exists():
|
||||||
|
try:
|
||||||
|
await self.sys_host.apparmor.load_profile(self.slug, profile_file)
|
||||||
|
except HostAppArmorError:
|
||||||
|
_LOGGER.error("Can't restore AppArmor profile")
|
||||||
|
raise AddonsError() from None
|
||||||
|
|
||||||
|
# Run add-on
|
||||||
|
if data[ATTR_STATE] == STATE_STARTED:
|
||||||
|
return await self.start()
|
||||||
|
|
||||||
|
_LOGGER.info("Finish restore for add-on %s", self.slug)
|
79  hassio/addons/build.py  Normal file
@@ -0,0 +1,79 @@
"""Hass.io add-on build environment."""
from __future__ import annotations

from pathlib import Path
from typing import TYPE_CHECKING, Dict

from ..const import ATTR_ARGS, ATTR_BUILD_FROM, ATTR_SQUASH, META_ADDON
from ..coresys import CoreSys, CoreSysAttributes
from ..utils.json import JsonConfig
from .validate import SCHEMA_BUILD_CONFIG

if TYPE_CHECKING:
    from . import AnyAddon


class AddonBuild(JsonConfig, CoreSysAttributes):
    """Handle build options for add-ons."""

    def __init__(self, coresys: CoreSys, addon: AnyAddon) -> None:
        """Initialize Hass.io add-on builder."""
        self.coresys: CoreSys = coresys
        self.addon = addon

        super().__init__(
            Path(self.addon.path_location, "build.json"), SCHEMA_BUILD_CONFIG
        )

    def save_data(self):
        """Ignore save function."""
        raise RuntimeError()

    @property
    def base_image(self) -> str:
        """Base images for this add-on."""
        return self._data[ATTR_BUILD_FROM].get(
            self.sys_arch.default, f"homeassistant/{self.sys_arch.default}-base:latest"
        )

    @property
    def squash(self) -> bool:
        """Return True or False if squash is active."""
        return self._data[ATTR_SQUASH]

    @property
    def additional_args(self) -> Dict[str, str]:
        """Return additional Docker build arguments."""
        return self._data[ATTR_ARGS]

    def get_docker_args(self, version):
        """Create a dict with Docker build arguments."""
        args = {
            "path": str(self.addon.path_location),
            "tag": f"{self.addon.image}:{version}",
            "pull": True,
            "forcerm": True,
            "squash": self.squash,
            "labels": {
                "io.hass.version": version,
                "io.hass.arch": self.sys_arch.default,
                "io.hass.type": META_ADDON,
                "io.hass.name": self._fix_label("name"),
                "io.hass.description": self._fix_label("description"),
            },
            "buildargs": {
                "BUILD_FROM": self.base_image,
                "BUILD_VERSION": version,
                "BUILD_ARCH": self.sys_arch.default,
                **self.additional_args,
            },
        }

        if self.addon.url:
            args["labels"]["io.hass.url"] = self.addon.url

        return args

    def _fix_label(self, label_name: str) -> str:
        """Remove characters they are not supported."""
        label = getattr(self.addon, label_name, "")
        return label.replace("'", "")
@ -1,7 +1,7 @@
|
|||||||
"""Init file for Supervisor add-on data."""
|
"""Init file for Hass.io add-on data."""
|
||||||
|
|
||||||
from copy import deepcopy
|
from copy import deepcopy
|
||||||
from typing import Any
|
import logging
|
||||||
|
from typing import Any, Dict
|
||||||
|
|
||||||
from ..const import (
|
from ..const import (
|
||||||
ATTR_IMAGE,
|
ATTR_IMAGE,
|
||||||
@ -12,16 +12,18 @@ from ..const import (
|
|||||||
FILE_HASSIO_ADDONS,
|
FILE_HASSIO_ADDONS,
|
||||||
)
|
)
|
||||||
from ..coresys import CoreSys, CoreSysAttributes
|
from ..coresys import CoreSys, CoreSysAttributes
|
||||||
|
from ..utils.json import JsonConfig
|
||||||
from ..store.addon import AddonStore
|
from ..store.addon import AddonStore
|
||||||
from ..utils.common import FileConfiguration
|
|
||||||
from .addon import Addon
|
from .addon import Addon
|
||||||
from .validate import SCHEMA_ADDONS_FILE
|
from .validate import SCHEMA_ADDONS_FILE
|
||||||
|
|
||||||
Config = dict[str, Any]
|
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
Config = Dict[str, Any]
|
||||||
|
|
||||||
|
|
||||||
class AddonsData(FileConfiguration, CoreSysAttributes):
|
class AddonsData(JsonConfig, CoreSysAttributes):
|
||||||
"""Hold data for installed Add-ons inside Supervisor."""
|
"""Hold data for installed Add-ons inside Hass.io."""
|
||||||
|
|
||||||
def __init__(self, coresys: CoreSys):
|
def __init__(self, coresys: CoreSys):
|
||||||
"""Initialize data holder."""
|
"""Initialize data holder."""
|
||||||
@ -38,7 +40,7 @@ class AddonsData(FileConfiguration, CoreSysAttributes):
|
|||||||
"""Return local add-on data."""
|
"""Return local add-on data."""
|
||||||
return self._data[ATTR_SYSTEM]
|
return self._data[ATTR_SYSTEM]
|
||||||
|
|
||||||
async def install(self, addon: AddonStore) -> None:
|
def install(self, addon: AddonStore) -> None:
|
||||||
"""Set addon as installed."""
|
"""Set addon as installed."""
|
||||||
self.system[addon.slug] = deepcopy(addon.data)
|
self.system[addon.slug] = deepcopy(addon.data)
|
||||||
self.user[addon.slug] = {
|
self.user[addon.slug] = {
|
||||||
@ -46,28 +48,26 @@ class AddonsData(FileConfiguration, CoreSysAttributes):
|
|||||||
ATTR_VERSION: addon.version,
|
ATTR_VERSION: addon.version,
|
||||||
ATTR_IMAGE: addon.image,
|
ATTR_IMAGE: addon.image,
|
||||||
}
|
}
|
||||||
await self.save_data()
|
self.save_data()
|
||||||
|
|
||||||
async def uninstall(self, addon: Addon) -> None:
|
def uninstall(self, addon: Addon) -> None:
|
||||||
"""Set add-on as uninstalled."""
|
"""Set add-on as uninstalled."""
|
||||||
self.system.pop(addon.slug, None)
|
self.system.pop(addon.slug, None)
|
||||||
self.user.pop(addon.slug, None)
|
self.user.pop(addon.slug, None)
|
||||||
await self.save_data()
|
self.save_data()
|
||||||
|
|
||||||
async def update(self, addon: AddonStore) -> None:
|
def update(self, addon: AddonStore) -> None:
|
||||||
"""Update version of add-on."""
|
"""Update version of add-on."""
|
||||||
self.system[addon.slug] = deepcopy(addon.data)
|
self.system[addon.slug] = deepcopy(addon.data)
|
||||||
self.user[addon.slug].update(
|
self.user[addon.slug].update(
|
||||||
{ATTR_VERSION: addon.version, ATTR_IMAGE: addon.image}
|
{ATTR_VERSION: addon.version, ATTR_IMAGE: addon.image}
|
||||||
)
|
)
|
||||||
await self.save_data()
|
self.save_data()
|
||||||
|
|
||||||
async def restore(
|
def restore(self, slug: str, user: Config, system: Config, image: str) -> None:
|
||||||
self, slug: str, user: Config, system: Config, image: str
|
|
||||||
) -> None:
|
|
||||||
"""Restore data to add-on."""
|
"""Restore data to add-on."""
|
||||||
self.user[slug] = deepcopy(user)
|
self.user[slug] = deepcopy(user)
|
||||||
self.system[slug] = deepcopy(system)
|
self.system[slug] = deepcopy(system)
|
||||||
|
|
||||||
self.user[slug][ATTR_IMAGE] = image
|
self.user[slug][ATTR_IMAGE] = image
|
||||||
await self.save_data()
|
self.save_data()
|
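The consistent change in this file is the persistence API going async on `main`: `install`/`uninstall`/`update`/`restore` become coroutines so that `save_data()` can be awaited. A minimal sketch of what that means for a caller (names are illustrative, not from the source):

```python
# On the 197 branch, registering an add-on is synchronous:
#     data.install(addon)
# On main, the same operation must run inside the event loop:
async def register(data, addon):
    await data.install(addon)  # persists via `await self.save_data()`
```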
@@ -1,17 +1,9 @@
-"""Init file for Supervisor add-ons."""
-from abc import ABC, abstractmethod
-from collections import defaultdict
-from collections.abc import Awaitable, Callable
-from contextlib import suppress
-from datetime import datetime
-import logging
+"""Init file for Hass.io add-ons."""
 from pathlib import Path
-from typing import Any
+from typing import Any, Awaitable, Dict, List, Optional
 
-from awesomeversion import AwesomeVersion, AwesomeVersionException
-
-from supervisor.utils.dt import utc_from_timestamp
+from packaging import version as pkg_version
+import voluptuous as vol
 
 from ..const import (
     ATTR_ADVANCED,
@@ -19,9 +11,7 @@ from ..const import (
     ATTR_ARCH,
     ATTR_AUDIO,
     ATTR_AUTH_API,
-    ATTR_BACKUP_EXCLUDE,
-    ATTR_BACKUP_POST,
-    ATTR_BACKUP_PRE,
+    ATTR_AUTO_UART,
     ATTR_BOOT,
     ATTR_DESCRIPTON,
     ATTR_DEVICES,
@@ -39,15 +29,11 @@ from ..const import (
     ATTR_HOST_IPC,
     ATTR_HOST_NETWORK,
     ATTR_HOST_PID,
-    ATTR_HOST_UTS,
     ATTR_IMAGE,
     ATTR_INGRESS,
-    ATTR_INGRESS_STREAM,
-    ATTR_INIT,
-    ATTR_JOURNALD,
     ATTR_KERNEL_MODULES,
     ATTR_LEGACY,
-    ATTR_LOCATION,
+    ATTR_LOCATON,
     ATTR_MACHINE,
     ATTR_MAP,
     ATTR_NAME,
@@ -58,87 +44,50 @@ from ..const import (
     ATTR_PORTS,
     ATTR_PORTS_DESCRIPTION,
     ATTR_PRIVILEGED,
-    ATTR_REALTIME,
     ATTR_REPOSITORY,
     ATTR_SCHEMA,
     ATTR_SERVICES,
     ATTR_SLUG,
+    ATTR_SNAPSHOT_EXCLUDE,
     ATTR_STAGE,
     ATTR_STARTUP,
     ATTR_STDIN,
     ATTR_TIMEOUT,
     ATTR_TMPFS,
-    ATTR_TRANSLATIONS,
-    ATTR_TYPE,
-    ATTR_UART,
     ATTR_UDEV,
     ATTR_URL,
-    ATTR_USB,
     ATTR_VERSION,
-    ATTR_VERSION_TIMESTAMP,
-    ATTR_VIDEO,
-    ATTR_WATCHDOG,
     ATTR_WEBUI,
     SECURITY_DEFAULT,
     SECURITY_DISABLE,
     SECURITY_PROFILE,
-    AddonBoot,
-    AddonBootConfig,
-    AddonStage,
-    AddonStartup,
+    AddonStages,
 )
-from ..coresys import CoreSys
-from ..docker.const import Capabilities
-from ..exceptions import AddonsNotSupportedError
-from ..jobs.const import JOB_GROUP_ADDON
-from ..jobs.job_group import JobGroup
-from ..utils import version_is_new_enough
-from .configuration import FolderMapping
-from .const import (
-    ATTR_BACKUP,
-    ATTR_BREAKING_VERSIONS,
-    ATTR_CODENOTARY,
-    ATTR_PATH,
-    ATTR_READ_ONLY,
-    AddonBackupMode,
-    MappingType,
-)
-from .options import AddonOptions, UiOptions
-from .validate import RE_SERVICE
+from ..coresys import CoreSysAttributes
+from .validate import RE_SERVICE, RE_VOLUME, schema_ui_options, validate_options
 
-_LOGGER: logging.Logger = logging.getLogger(__name__)
-
-Data = dict[str, Any]
+Data = Dict[str, Any]
 
 
-class AddonModel(JobGroup, ABC):
+class AddonModel(CoreSysAttributes):
     """Add-on Data layout."""
 
-    def __init__(self, coresys: CoreSys, slug: str):
-        """Initialize data holder."""
-        super().__init__(
-            coresys, JOB_GROUP_ADDON.format_map(defaultdict(str, slug=slug)), slug
-        )
-        self.slug: str = slug
-        self._path_icon_exists: bool = False
-        self._path_logo_exists: bool = False
-        self._path_changelog_exists: bool = False
-        self._path_documentation_exists: bool = False
+    slug: str = None
 
     @property
-    @abstractmethod
     def data(self) -> Data:
-        """Return add-on config/data."""
+        """Return Add-on config/data."""
+        raise NotImplementedError()
 
     @property
-    @abstractmethod
     def is_installed(self) -> bool:
         """Return True if an add-on is installed."""
+        raise NotImplementedError()
 
     @property
-    @abstractmethod
     def is_detached(self) -> bool:
         """Return True if add-on is detached."""
+        raise NotImplementedError()
 
     @property
     def available(self) -> bool:
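On `main`, `AddonModel` becomes a real ABC: `data`, `is_installed`, and `is_detached` are `@abstractmethod`s, while branch `197` expresses the same contract with `raise NotImplementedError()` stubs. A minimal sketch of a concrete subclass against the abstract variant (the class name and its dict-backed storage are purely illustrative):

```python
class DictBackedAddon(AddonModel):  # hypothetical subclass for illustration
    """Serve add-on metadata from a plain dict."""

    def __init__(self, coresys, slug: str, data: dict):
        super().__init__(coresys, slug)
        self._data = data

    @property
    def data(self) -> Data:
        return self._data

    @property
    def is_installed(self) -> bool:
        return False

    @property
    def is_detached(self) -> bool:
        return False
```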
@@ -146,22 +95,17 @@
         return self._available(self.data)
 
     @property
-    def options(self) -> dict[str, Any]:
+    def options(self) -> Dict[str, Any]:
         """Return options with local changes."""
         return self.data[ATTR_OPTIONS]
 
     @property
-    def boot_config(self) -> AddonBootConfig:
-        """Return boot config."""
+    def boot(self) -> bool:
+        """Return boot config with prio local settings."""
         return self.data[ATTR_BOOT]
 
     @property
-    def boot(self) -> AddonBoot:
-        """Return boot config with prio local settings unless config is forced."""
-        return AddonBoot(self.data[ATTR_BOOT])
-
-    @property
-    def auto_update(self) -> bool | None:
+    def auto_update(self) -> Optional[bool]:
         """Return if auto update is enable."""
         return None
 
@@ -176,7 +120,7 @@
         return self.slug.replace("_", "-")
 
     @property
-    def dns(self) -> list[str]:
+    def dns(self) -> List[str]:
         """Return list of DNS name for that add-on."""
         return []
 
@@ -186,22 +130,22 @@
         return self.data[ATTR_TIMEOUT]
 
     @property
-    def uuid(self) -> str | None:
+    def uuid(self) -> Optional[str]:
         """Return an API token for this add-on."""
         return None
 
     @property
-    def supervisor_token(self) -> str | None:
-        """Return access token for Supervisor API."""
+    def hassio_token(self) -> Optional[str]:
+        """Return access token for Hass.io API."""
         return None
 
     @property
-    def ingress_token(self) -> str | None:
-        """Return access token for Supervisor API."""
+    def ingress_token(self) -> Optional[str]:
+        """Return access token for Hass.io API."""
         return None
 
     @property
-    def ingress_entry(self) -> str | None:
+    def ingress_entry(self) -> Optional[str]:
         """Return ingress external URL."""
         return None
 
@@ -210,28 +154,31 @@
         """Return description of add-on."""
         return self.data[ATTR_DESCRIPTON]
 
+    @property
+    def long_description(self) -> Optional[str]:
+        """Return README.md as long_description."""
+        readme = Path(self.path_location, "README.md")
+
+        # If readme not exists
+        if not readme.exists():
+            return None
+
+        # Return data
+        with readme.open("r") as readme_file:
+            return readme_file.read()
+
     @property
     def repository(self) -> str:
         """Return repository of add-on."""
         return self.data[ATTR_REPOSITORY]
 
     @property
-    def translations(self) -> dict:
-        """Return add-on translations."""
-        return self.data[ATTR_TRANSLATIONS]
-
-    @property
-    def latest_version(self) -> AwesomeVersion:
+    def latest_version(self) -> str:
         """Return latest version of add-on."""
         return self.data[ATTR_VERSION]
 
     @property
-    def latest_version_timestamp(self) -> datetime:
-        """Return when latest version was first seen."""
-        return utc_from_timestamp(self.data[ATTR_VERSION_TIMESTAMP])
-
-    @property
-    def version(self) -> AwesomeVersion:
+    def version(self) -> str:
         """Return version of add-on."""
         return self.data[ATTR_VERSION]
 
@@ -241,9 +188,9 @@
         return True
 
     @property
-    def startup(self) -> AddonStartup:
+    def startup(self) -> Optional[str]:
         """Return startup type of add-on."""
-        return self.data[ATTR_STARTUP]
+        return self.data.get(ATTR_STARTUP)
 
     @property
     def advanced(self) -> bool:
@@ -251,55 +198,49 @@
         return self.data[ATTR_ADVANCED]
 
     @property
-    def stage(self) -> AddonStage:
+    def stage(self) -> AddonStages:
         """Return stage mode of add-on."""
         return self.data[ATTR_STAGE]
 
     @property
-    def services_role(self) -> dict[str, str]:
+    def services_role(self) -> Dict[str, str]:
         """Return dict of services with rights."""
         services_list = self.data.get(ATTR_SERVICES, [])
 
         services = {}
         for data in services_list:
             service = RE_SERVICE.match(data)
-            if service:
-                services[service.group("service")] = service.group("rights")
+            services[service.group("service")] = service.group("rights")
 
         return services
 
     @property
-    def discovery(self) -> list[str]:
+    def discovery(self) -> List[str]:
         """Return list of discoverable components/platforms."""
         return self.data.get(ATTR_DISCOVERY, [])
 
     @property
-    def ports_description(self) -> dict[str, str] | None:
+    def ports_description(self) -> Optional[Dict[str, str]]:
         """Return descriptions of ports."""
         return self.data.get(ATTR_PORTS_DESCRIPTION)
 
     @property
-    def ports(self) -> dict[str, int | None] | None:
+    def ports(self) -> Optional[Dict[str, Optional[int]]]:
         """Return ports of add-on."""
         return self.data.get(ATTR_PORTS)
 
     @property
-    def ingress_url(self) -> str | None:
+    def ingress_url(self) -> Optional[str]:
         """Return URL to ingress url."""
         return None
 
     @property
-    def webui(self) -> str | None:
+    def webui(self) -> Optional[str]:
         """Return URL to webui or None."""
         return self.data.get(ATTR_WEBUI)
 
     @property
-    def watchdog_url(self) -> str | None:
-        """Return URL to for watchdog or None."""
-        return self.data.get(ATTR_WATCHDOG)
-
-    @property
-    def ingress_port(self) -> int | None:
+    def ingress_port(self) -> Optional[int]:
         """Return Ingress port."""
         return None
 
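Both sides parse service grants with `RE_SERVICE`, whose pattern (defined in `validate.py` below) only admits `mqtt`/`mysql` with `provide|want|need` rights; `main` additionally guards against a non-match before indexing the groups. A small self-contained sketch of that parsing:

```python
import re

# Same pattern as RE_SERVICE in hassio/addons/validate.py.
RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$")

services = {}
for entry in ["mqtt:provide", "mysql:want", "redis:need"]:
    match = RE_SERVICE.match(entry)
    if match:  # the guard added on main; "redis:need" is silently skipped
        services[match.group("service")] = match.group("rights")

print(services)  # {'mqtt': 'provide', 'mysql': 'want'}
```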
@@ -333,28 +274,33 @@
         """Return True if add-on run on host IPC namespace."""
         return self.data[ATTR_HOST_IPC]
 
-    @property
-    def host_uts(self) -> bool:
-        """Return True if add-on run on host UTS namespace."""
-        return self.data[ATTR_HOST_UTS]
-
     @property
     def host_dbus(self) -> bool:
         """Return True if add-on run on host D-BUS."""
         return self.data[ATTR_HOST_DBUS]
 
     @property
-    def static_devices(self) -> list[Path]:
-        """Return static devices of add-on."""
-        return [Path(node) for node in self.data.get(ATTR_DEVICES, [])]
+    def devices(self) -> Optional[List[str]]:
+        """Return devices of add-on."""
+        return self.data.get(ATTR_DEVICES, [])
 
     @property
-    def environment(self) -> dict[str, str] | None:
+    def auto_uart(self) -> bool:
+        """Return True if we should map all UART device."""
+        return self.data[ATTR_AUTO_UART]
+
+    @property
+    def tmpfs(self) -> Optional[str]:
+        """Return tmpfs of add-on."""
+        return self.data.get(ATTR_TMPFS)
+
+    @property
+    def environment(self) -> Optional[Dict[str, str]]:
         """Return environment of add-on."""
         return self.data.get(ATTR_ENVIRONMENT)
 
     @property
-    def privileged(self) -> list[Capabilities]:
+    def privileged(self) -> List[str]:
         """Return list of privilege."""
         return self.data.get(ATTR_PRIVILEGED, [])
 
@@ -379,7 +325,7 @@
 
     @property
     def access_hassio_api(self) -> bool:
-        """Return True if the add-on access to Supervisor REASTful API."""
+        """Return True if the add-on access to Hass.io REASTful API."""
         return self.data[ATTR_HASSIO_API]
 
     @property
@@ -389,33 +335,13 @@
 
     @property
     def hassio_role(self) -> str:
-        """Return Supervisor role for API."""
+        """Return Hass.io role for API."""
         return self.data[ATTR_HASSIO_ROLE]
 
     @property
-    def backup_exclude(self) -> list[str]:
-        """Return Exclude list for backup."""
-        return self.data.get(ATTR_BACKUP_EXCLUDE, [])
-
-    @property
-    def backup_pre(self) -> str | None:
-        """Return pre-backup command."""
-        return self.data.get(ATTR_BACKUP_PRE)
-
-    @property
-    def backup_post(self) -> str | None:
-        """Return post-backup command."""
-        return self.data.get(ATTR_BACKUP_POST)
-
-    @property
-    def backup_mode(self) -> AddonBackupMode:
-        """Return if backup is hot/cold."""
-        return self.data[ATTR_BACKUP]
-
-    @property
-    def default_init(self) -> bool:
-        """Return True if the add-on have no own init."""
-        return self.data[ATTR_INIT]
+    def snapshot_exclude(self) -> List[str]:
+        """Return Exclude list for snapshot."""
+        return self.data.get(ATTR_SNAPSHOT_EXCLUDE, [])
 
     @property
     def with_stdin(self) -> bool:
@@ -428,30 +354,15 @@
         return self.data[ATTR_INGRESS]
 
     @property
-    def ingress_panel(self) -> bool | None:
+    def ingress_panel(self) -> Optional[bool]:
         """Return True if the add-on access support ingress."""
         return None
 
-    @property
-    def ingress_stream(self) -> bool:
-        """Return True if post requests to ingress should be streamed."""
-        return self.data[ATTR_INGRESS_STREAM]
-
     @property
     def with_gpio(self) -> bool:
         """Return True if the add-on access to GPIO interface."""
         return self.data[ATTR_GPIO]
 
-    @property
-    def with_usb(self) -> bool:
-        """Return True if the add-on need USB access."""
-        return self.data[ATTR_USB]
-
-    @property
-    def with_uart(self) -> bool:
-        """Return True if we should map all UART device."""
-        return self.data[ATTR_UART]
-
     @property
     def with_udev(self) -> bool:
         """Return True if the add-on have his own udev."""
@@ -462,11 +373,6 @@
         """Return True if the add-on access to kernel modules."""
         return self.data[ATTR_KERNEL_MODULES]
 
-    @property
-    def with_realtime(self) -> bool:
-        """Return True if the add-on need realtime schedule functions."""
-        return self.data[ATTR_REALTIME]
-
     @property
     def with_full_access(self) -> bool:
         """Return True if the add-on want full access to hardware."""
@@ -477,11 +383,6 @@
         """Return True if the add-on read access to devicetree."""
         return self.data[ATTR_DEVICETREE]
 
-    @property
-    def with_tmpfs(self) -> str | None:
-        """Return if tmp is in memory of add-on."""
-        return self.data[ATTR_TMPFS]
-
     @property
     def access_auth_api(self) -> bool:
         """Return True if the add-on access to login/auth backend."""
@@ -493,60 +394,47 @@
         return self.data[ATTR_AUDIO]
 
     @property
-    def with_video(self) -> bool:
-        """Return True if the add-on access to video."""
-        return self.data[ATTR_VIDEO]
-
-    @property
-    def homeassistant_version(self) -> str | None:
+    def homeassistant_version(self) -> Optional[str]:
         """Return min Home Assistant version they needed by Add-on."""
         return self.data.get(ATTR_HOMEASSISTANT)
 
     @property
-    def url(self) -> str | None:
+    def url(self) -> Optional[str]:
         """Return URL of add-on."""
         return self.data.get(ATTR_URL)
 
     @property
     def with_icon(self) -> bool:
         """Return True if an icon exists."""
-        return self._path_icon_exists
+        return self.path_icon.exists()
 
     @property
     def with_logo(self) -> bool:
         """Return True if a logo exists."""
-        return self._path_logo_exists
+        return self.path_logo.exists()
 
     @property
     def with_changelog(self) -> bool:
         """Return True if a changelog exists."""
-        return self._path_changelog_exists
+        return self.path_changelog.exists()
 
     @property
     def with_documentation(self) -> bool:
         """Return True if a documentation exists."""
-        return self._path_documentation_exists
+        return self.path_documentation.exists()
 
     @property
-    def supported_arch(self) -> list[str]:
+    def supported_arch(self) -> List[str]:
         """Return list of supported arch."""
         return self.data[ATTR_ARCH]
 
     @property
-    def supported_machine(self) -> list[str]:
+    def supported_machine(self) -> List[str]:
         """Return list of supported machine."""
         return self.data.get(ATTR_MACHINE, [])
 
     @property
-    def arch(self) -> str:
-        """Return architecture to use for the addon's image."""
-        if ATTR_IMAGE in self.data:
-            return self.sys_arch.match(self.data[ATTR_ARCH])
-
-        return self.sys_arch.default
-
-    @property
-    def image(self) -> str | None:
+    def image(self) -> str:
         """Generate image name from data."""
         return self._image(self.data)
 
@@ -556,20 +444,19 @@
         return ATTR_IMAGE not in self.data
 
     @property
-    def map_volumes(self) -> dict[MappingType, FolderMapping]:
-        """Return a dict of {MappingType: FolderMapping} from add-on."""
+    def map_volumes(self) -> Dict[str, str]:
+        """Return a dict of {volume: policy} from add-on."""
         volumes = {}
         for volume in self.data[ATTR_MAP]:
-            volumes[MappingType(volume[ATTR_TYPE])] = FolderMapping(
-                volume.get(ATTR_PATH), volume[ATTR_READ_ONLY]
-            )
+            result = RE_VOLUME.match(volume)
+            volumes[result.group(1)] = result.group(2) or "ro"
 
         return volumes
 
     @property
     def path_location(self) -> Path:
         """Return path to this add-on."""
-        return Path(self.data[ATTR_LOCATION])
+        return Path(self.data[ATTR_LOCATON])
 
     @property
     def path_icon(self) -> Path:
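On branch `197`, `map_volumes` decodes entries such as `config:rw` with `RE_VOLUME` (defined in `validate.py` below) and falls back to read-only when no policy is given. A standalone sketch of that decoding:

```python
import re

# Same pattern as RE_VOLUME in hassio/addons/validate.py.
RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")

volumes = {}
for entry in ["config:rw", "ssl", "share:ro"]:
    result = RE_VOLUME.match(entry)
    # group(1) is the folder, group(2) the optional policy; default is "ro"
    volumes[result.group(1)] = result.group(2) or "ro"

print(volumes)  # {'config': 'rw', 'ssl': 'ro', 'share': 'ro'}
```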
@@ -597,120 +484,45 @@
         return Path(self.path_location, "apparmor.txt")
 
     @property
-    def schema(self) -> AddonOptions:
-        """Return Addon options validation object."""
+    def schema(self) -> vol.Schema:
+        """Create a schema for add-on options."""
         raw_schema = self.data[ATTR_SCHEMA]
-        if isinstance(raw_schema, bool):
-            raw_schema = {}
 
-        return AddonOptions(self.coresys, raw_schema, self.name, self.slug)
+        if isinstance(raw_schema, bool):
+            return vol.Schema(dict)
+        return vol.Schema(vol.All(dict, validate_options(self.coresys, raw_schema)))
 
     @property
-    def schema_ui(self) -> list[dict[Any, Any]] | None:
+    def schema_ui(self) -> Optional[List[Dict[str, Any]]]:
         """Create a UI schema for add-on options."""
         raw_schema = self.data[ATTR_SCHEMA]
 
         if isinstance(raw_schema, bool):
             return None
-        return UiOptions(self.coresys)(raw_schema)
+        return schema_ui_options(raw_schema)
 
-    @property
-    def with_journald(self) -> bool:
-        """Return True if the add-on accesses the system journal."""
-        return self.data[ATTR_JOURNALD]
-
-    @property
-    def signed(self) -> bool:
-        """Return True if the image is signed."""
-        return ATTR_CODENOTARY in self.data
-
-    @property
-    def codenotary(self) -> str | None:
-        """Return Signer email address for CAS."""
-        return self.data.get(ATTR_CODENOTARY)
-
-    @property
-    def breaking_versions(self) -> list[AwesomeVersion]:
-        """Return breaking versions of addon."""
-        return self.data[ATTR_BREAKING_VERSIONS]
-
-    async def long_description(self) -> str | None:
-        """Return README.md as long_description."""
-
-        def read_readme() -> str | None:
-            readme = Path(self.path_location, "README.md")
-
-            # If readme not exists
-            if not readme.exists():
-                return None
-
-            # Return data
-            return readme.read_text(encoding="utf-8")
-
-        return await self.sys_run_in_executor(read_readme)
-
-    def refresh_path_cache(self) -> Awaitable[None]:
-        """Refresh cache of existing paths."""
-
-        def check_paths():
-            self._path_icon_exists = self.path_icon.exists()
-            self._path_logo_exists = self.path_logo.exists()
-            self._path_changelog_exists = self.path_changelog.exists()
-            self._path_documentation_exists = self.path_documentation.exists()
-
-        return self.sys_run_in_executor(check_paths)
-
-    def validate_availability(self) -> None:
-        """Validate if addon is available for current system."""
-        return self._validate_availability(self.data, logger=_LOGGER.error)
-
-    def __eq__(self, other: Any) -> bool:
-        """Compare add-on objects."""
+    def __eq__(self, other):
+        """Compaired add-on objects."""
         if not isinstance(other, AddonModel):
             return False
         return self.slug == other.slug
 
-    def __hash__(self) -> int:
-        """Hash for add-on objects."""
-        return hash(self.slug)
-
-    def _validate_availability(
-        self, config, *, logger: Callable[..., None] | None = None
-    ) -> None:
-        """Validate if addon is available for current system."""
+    def _available(self, config) -> bool:
+        """Return True if this add-on is available on this platform."""
         # Architecture
         if not self.sys_arch.is_supported(config[ATTR_ARCH]):
-            raise AddonsNotSupportedError(
-                f"Add-on {self.slug} not supported on this platform, supported architectures: {', '.join(config[ATTR_ARCH])}",
-                logger,
-            )
+            return False
 
         # Machine / Hardware
         machine = config.get(ATTR_MACHINE)
-        if machine and (
-            f"!{self.sys_machine}" in machine or self.sys_machine not in machine
-        ):
-            raise AddonsNotSupportedError(
-                f"Add-on {self.slug} not supported on this machine, supported machine types: {', '.join(machine)}",
-                logger,
-            )
+        if machine and self.sys_machine not in machine:
+            return False
 
         # Home Assistant
-        version: AwesomeVersion | None = config.get(ATTR_HOMEASSISTANT)
-        with suppress(AwesomeVersionException, TypeError):
-            if version and not version_is_new_enough(
-                self.sys_homeassistant.version, version
-            ):
-                raise AddonsNotSupportedError(
-                    f"Add-on {self.slug} not supported on this system, requires Home Assistant version {version} or greater",
-                    logger,
-                )
-
-    def _available(self, config) -> bool:
-        """Return True if this add-on is available on this platform."""
-        try:
-            self._validate_availability(config)
-        except AddonsNotSupportedError:
+        version = config.get(ATTR_HOMEASSISTANT) or self.sys_homeassistant.version
+        if pkg_version.parse(self.sys_homeassistant.version) < pkg_version.parse(
+            version
+        ):
             return False
 
         return True
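Branch `197` gates availability with `packaging.version`, while `main` raises `AddonsNotSupportedError` instead of returning `False`. A minimal standalone sketch of the 197-style version gate (the version values are illustrative):

```python
from packaging import version as pkg_version

def homeassistant_new_enough(current: str, required: str) -> bool:
    # Mirrors the check in _available(): the add-on is unavailable when the
    # running Home Assistant is older than the add-on's minimum version.
    return pkg_version.parse(current) >= pkg_version.parse(required)

print(homeassistant_new_enough("0.109.6", "0.104.0"))  # True
print(homeassistant_new_enough("0.103.0", "0.104.0"))  # False
```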
@@ -724,3 +536,19 @@
 
         # local build
         return f"{config[ATTR_REPOSITORY]}/{self.sys_arch.default}-addon-{config[ATTR_SLUG]}"
+
+    def install(self) -> Awaitable[None]:
+        """Install this add-on."""
+        return self.sys_addons.install(self.slug)
+
+    def uninstall(self) -> Awaitable[None]:
+        """Uninstall this add-on."""
+        return self.sys_addons.uninstall(self.slug)
+
+    def update(self) -> Awaitable[None]:
+        """Update this add-on."""
+        return self.sys_addons.update(self.slug)
+
+    def rebuild(self) -> Awaitable[None]:
+        """Rebuild this add-on."""
+        return self.sys_addons.rebuild(self.slug)
101
hassio/addons/utils.py
Normal file
@@ -0,0 +1,101 @@
"""Util add-ons functions."""
from __future__ import annotations

import asyncio
import logging
from pathlib import Path
from typing import TYPE_CHECKING

from ..const import (
    PRIVILEGED_DAC_READ_SEARCH,
    PRIVILEGED_NET_ADMIN,
    PRIVILEGED_SYS_ADMIN,
    PRIVILEGED_SYS_MODULE,
    PRIVILEGED_SYS_PTRACE,
    PRIVILEGED_SYS_RAWIO,
    ROLE_ADMIN,
    ROLE_MANAGER,
    SECURITY_DISABLE,
    SECURITY_PROFILE,
)

if TYPE_CHECKING:
    from .model import AddonModel

_LOGGER: logging.Logger = logging.getLogger(__name__)


def rating_security(addon: AddonModel) -> int:
    """Return 1-6 for security rating.

    1 = not secure
    6 = high secure
    """
    rating = 5

    # AppArmor
    if addon.apparmor == SECURITY_DISABLE:
        rating += -1
    elif addon.apparmor == SECURITY_PROFILE:
        rating += 1

    # Home Assistant Login & Ingress
    if addon.with_ingress:
        rating += 2
    elif addon.access_auth_api:
        rating += 1

    # Privileged options
    if any(
        privilege in addon.privileged
        for privilege in (
            PRIVILEGED_NET_ADMIN,
            PRIVILEGED_SYS_ADMIN,
            PRIVILEGED_SYS_RAWIO,
            PRIVILEGED_SYS_PTRACE,
            PRIVILEGED_SYS_MODULE,
            PRIVILEGED_DAC_READ_SEARCH,
        )
    ):
        rating += -1

    # API Hass.io role
    if addon.hassio_role == ROLE_MANAGER:
        rating += -1
    elif addon.hassio_role == ROLE_ADMIN:
        rating += -2

    # Not secure Networking
    if addon.host_network:
        rating += -1

    # Insecure PID namespace
    if addon.host_pid:
        rating += -2

    # Full Access
    if addon.with_full_access:
        rating += -2

    # Docker Access
    if addon.access_docker_api:
        rating = 1

    return max(min(6, rating), 1)
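The rating starts at 5, moves up or down per finding, and is clamped to the 1-6 range; Docker API access overrides everything down to the floor. A hedged sketch of exercising it with a stand-in add-on object (the `SimpleNamespace` stand-in and its attribute values are illustrative, not part of the codebase):

```python
from types import SimpleNamespace

# Stand-in exposing just the attributes rating_security() reads.
addon = SimpleNamespace(
    apparmor="profile",        # SECURITY_PROFILE -> +1
    with_ingress=True,         # +2
    access_auth_api=False,
    privileged=["NET_ADMIN"],  # matches PRIVILEGED_NET_ADMIN -> -1
    hassio_role="default",
    host_network=True,         # -1
    host_pid=False,
    with_full_access=False,
    access_docker_api=False,
)

print(rating_security(addon))  # 5 + 1 + 2 - 1 - 1 = 6
```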
async def remove_data(folder: Path) -> None:
    """Remove folder and reset privileged."""
    try:
        proc = await asyncio.create_subprocess_exec(
            "rm", "-rf", str(folder), stdout=asyncio.subprocess.DEVNULL
        )

        _, error_msg = await proc.communicate()
    except OSError as err:
        error_msg = str(err)
    else:
        if proc.returncode == 0:
            return

    _LOGGER.error("Can't remove Add-on Data: %s", error_msg)
571
hassio/addons/validate.py
Normal file
@@ -0,0 +1,571 @@
"""Validate add-ons options schema."""
import logging
import re
import secrets
from typing import Any, Dict, List
import uuid

import voluptuous as vol

from ..const import (
    ARCH_ALL,
    ATTR_ACCESS_TOKEN,
    ATTR_ADVANCED,
    ATTR_APPARMOR,
    ATTR_ARCH,
    ATTR_ARGS,
    ATTR_AUDIO,
    ATTR_AUDIO_INPUT,
    ATTR_AUDIO_OUTPUT,
    ATTR_AUTH_API,
    ATTR_AUTO_UART,
    ATTR_AUTO_UPDATE,
    ATTR_BOOT,
    ATTR_BUILD_FROM,
    ATTR_DESCRIPTON,
    ATTR_DEVICES,
    ATTR_DEVICETREE,
    ATTR_DISCOVERY,
    ATTR_DOCKER_API,
    ATTR_ENVIRONMENT,
    ATTR_FULL_ACCESS,
    ATTR_GPIO,
    ATTR_HASSIO_API,
    ATTR_HASSIO_ROLE,
    ATTR_HOMEASSISTANT,
    ATTR_HOMEASSISTANT_API,
    ATTR_HOST_DBUS,
    ATTR_HOST_IPC,
    ATTR_HOST_NETWORK,
    ATTR_HOST_PID,
    ATTR_IMAGE,
    ATTR_INGRESS,
    ATTR_INGRESS_ENTRY,
    ATTR_INGRESS_PANEL,
    ATTR_INGRESS_PORT,
    ATTR_INGRESS_TOKEN,
    ATTR_KERNEL_MODULES,
    ATTR_LEGACY,
    ATTR_LOCATON,
    ATTR_MACHINE,
    ATTR_MAP,
    ATTR_NAME,
    ATTR_NETWORK,
    ATTR_OPTIONS,
    ATTR_PANEL_ADMIN,
    ATTR_PANEL_ICON,
    ATTR_PANEL_TITLE,
    ATTR_PORTS,
    ATTR_PORTS_DESCRIPTION,
    ATTR_PRIVILEGED,
    ATTR_PROTECTED,
    ATTR_REPOSITORY,
    ATTR_SCHEMA,
    ATTR_SERVICES,
    ATTR_SLUG,
    ATTR_SNAPSHOT_EXCLUDE,
    ATTR_SQUASH,
    ATTR_STAGE,
    ATTR_STARTUP,
    ATTR_STATE,
    ATTR_STDIN,
    ATTR_SYSTEM,
    ATTR_TIMEOUT,
    ATTR_TMPFS,
    ATTR_UDEV,
    ATTR_URL,
    ATTR_USER,
    ATTR_UUID,
    ATTR_VERSION,
    ATTR_WEBUI,
    BOOT_AUTO,
    BOOT_MANUAL,
    PRIVILEGED_ALL,
    ROLE_ALL,
    ROLE_DEFAULT,
    STARTUP_ALL,
    STARTUP_APPLICATION,
    STARTUP_SERVICES,
    STATE_STARTED,
    STATE_STOPPED,
    AddonStages,
)
from ..coresys import CoreSys
from ..discovery.validate import valid_discovery_service
from ..validate import (
    DOCKER_PORTS,
    DOCKER_PORTS_DESCRIPTION,
    alsa_device,
    network_port,
    token,
    uuid_match,
)

_LOGGER: logging.Logger = logging.getLogger(__name__)


RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")
RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$")

V_STR = "str"
V_INT = "int"
V_FLOAT = "float"
V_BOOL = "bool"
V_PASSWORD = "password"
V_EMAIL = "email"
V_URL = "url"
V_PORT = "port"
V_MATCH = "match"
V_LIST = "list"

RE_SCHEMA_ELEMENT = re.compile(
    r"^(?:"
    r"|bool|email|url|port"
    r"|str(?:\((?P<s_min>\d+)?,(?P<s_max>\d+)?\))?"
    r"|password(?:\((?P<p_min>\d+)?,(?P<p_max>\d+)?\))?"
    r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
    r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
    r"|match\((?P<match>.*)\)"
    r"|list\((?P<list>.+)\)"
    r")\??$"
)

_SCHEMA_LENGTH_PARTS = (
    "i_min",
    "i_max",
    "f_min",
    "f_max",
    "s_min",
    "s_max",
    "p_min",
    "p_max",
)

RE_DOCKER_IMAGE = re.compile(r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$")
RE_DOCKER_IMAGE_BUILD = re.compile(
    r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)(:[\.\-\w{}]+)?$"
)

SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)
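`RE_SCHEMA_ELEMENT` encodes the whole mini type language for add-on options: a bare type name, optional `(min,max)` bounds for numeric and string types, and a trailing `?` marking the option as optional. A small sketch of what it extracts:

```python
m = RE_SCHEMA_ELEMENT.match("int(0,100)?")
print(m.group("i_min"), m.group("i_max"))  # 0 100  (trailing "?" = optional)

m = RE_SCHEMA_ELEMENT.match("list(low|medium|high)")
print(m.group("list").split("|"))  # ['low', 'medium', 'high']
```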
MACHINE_ALL = [
    "intel-nuc",
    "odroid-c2",
    "odroid-n2",
    "odroid-xu",
    "qemuarm-64",
    "qemuarm",
    "qemux86-64",
    "qemux86",
    "raspberrypi",
    "raspberrypi2",
    "raspberrypi3-64",
    "raspberrypi3",
    "raspberrypi4-64",
    "raspberrypi4",
    "tinker",
]


def _simple_startup(value):
    """Simple startup schema."""
    if value == "before":
        return STARTUP_SERVICES
    if value == "after":
        return STARTUP_APPLICATION
    return value


# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_CONFIG = vol.Schema(
    {
        vol.Required(ATTR_NAME): vol.Coerce(str),
        vol.Required(ATTR_VERSION): vol.Coerce(str),
        vol.Required(ATTR_SLUG): vol.Coerce(str),
        vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
        vol.Required(ATTR_ARCH): [vol.In(ARCH_ALL)],
        vol.Optional(ATTR_MACHINE): [vol.In(MACHINE_ALL)],
        vol.Optional(ATTR_URL): vol.Url(),
        vol.Required(ATTR_STARTUP): vol.All(_simple_startup, vol.In(STARTUP_ALL)),
        vol.Required(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
        vol.Optional(ATTR_ADVANCED, default=False): vol.Boolean(),
        vol.Optional(ATTR_STAGE, default=AddonStages.STABLE): vol.Coerce(AddonStages),
        vol.Optional(ATTR_PORTS): DOCKER_PORTS,
        vol.Optional(ATTR_PORTS_DESCRIPTION): DOCKER_PORTS_DESCRIPTION,
        vol.Optional(ATTR_WEBUI): vol.Match(
            r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"
        ),
        vol.Optional(ATTR_INGRESS, default=False): vol.Boolean(),
        vol.Optional(ATTR_INGRESS_PORT, default=8099): vol.Any(
            network_port, vol.Equal(0)
        ),
        vol.Optional(ATTR_INGRESS_ENTRY): vol.Coerce(str),
        vol.Optional(ATTR_PANEL_ICON, default="mdi:puzzle"): vol.Coerce(str),
        vol.Optional(ATTR_PANEL_TITLE): vol.Coerce(str),
        vol.Optional(ATTR_PANEL_ADMIN, default=True): vol.Boolean(),
        vol.Optional(ATTR_HOMEASSISTANT): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
        vol.Optional(ATTR_HOST_PID, default=False): vol.Boolean(),
        vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
        vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
        vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
        vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
        vol.Optional(ATTR_UDEV, default=False): vol.Boolean(),
        vol.Optional(ATTR_TMPFS): vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
        vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
        vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
        vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
        vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
        vol.Optional(ATTR_FULL_ACCESS, default=False): vol.Boolean(),
        vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
        vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
        vol.Optional(ATTR_DEVICETREE, default=False): vol.Boolean(),
        vol.Optional(ATTR_KERNEL_MODULES, default=False): vol.Boolean(),
        vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
        vol.Optional(ATTR_HASSIO_ROLE, default=ROLE_DEFAULT): vol.In(ROLE_ALL),
        vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(),
        vol.Optional(ATTR_STDIN, default=False): vol.Boolean(),
        vol.Optional(ATTR_LEGACY, default=False): vol.Boolean(),
        vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
        vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
        vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
        vol.Optional(ATTR_DISCOVERY): [valid_discovery_service],
        vol.Optional(ATTR_SNAPSHOT_EXCLUDE): [vol.Coerce(str)],
        vol.Required(ATTR_OPTIONS): dict,
        vol.Required(ATTR_SCHEMA): vol.Any(
            vol.Schema(
                {
                    vol.Coerce(str): vol.Any(
                        SCHEMA_ELEMENT,
                        [
                            vol.Any(
                                SCHEMA_ELEMENT,
                                {
                                    vol.Coerce(str): vol.Any(
                                        SCHEMA_ELEMENT, [SCHEMA_ELEMENT]
                                    )
                                },
                            )
                        ],
                        vol.Schema(
                            {vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}
                        ),
                    )
                }
            ),
            False,
        ),
        vol.Optional(ATTR_IMAGE): vol.Match(RE_DOCKER_IMAGE),
        vol.Optional(ATTR_TIMEOUT, default=10): vol.All(
            vol.Coerce(int), vol.Range(min=10, max=120)
        ),
    },
    extra=vol.REMOVE_EXTRA,
)
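`SCHEMA_ADDON_CONFIG` is what every add-on's `config.json` must satisfy; unknown keys are dropped by `extra=vol.REMOVE_EXTRA` and legacy `startup` aliases are rewritten by `_simple_startup`. A hedged sketch of validating a minimal config (the field values are illustrative):

```python
config = SCHEMA_ADDON_CONFIG(
    {
        "name": "Example add-on",  # illustrative values throughout
        "version": "1.0.0",
        "slug": "example",
        "description": "Demo",
        "arch": ["amd64"],
        "startup": "before",       # legacy alias -> rewritten to "services"
        "boot": "auto",
        "options": {},
        "schema": {"message": "str?"},
        "unknown_key": 1,          # silently removed (REMOVE_EXTRA)
    }
)
print(config["startup"])  # "services"
```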
# pylint: disable=no-value-for-parameter
SCHEMA_BUILD_CONFIG = vol.Schema(
    {
        vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Schema(
            {vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD)}
        ),
        vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
        vol.Optional(ATTR_ARGS, default=dict): vol.Schema(
            {vol.Coerce(str): vol.Coerce(str)}
        ),
    },
    extra=vol.REMOVE_EXTRA,
)


# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_USER = vol.Schema(
    {
        vol.Required(ATTR_VERSION): vol.Coerce(str),
        vol.Optional(ATTR_IMAGE): vol.Coerce(str),
        vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): uuid_match,
        vol.Optional(ATTR_ACCESS_TOKEN): token,
        vol.Optional(ATTR_INGRESS_TOKEN, default=secrets.token_urlsafe): vol.Coerce(
            str
        ),
        vol.Optional(ATTR_OPTIONS, default=dict): dict,
        vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
        vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
        vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
        vol.Optional(ATTR_AUDIO_OUTPUT): alsa_device,
        vol.Optional(ATTR_AUDIO_INPUT): alsa_device,
        vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
        vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
    },
    extra=vol.REMOVE_EXTRA,
)


SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend(
    {
        vol.Required(ATTR_LOCATON): vol.Coerce(str),
        vol.Required(ATTR_REPOSITORY): vol.Coerce(str),
    }
)


SCHEMA_ADDONS_FILE = vol.Schema(
    {
        vol.Optional(ATTR_USER, default=dict): {vol.Coerce(str): SCHEMA_ADDON_USER},
        vol.Optional(ATTR_SYSTEM, default=dict): {vol.Coerce(str): SCHEMA_ADDON_SYSTEM},
    }
)


SCHEMA_ADDON_SNAPSHOT = vol.Schema(
    {
        vol.Required(ATTR_USER): SCHEMA_ADDON_USER,
        vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
        vol.Required(ATTR_STATE): vol.In([STATE_STARTED, STATE_STOPPED]),
        vol.Required(ATTR_VERSION): vol.Coerce(str),
    },
    extra=vol.REMOVE_EXTRA,
)
def validate_options(coresys: CoreSys, raw_schema: Dict[str, Any]):
    """Validate schema."""

    def validate(struct):
        """Create schema validator for add-ons options."""
        options = {}

        # read options
        for key, value in struct.items():
            # Ignore unknown options / remove from list
            if key not in raw_schema:
                _LOGGER.warning("Unknown options %s", key)
                continue

            typ = raw_schema[key]
            try:
                if isinstance(typ, list):
                    # nested value list
                    options[key] = _nested_validate_list(coresys, typ[0], value, key)
                elif isinstance(typ, dict):
                    # nested value dict
                    options[key] = _nested_validate_dict(coresys, typ, value, key)
                else:
                    # normal value
                    options[key] = _single_validate(coresys, typ, value, key)
            except (IndexError, KeyError):
                raise vol.Invalid(f"Type error for {key}") from None

        _check_missing_options(raw_schema, options, "root")
        return options

    return validate


# pylint: disable=no-value-for-parameter
# pylint: disable=inconsistent-return-statements
def _single_validate(coresys: CoreSys, typ: str, value: Any, key: str):
    """Validate a single element."""
    # if required argument
    if value is None:
        raise vol.Invalid(f"Missing required option '{key}'")

    # Lookup secret
    if str(value).startswith("!secret "):
        secret: str = value.partition(" ")[2]
        value = coresys.secrets.get(secret)
        if value is None:
            raise vol.Invalid(f"Unknown secret {secret}")

    # parse extend data from type
    match = RE_SCHEMA_ELEMENT.match(typ)

    # prepare range
    range_args = {}
    for group_name in _SCHEMA_LENGTH_PARTS:
        group_value = match.group(group_name)
        if group_value:
            range_args[group_name[2:]] = float(group_value)

    if typ.startswith(V_STR) or typ.startswith(V_PASSWORD):
        return vol.All(str(value), vol.Range(**range_args))(value)
    elif typ.startswith(V_INT):
        return vol.All(vol.Coerce(int), vol.Range(**range_args))(value)
    elif typ.startswith(V_FLOAT):
        return vol.All(vol.Coerce(float), vol.Range(**range_args))(value)
    elif typ.startswith(V_BOOL):
        return vol.Boolean()(value)
    elif typ.startswith(V_EMAIL):
        return vol.Email()(value)
    elif typ.startswith(V_URL):
        return vol.Url()(value)
    elif typ.startswith(V_PORT):
        return network_port(value)
    elif typ.startswith(V_MATCH):
        return vol.Match(match.group("match"))(str(value))
    elif typ.startswith(V_LIST):
        return vol.In(match.group("list").split("|"))(str(value))

    raise vol.Invalid(f"Fatal error for {key} type {typ}")


def _nested_validate_list(coresys, typ, data_list, key):
    """Validate nested items."""
    options = []

    for element in data_list:
        # Nested?
        if isinstance(typ, dict):
            c_options = _nested_validate_dict(coresys, typ, element, key)
            options.append(c_options)
        else:
            options.append(_single_validate(coresys, typ, element, key))

    return options


def _nested_validate_dict(coresys, typ, data_dict, key):
    """Validate nested items."""
    options = {}

    for c_key, c_value in data_dict.items():
        # Ignore unknown options / remove from list
        if c_key not in typ:
            _LOGGER.warning("Unknown options %s", c_key)
            continue

        # Nested?
        if isinstance(typ[c_key], list):
            options[c_key] = _nested_validate_list(
                coresys, typ[c_key][0], c_value, c_key
            )
        else:
            options[c_key] = _single_validate(coresys, typ[c_key], c_value, c_key)

    _check_missing_options(typ, options, key)
    return options


def _check_missing_options(origin, exists, root):
    """Check if all options exist."""
    missing = set(origin) - set(exists)
    for miss_opt in missing:
        if isinstance(origin[miss_opt], str) and origin[miss_opt].endswith("?"):
            continue
        raise vol.Invalid(f"Missing option {miss_opt} in {root}")
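`validate_options` closes over the raw schema and returns a callable that voluptuous can wrap, resolving `!secret` references through `coresys.secrets` along the way. A sketch of direct use (the `coresys` object is assumed to be available; any object with a `secrets.get()` mapping would serve here):

```python
raw_schema = {"ssl": "bool", "port": "port", "message": "str?"}
validator = validate_options(coresys, raw_schema)

options = validator({"ssl": True, "port": 8883, "message": "hi"})
print(options)  # {'ssl': True, 'port': 8883, 'message': 'hi'}

# Omitting a non-optional key raises vol.Invalid ("Missing option ssl in root");
# "message" ends with "?" and may be left out.
```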
def schema_ui_options(raw_schema: Dict[str, Any]) -> List[Dict[str, Any]]:
|
||||||
|
"""Generate UI schema."""
|
||||||
|
ui_schema = []
|
||||||
|
|
||||||
|
# read options
|
||||||
|
for key, value in raw_schema.items():
|
||||||
|
if isinstance(value, list):
|
||||||
|
# nested value list
|
||||||
|
_nested_ui_list(ui_schema, value, key)
|
||||||
|
elif isinstance(value, dict):
|
||||||
|
# nested value dict
|
||||||
|
_nested_ui_dict(ui_schema, value, key)
|
||||||
|
else:
|
||||||
|
# normal value
|
||||||
|
_single_ui_option(ui_schema, value, key)
|
||||||
|
|
||||||
|
return ui_schema
|
||||||
|
|
||||||
|
|
||||||
|
def _single_ui_option(
|
||||||
|
ui_schema: List[Dict[str, Any]], value: str, key: str, multiple: bool = False
|
||||||
|
) -> None:
|
||||||
|
"""Validate a single element."""
|
||||||
|
ui_node = {"name": key}
|
||||||
|
|
||||||
|
# If multiple
|
||||||
|
if multiple:
|
||||||
|
ui_node["multiple"] = True
|
||||||
|
|
||||||
|
# Parse extend data from type
|
||||||
|
match = RE_SCHEMA_ELEMENT.match(value)
|
||||||
|
|
||||||
|
# Prepare range
|
||||||
|
for group_name in _SCHEMA_LENGTH_PARTS:
|
||||||
|
group_value = match.group(group_name)
|
||||||
|
if not group_value:
|
||||||
|
continue
|
||||||
|
if group_name[2:] == "min":
|
||||||
|
ui_node["lengthMin"] = float(group_value)
|
||||||
|
elif group_name[2:] == "max":
|
||||||
|
ui_node["lengthMax"] = float(group_value)
|
||||||
|
|
||||||
|
# If required
|
||||||
|
if value.endswith("?"):
|
||||||
|
ui_node["optional"] = True
|
||||||
|
else:
|
||||||
|
ui_node["required"] = True
|
||||||
|
|
||||||
|
# Data types
|
||||||
|
if value.startswith(V_STR):
|
||||||
|
ui_node["type"] = "string"
|
||||||
|
elif value.startswith(V_PASSWORD):
|
||||||
|
ui_node["type"] = "string"
|
||||||
|
ui_node["format"] = "password"
|
||||||
|
elif value.startswith(V_INT):
|
||||||
|
ui_node["type"] = "integer"
|
||||||
|
elif value.startswith(V_FLOAT):
|
||||||
|
ui_node["type"] = "float"
|
||||||
|
elif value.startswith(V_BOOL):
|
||||||
|
ui_node["type"] = "boolean"
|
||||||
|
elif value.startswith(V_EMAIL):
|
||||||
|
ui_node["type"] = "string"
|
||||||
|
ui_node["format"] = "email"
|
||||||
|
elif value.startswith(V_URL):
|
||||||
|
ui_node["type"] = "string"
|
||||||
|
ui_node["format"] = "url"
|
||||||
|
elif value.startswith(V_PORT):
|
||||||
|
ui_node["type"] = "integer"
|
||||||
|
elif value.startswith(V_MATCH):
|
||||||
|
ui_node["type"] = "string"
|
||||||
|
elif value.startswith(V_LIST):
|
||||||
|
ui_node["type"] = "select"
|
||||||
|
ui_node["options"] = match.group("list").split("|")
|
||||||
|
|
||||||
|
ui_schema.append(ui_node)
|
||||||
|
|
||||||
|
|
||||||
|
def _nested_ui_list(
|
||||||
|
ui_schema: List[Dict[str, Any]], option_list: List[Any], key: str
|
||||||
|
) -> None:
|
||||||
|
"""UI nested list items."""
|
||||||
|
try:
|
||||||
|
element = option_list[0]
|
||||||
|
except IndexError:
|
||||||
|
_LOGGER.error("Invalid schema %s", key)
|
||||||
|
return
|
||||||
|
|
||||||
|
if isinstance(element, dict):
|
||||||
|
_nested_ui_dict(ui_schema, element, key, multiple=True)
|
||||||
|
else:
|
||||||
|
_single_ui_option(ui_schema, element, key, multiple=True)
|
||||||
|
|
||||||
|
|
||||||
|
def _nested_ui_dict(
|
||||||
|
ui_schema: List[Dict[str, Any]],
|
||||||
|
option_dict: Dict[str, Any],
|
||||||
|
key: str,
|
||||||
|
multiple: bool = False,
|
||||||
|
) -> None:
|
||||||
|
"""UI nested dict items."""
|
||||||
|
ui_node = {"name": key, "type": "schema", "optional": True, "multiple": multiple}
|
||||||
|
|
||||||
|
nested_schema = []
|
||||||
|
for c_key, c_value in option_dict.items():
|
||||||
|
# Nested?
|
||||||
|
if isinstance(c_value, list):
|
||||||
|
_nested_ui_list(nested_schema, c_value, c_key)
|
||||||
|
else:
|
||||||
|
_single_ui_option(nested_schema, c_value, c_key)
|
||||||
|
|
||||||
|
ui_node["schema"] = nested_schema
|
||||||
|
ui_schema.append(ui_node)
|
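For orientation, a minimal sketch of what the UI-schema generation above produces. The option type strings are hypothetical add-on options; "list(dev|prod)" assumes the list(a|b) element syntax parsed by RE_SCHEMA_ELEMENT, and the annotated results follow the branches of _single_ui_option.

# Hypothetical raw add-on schema and the UI nodes it maps to:
raw_schema = {
    "ssl": "bool",             # -> {"name": "ssl", "required": True, "type": "boolean"}
    "password": "password?",   # -> optional string with format "password"
    "port": "port",            # -> required integer
    "mode": "list(dev|prod)",  # -> select with options ["dev", "prod"]
}
# schema_ui_options(raw_schema) returns one ui_node dict per key.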
322
hassio/api/__init__.py
Normal file
@@ -0,0 +1,322 @@
"""Init file for Hass.io RESTful API."""
import logging
from pathlib import Path
from typing import Optional

from aiohttp import web

from ..coresys import CoreSys, CoreSysAttributes
from .addons import APIAddons
from .auth import APIAuth
from .discovery import APIDiscovery
from .dns import APICoreDNS
from .hardware import APIHardware
from .hassos import APIHassOS
from .homeassistant import APIHomeAssistant
from .host import APIHost
from .info import APIInfo
from .ingress import APIIngress
from .proxy import APIProxy
from .security import SecurityMiddleware
from .services import APIServices
from .snapshots import APISnapshots
from .supervisor import APISupervisor

_LOGGER: logging.Logger = logging.getLogger(__name__)


MAX_CLIENT_SIZE: int = 1024 ** 2 * 16


class RestAPI(CoreSysAttributes):
    """Handle RESTful API for Hass.io."""

    def __init__(self, coresys: CoreSys):
        """Initialize the RESTful API."""
        self.coresys: CoreSys = coresys
        self.security: SecurityMiddleware = SecurityMiddleware(coresys)
        self.webapp: web.Application = web.Application(
            client_max_size=MAX_CLIENT_SIZE,
            middlewares=[self.security.token_validation],
        )

        # service stuff
        self._runner: web.AppRunner = web.AppRunner(self.webapp)
        self._site: Optional[web.TCPSite] = None

    async def load(self) -> None:
        """Register REST API calls."""
        self._register_supervisor()
        self._register_host()
        self._register_hassos()
        self._register_hardware()
        self._register_homeassistant()
        self._register_proxy()
        self._register_panel()
        self._register_addons()
        self._register_ingress()
        self._register_snapshots()
        self._register_discovery()
        self._register_services()
        self._register_info()
        self._register_auth()
        self._register_dns()

    def _register_host(self) -> None:
        """Register host control functions."""
        api_host = APIHost()
        api_host.coresys = self.coresys

        self.webapp.add_routes(
            [
                web.get("/host/info", api_host.info),
                web.post("/host/reboot", api_host.reboot),
                web.post("/host/shutdown", api_host.shutdown),
                web.post("/host/reload", api_host.reload),
                web.post("/host/options", api_host.options),
                web.get("/host/services", api_host.services),
                web.post("/host/services/{service}/stop", api_host.service_stop),
                web.post("/host/services/{service}/start", api_host.service_start),
                web.post("/host/services/{service}/restart", api_host.service_restart),
                web.post("/host/services/{service}/reload", api_host.service_reload),
            ]
        )

    def _register_hassos(self) -> None:
        """Register HassOS functions."""
        api_hassos = APIHassOS()
        api_hassos.coresys = self.coresys

        self.webapp.add_routes(
            [
                web.get("/hassos/info", api_hassos.info),
                web.post("/hassos/update", api_hassos.update),
                web.post("/hassos/update/cli", api_hassos.update_cli),
                web.post("/hassos/config/sync", api_hassos.config_sync),
            ]
        )

    def _register_hardware(self) -> None:
        """Register hardware functions."""
        api_hardware = APIHardware()
        api_hardware.coresys = self.coresys

        self.webapp.add_routes(
            [
                web.get("/hardware/info", api_hardware.info),
                web.get("/hardware/audio", api_hardware.audio),
                web.post("/hardware/trigger", api_hardware.trigger),
            ]
        )

    def _register_info(self) -> None:
        """Register info functions."""
        api_info = APIInfo()
        api_info.coresys = self.coresys

        self.webapp.add_routes([web.get("/info", api_info.info)])

    def _register_auth(self) -> None:
        """Register auth functions."""
        api_auth = APIAuth()
        api_auth.coresys = self.coresys

        self.webapp.add_routes(
            [web.post("/auth", api_auth.auth), web.post("/auth/reset", api_auth.reset)]
        )

    def _register_supervisor(self) -> None:
        """Register Supervisor functions."""
        api_supervisor = APISupervisor()
        api_supervisor.coresys = self.coresys

        self.webapp.add_routes(
            [
                web.get("/supervisor/ping", api_supervisor.ping),
                web.get("/supervisor/info", api_supervisor.info),
                web.get("/supervisor/stats", api_supervisor.stats),
                web.get("/supervisor/logs", api_supervisor.logs),
                web.post("/supervisor/update", api_supervisor.update),
                web.post("/supervisor/reload", api_supervisor.reload),
                web.post("/supervisor/options", api_supervisor.options),
                web.post("/supervisor/repair", api_supervisor.repair),
            ]
        )

    def _register_homeassistant(self) -> None:
        """Register Home Assistant functions."""
        api_hass = APIHomeAssistant()
        api_hass.coresys = self.coresys

        self.webapp.add_routes(
            [
                web.get("/homeassistant/info", api_hass.info),
                web.get("/homeassistant/logs", api_hass.logs),
                web.get("/homeassistant/stats", api_hass.stats),
                web.post("/homeassistant/options", api_hass.options),
                web.post("/homeassistant/update", api_hass.update),
                web.post("/homeassistant/restart", api_hass.restart),
                web.post("/homeassistant/stop", api_hass.stop),
                web.post("/homeassistant/start", api_hass.start),
                web.post("/homeassistant/check", api_hass.check),
                web.post("/homeassistant/rebuild", api_hass.rebuild),
            ]
        )

    def _register_proxy(self) -> None:
        """Register Home Assistant API Proxy."""
        api_proxy = APIProxy()
        api_proxy.coresys = self.coresys

        self.webapp.add_routes(
            [
                web.get("/homeassistant/api/websocket", api_proxy.websocket),
                web.get("/homeassistant/websocket", api_proxy.websocket),
                web.get("/homeassistant/api/stream", api_proxy.stream),
                web.post("/homeassistant/api/{path:.+}", api_proxy.api),
                web.get("/homeassistant/api/{path:.+}", api_proxy.api),
                web.get("/homeassistant/api/", api_proxy.api),
            ]
        )

    def _register_addons(self) -> None:
        """Register Add-on functions."""
        api_addons = APIAddons()
        api_addons.coresys = self.coresys

        self.webapp.add_routes(
            [
                web.get("/addons", api_addons.list),
                web.post("/addons/reload", api_addons.reload),
                web.get("/addons/{addon}/info", api_addons.info),
                web.post("/addons/{addon}/install", api_addons.install),
                web.post("/addons/{addon}/uninstall", api_addons.uninstall),
                web.post("/addons/{addon}/start", api_addons.start),
                web.post("/addons/{addon}/stop", api_addons.stop),
                web.post("/addons/{addon}/restart", api_addons.restart),
                web.post("/addons/{addon}/update", api_addons.update),
                web.post("/addons/{addon}/options", api_addons.options),
                web.post("/addons/{addon}/rebuild", api_addons.rebuild),
                web.get("/addons/{addon}/logs", api_addons.logs),
                web.get("/addons/{addon}/icon", api_addons.icon),
                web.get("/addons/{addon}/logo", api_addons.logo),
                web.get("/addons/{addon}/changelog", api_addons.changelog),
                web.get("/addons/{addon}/documentation", api_addons.documentation),
                web.post("/addons/{addon}/stdin", api_addons.stdin),
                web.post("/addons/{addon}/security", api_addons.security),
                web.get("/addons/{addon}/stats", api_addons.stats),
            ]
        )

    def _register_ingress(self) -> None:
        """Register Ingress functions."""
        api_ingress = APIIngress()
        api_ingress.coresys = self.coresys

        self.webapp.add_routes(
            [
                web.post("/ingress/session", api_ingress.create_session),
                web.get("/ingress/panels", api_ingress.panels),
                web.view("/ingress/{token}/{path:.*}", api_ingress.handler),
            ]
        )

    def _register_snapshots(self) -> None:
        """Register snapshots functions."""
        api_snapshots = APISnapshots()
        api_snapshots.coresys = self.coresys

        self.webapp.add_routes(
            [
                web.get("/snapshots", api_snapshots.list),
                web.post("/snapshots/reload", api_snapshots.reload),
                web.post("/snapshots/new/full", api_snapshots.snapshot_full),
                web.post("/snapshots/new/partial", api_snapshots.snapshot_partial),
                web.post("/snapshots/new/upload", api_snapshots.upload),
                web.get("/snapshots/{snapshot}/info", api_snapshots.info),
                web.post("/snapshots/{snapshot}/remove", api_snapshots.remove),
                web.post(
                    "/snapshots/{snapshot}/restore/full", api_snapshots.restore_full
                ),
                web.post(
                    "/snapshots/{snapshot}/restore/partial",
                    api_snapshots.restore_partial,
                ),
                web.get("/snapshots/{snapshot}/download", api_snapshots.download),
            ]
        )

    def _register_services(self) -> None:
        """Register services functions."""
        api_services = APIServices()
        api_services.coresys = self.coresys

        self.webapp.add_routes(
            [
                web.get("/services", api_services.list),
                web.get("/services/{service}", api_services.get_service),
                web.post("/services/{service}", api_services.set_service),
                web.delete("/services/{service}", api_services.del_service),
            ]
        )

    def _register_discovery(self) -> None:
        """Register discovery functions."""
        api_discovery = APIDiscovery()
        api_discovery.coresys = self.coresys

        self.webapp.add_routes(
            [
                web.get("/discovery", api_discovery.list),
                web.get("/discovery/{uuid}", api_discovery.get_discovery),
                web.delete("/discovery/{uuid}", api_discovery.del_discovery),
                web.post("/discovery", api_discovery.set_discovery),
            ]
        )

    def _register_dns(self) -> None:
        """Register DNS functions."""
        api_dns = APICoreDNS()
        api_dns.coresys = self.coresys

        self.webapp.add_routes(
            [
                web.get("/dns/info", api_dns.info),
                web.get("/dns/stats", api_dns.stats),
                web.get("/dns/logs", api_dns.logs),
                web.post("/dns/update", api_dns.update),
                web.post("/dns/options", api_dns.options),
                web.post("/dns/restart", api_dns.restart),
                web.post("/dns/reset", api_dns.reset),
            ]
        )

    def _register_panel(self) -> None:
        """Register panel for Home Assistant."""
        panel_dir = Path(__file__).parent.joinpath("panel")
        self.webapp.add_routes([web.static("/app", panel_dir)])

    async def start(self) -> None:
        """Run RESTful API webserver."""
        await self._runner.setup()
        self._site = web.TCPSite(
            self._runner, host="0.0.0.0", port=80, shutdown_timeout=5
        )

        try:
            await self._site.start()
        except OSError as err:
            _LOGGER.fatal("Failed to create HTTP server at 0.0.0.0:80 -> %s", err)
        else:
            _LOGGER.info("Start API on %s", self.sys_docker.network.supervisor)

    async def stop(self) -> None:
        """Stop RESTful API webserver."""
        if not self._site:
            return

        # Shutdown running API
        await self._site.stop()
        await self._runner.cleanup()

        _LOGGER.info("Stop API on %s", self.sys_docker.network.supervisor)
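A minimal lifecycle sketch for the class above, assuming a fully bootstrapped CoreSys instance (constructing coresys is outside this file): load() registers every route group, start() binds the aiohttp site on port 80, and stop() tears it down.

import asyncio

from hassio.api import RestAPI

async def serve(coresys):
    api = RestAPI(coresys)
    await api.load()   # register /supervisor, /host, /addons, ... routes
    await api.start()  # bind web.TCPSite on 0.0.0.0:80
    try:
        await asyncio.Event().wait()  # placeholder: serve until cancelled
    finally:
        await api.stop()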
454
hassio/api/addons.py
Normal file
@@ -0,0 +1,454 @@
"""Init file for Hass.io Home Assistant RESTful API."""
import asyncio
import logging
from typing import Any, Awaitable, Dict, List, Optional

from aiohttp import web
import voluptuous as vol

from ..addons import AnyAddon
from ..addons.utils import rating_security
from ..const import (
    ATTR_ADDONS,
    ATTR_ADVANCED,
    ATTR_APPARMOR,
    ATTR_ARCH,
    ATTR_AUDIO,
    ATTR_AUDIO_INPUT,
    ATTR_AUDIO_OUTPUT,
    ATTR_AUTH_API,
    ATTR_AUTO_UPDATE,
    ATTR_AVAILABLE,
    ATTR_BLK_READ,
    ATTR_BLK_WRITE,
    ATTR_BOOT,
    ATTR_BUILD,
    ATTR_CHANGELOG,
    ATTR_CPU_PERCENT,
    ATTR_DESCRIPTON,
    ATTR_DETACHED,
    ATTR_DEVICES,
    ATTR_DEVICETREE,
    ATTR_DISCOVERY,
    ATTR_DNS,
    ATTR_DOCKER_API,
    ATTR_DOCUMENTATION,
    ATTR_FULL_ACCESS,
    ATTR_GPIO,
    ATTR_HASSIO_API,
    ATTR_HASSIO_ROLE,
    ATTR_HOMEASSISTANT,
    ATTR_HOMEASSISTANT_API,
    ATTR_HOST_DBUS,
    ATTR_HOST_IPC,
    ATTR_HOST_NETWORK,
    ATTR_HOST_PID,
    ATTR_HOSTNAME,
    ATTR_ICON,
    ATTR_INGRESS,
    ATTR_INGRESS_ENTRY,
    ATTR_INGRESS_PANEL,
    ATTR_INGRESS_PORT,
    ATTR_INGRESS_URL,
    ATTR_INSTALLED,
    ATTR_IP_ADDRESS,
    ATTR_KERNEL_MODULES,
    ATTR_LAST_VERSION,
    ATTR_LOGO,
    ATTR_LONG_DESCRIPTION,
    ATTR_MACHINE,
    ATTR_MAINTAINER,
    ATTR_MEMORY_LIMIT,
    ATTR_MEMORY_PERCENT,
    ATTR_MEMORY_USAGE,
    ATTR_NAME,
    ATTR_NETWORK,
    ATTR_NETWORK_DESCRIPTION,
    ATTR_NETWORK_RX,
    ATTR_NETWORK_TX,
    ATTR_OPTIONS,
    ATTR_PRIVILEGED,
    ATTR_PROTECTED,
    ATTR_RATING,
    ATTR_REPOSITORIES,
    ATTR_REPOSITORY,
    ATTR_SCHEMA,
    ATTR_SERVICES,
    ATTR_SLUG,
    ATTR_SOURCE,
    ATTR_STAGE,
    ATTR_STATE,
    ATTR_STDIN,
    ATTR_UDEV,
    ATTR_URL,
    ATTR_VERSION,
    ATTR_WEBUI,
    BOOT_AUTO,
    BOOT_MANUAL,
    CONTENT_TYPE_BINARY,
    CONTENT_TYPE_PNG,
    CONTENT_TYPE_TEXT,
    REQUEST_FROM,
    STATE_NONE,
)
from ..coresys import CoreSysAttributes
from ..docker.stats import DockerStats
from ..exceptions import APIError
from ..validate import DOCKER_PORTS, alsa_device
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})

# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema(
    {
        vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
        vol.Optional(ATTR_NETWORK): vol.Any(None, DOCKER_PORTS),
        vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
        vol.Optional(ATTR_AUDIO_OUTPUT): alsa_device,
        vol.Optional(ATTR_AUDIO_INPUT): alsa_device,
        vol.Optional(ATTR_INGRESS_PANEL): vol.Boolean(),
    }
)

# pylint: disable=no-value-for-parameter
SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()})


class APIAddons(CoreSysAttributes):
    """Handle RESTful API for add-on functions."""

    def _extract_addon(
        self, request: web.Request, check_installed: bool = True
    ) -> AnyAddon:
        """Return addon, throw an exception if it doesn't exist."""
        addon_slug: str = request.match_info.get("addon")

        # Lookup itself
        if addon_slug == "self":
            return request.get(REQUEST_FROM)

        addon = self.sys_addons.get(addon_slug)
        if not addon:
            raise APIError("Addon does not exist")

        if check_installed and not addon.is_installed:
            raise APIError("Addon is not installed")

        return addon

    @api_process
    async def list(self, request: web.Request) -> Dict[str, Any]:
        """Return all add-ons or repositories."""
        data_addons = []
        for addon in self.sys_addons.all:
            data_addons.append(
                {
                    ATTR_NAME: addon.name,
                    ATTR_SLUG: addon.slug,
                    ATTR_DESCRIPTON: addon.description,
                    ATTR_ADVANCED: addon.advanced,
                    ATTR_STAGE: addon.stage,
                    ATTR_VERSION: addon.latest_version,
                    ATTR_INSTALLED: addon.version if addon.is_installed else None,
                    ATTR_AVAILABLE: addon.available,
                    ATTR_DETACHED: addon.is_detached,
                    ATTR_REPOSITORY: addon.repository,
                    ATTR_BUILD: addon.need_build,
                    ATTR_URL: addon.url,
                    ATTR_ICON: addon.with_icon,
                    ATTR_LOGO: addon.with_logo,
                }
            )

        data_repositories = []
        for repository in self.sys_store.all:
            data_repositories.append(
                {
                    ATTR_SLUG: repository.slug,
                    ATTR_NAME: repository.name,
                    ATTR_SOURCE: repository.source,
                    ATTR_URL: repository.url,
                    ATTR_MAINTAINER: repository.maintainer,
                }
            )

        return {ATTR_ADDONS: data_addons, ATTR_REPOSITORIES: data_repositories}

    @api_process
    async def reload(self, request: web.Request) -> None:
        """Reload all add-on data from store."""
        await asyncio.shield(self.sys_store.reload())

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return add-on information."""
        addon: AnyAddon = self._extract_addon(request, check_installed=False)

        data = {
            ATTR_NAME: addon.name,
            ATTR_SLUG: addon.slug,
            ATTR_HOSTNAME: addon.hostname,
            ATTR_DNS: addon.dns,
            ATTR_DESCRIPTON: addon.description,
            ATTR_LONG_DESCRIPTION: addon.long_description,
            ATTR_ADVANCED: addon.advanced,
            ATTR_STAGE: addon.stage,
            ATTR_AUTO_UPDATE: None,
            ATTR_REPOSITORY: addon.repository,
            ATTR_VERSION: None,
            ATTR_LAST_VERSION: addon.latest_version,
            ATTR_PROTECTED: addon.protected,
            ATTR_RATING: rating_security(addon),
            ATTR_BOOT: addon.boot,
            ATTR_OPTIONS: addon.options,
            ATTR_SCHEMA: addon.schema_ui,
            ATTR_ARCH: addon.supported_arch,
            ATTR_MACHINE: addon.supported_machine,
            ATTR_HOMEASSISTANT: addon.homeassistant_version,
            ATTR_URL: addon.url,
            ATTR_STATE: STATE_NONE,
            ATTR_DETACHED: addon.is_detached,
            ATTR_AVAILABLE: addon.available,
            ATTR_BUILD: addon.need_build,
            ATTR_NETWORK: addon.ports,
            ATTR_NETWORK_DESCRIPTION: addon.ports_description,
            ATTR_HOST_NETWORK: addon.host_network,
            ATTR_HOST_PID: addon.host_pid,
            ATTR_HOST_IPC: addon.host_ipc,
            ATTR_HOST_DBUS: addon.host_dbus,
            ATTR_PRIVILEGED: addon.privileged,
            ATTR_FULL_ACCESS: addon.with_full_access,
            ATTR_APPARMOR: addon.apparmor,
            ATTR_DEVICES: _pretty_devices(addon),
            ATTR_ICON: addon.with_icon,
            ATTR_LOGO: addon.with_logo,
            ATTR_CHANGELOG: addon.with_changelog,
            ATTR_DOCUMENTATION: addon.with_documentation,
            ATTR_STDIN: addon.with_stdin,
            ATTR_WEBUI: None,
            ATTR_HASSIO_API: addon.access_hassio_api,
            ATTR_HASSIO_ROLE: addon.hassio_role,
            ATTR_AUTH_API: addon.access_auth_api,
            ATTR_HOMEASSISTANT_API: addon.access_homeassistant_api,
            ATTR_GPIO: addon.with_gpio,
            ATTR_KERNEL_MODULES: addon.with_kernel_modules,
            ATTR_DEVICETREE: addon.with_devicetree,
            ATTR_UDEV: addon.with_udev,
            ATTR_DOCKER_API: addon.access_docker_api,
            ATTR_AUDIO: addon.with_audio,
            ATTR_AUDIO_INPUT: None,
            ATTR_AUDIO_OUTPUT: None,
            ATTR_SERVICES: _pretty_services(addon),
            ATTR_DISCOVERY: addon.discovery,
            ATTR_IP_ADDRESS: None,
            ATTR_INGRESS: addon.with_ingress,
            ATTR_INGRESS_ENTRY: None,
            ATTR_INGRESS_URL: None,
            ATTR_INGRESS_PORT: None,
            ATTR_INGRESS_PANEL: None,
        }

        if addon.is_installed:
            data.update(
                {
                    ATTR_STATE: await addon.state(),
                    ATTR_WEBUI: addon.webui,
                    ATTR_INGRESS_ENTRY: addon.ingress_entry,
                    ATTR_INGRESS_URL: addon.ingress_url,
                    ATTR_INGRESS_PORT: addon.ingress_port,
                    ATTR_INGRESS_PANEL: addon.ingress_panel,
                    ATTR_AUDIO_INPUT: addon.audio_input,
                    ATTR_AUDIO_OUTPUT: addon.audio_output,
                    ATTR_AUTO_UPDATE: addon.auto_update,
                    ATTR_IP_ADDRESS: str(addon.ip_address),
                    ATTR_VERSION: addon.version,
                }
            )

        return data

    @api_process
    async def options(self, request: web.Request) -> None:
        """Store user options for add-on."""
        addon: AnyAddon = self._extract_addon(request)

        # Update secrets for validation
        await self.sys_secrets.reload()

        # Extend schema with add-on specific validation
        addon_schema = SCHEMA_OPTIONS.extend(
            {vol.Optional(ATTR_OPTIONS): vol.Any(None, addon.schema)}
        )

        # Validate/Process Body
        body = await api_validate(addon_schema, request, origin=[ATTR_OPTIONS])
        if ATTR_OPTIONS in body:
            addon.options = body[ATTR_OPTIONS]
        if ATTR_BOOT in body:
            addon.boot = body[ATTR_BOOT]
        if ATTR_AUTO_UPDATE in body:
            addon.auto_update = body[ATTR_AUTO_UPDATE]
        if ATTR_NETWORK in body:
            addon.ports = body[ATTR_NETWORK]
        if ATTR_AUDIO_INPUT in body:
            addon.audio_input = body[ATTR_AUDIO_INPUT]
        if ATTR_AUDIO_OUTPUT in body:
            addon.audio_output = body[ATTR_AUDIO_OUTPUT]
        if ATTR_INGRESS_PANEL in body:
            addon.ingress_panel = body[ATTR_INGRESS_PANEL]
            await self.sys_ingress.update_hass_panel(addon)

        addon.save_persist()

    @api_process
    async def security(self, request: web.Request) -> None:
        """Store security options for add-on."""
        addon: AnyAddon = self._extract_addon(request)
        body: Dict[str, Any] = await api_validate(SCHEMA_SECURITY, request)

        if ATTR_PROTECTED in body:
            _LOGGER.warning("Protected flag changing for %s!", addon.slug)
            addon.protected = body[ATTR_PROTECTED]

        addon.save_persist()

    @api_process
    async def stats(self, request: web.Request) -> Dict[str, Any]:
        """Return resource information."""
        addon: AnyAddon = self._extract_addon(request)
        stats: DockerStats = await addon.stats()

        return {
            ATTR_CPU_PERCENT: stats.cpu_percent,
            ATTR_MEMORY_USAGE: stats.memory_usage,
            ATTR_MEMORY_LIMIT: stats.memory_limit,
            ATTR_MEMORY_PERCENT: stats.memory_percent,
            ATTR_NETWORK_RX: stats.network_rx,
            ATTR_NETWORK_TX: stats.network_tx,
            ATTR_BLK_READ: stats.blk_read,
            ATTR_BLK_WRITE: stats.blk_write,
        }

    @api_process
    def install(self, request: web.Request) -> Awaitable[None]:
        """Install add-on."""
        addon: AnyAddon = self._extract_addon(request, check_installed=False)
        return asyncio.shield(addon.install())

    @api_process
    def uninstall(self, request: web.Request) -> Awaitable[None]:
        """Uninstall add-on."""
        addon: AnyAddon = self._extract_addon(request)
        return asyncio.shield(addon.uninstall())

    @api_process
    def start(self, request: web.Request) -> Awaitable[None]:
        """Start add-on."""
        addon: AnyAddon = self._extract_addon(request)
        return asyncio.shield(addon.start())

    @api_process
    def stop(self, request: web.Request) -> Awaitable[None]:
        """Stop add-on."""
        addon: AnyAddon = self._extract_addon(request)
        return asyncio.shield(addon.stop())

    @api_process
    def update(self, request: web.Request) -> Awaitable[None]:
        """Update add-on."""
        addon: AnyAddon = self._extract_addon(request)

        if addon.latest_version == addon.version:
            raise APIError("No update available!")

        return asyncio.shield(addon.update())

    @api_process
    def restart(self, request: web.Request) -> Awaitable[None]:
        """Restart add-on."""
        addon: AnyAddon = self._extract_addon(request)
        return asyncio.shield(addon.restart())

    @api_process
    def rebuild(self, request: web.Request) -> Awaitable[None]:
        """Rebuild local build add-on."""
        addon: AnyAddon = self._extract_addon(request)
        if not addon.need_build:
            raise APIError("Only local build addons are supported")

        return asyncio.shield(addon.rebuild())

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request: web.Request) -> Awaitable[bytes]:
        """Return logs from add-on."""
        addon: AnyAddon = self._extract_addon(request)
        return addon.logs()

    @api_process_raw(CONTENT_TYPE_PNG)
    async def icon(self, request: web.Request) -> bytes:
        """Return icon from add-on."""
        addon: AnyAddon = self._extract_addon(request, check_installed=False)
        if not addon.with_icon:
            raise APIError("No icon found!")

        with addon.path_icon.open("rb") as png:
            return png.read()

    @api_process_raw(CONTENT_TYPE_PNG)
    async def logo(self, request: web.Request) -> bytes:
        """Return logo from add-on."""
        addon: AnyAddon = self._extract_addon(request, check_installed=False)
        if not addon.with_logo:
            raise APIError("No logo found!")

        with addon.path_logo.open("rb") as png:
            return png.read()

    @api_process_raw(CONTENT_TYPE_TEXT)
    async def changelog(self, request: web.Request) -> str:
        """Return changelog from add-on."""
        addon: AnyAddon = self._extract_addon(request, check_installed=False)
        if not addon.with_changelog:
            raise APIError("No changelog found!")

        with addon.path_changelog.open("r") as changelog:
            return changelog.read()

    @api_process_raw(CONTENT_TYPE_TEXT)
    async def documentation(self, request: web.Request) -> str:
        """Return documentation from add-on."""
        addon: AnyAddon = self._extract_addon(request, check_installed=False)
        if not addon.with_documentation:
            raise APIError("No documentation found!")

        with addon.path_documentation.open("r") as documentation:
            return documentation.read()

    @api_process
    async def stdin(self, request: web.Request) -> None:
        """Write to stdin of add-on."""
        addon: AnyAddon = self._extract_addon(request)
        if not addon.with_stdin:
            raise APIError("STDIN not supported by add-on")

        data = await request.read()
        await asyncio.shield(addon.write_stdin(data))


def _pretty_devices(addon: AnyAddon) -> Optional[List[str]]:
    """Return a simplified device list."""
    dev_list = addon.devices
    if not dev_list:
        return None
    return [row.split(":")[0] for row in dev_list]


def _pretty_services(addon: AnyAddon) -> List[str]:
    """Return a simplified services role list."""
    services = []
    for name, access in addon.services_role.items():
        services.append(f"{name}:{access}")
    return services
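A hedged client-side sketch of POST /addons/self/options from inside an add-on container. The http://hassio hostname and the HASSIO_TOKEN/X-Hassio-Key pair are assumptions about the add-on environment of this era, and "ssl" is a hypothetical add-on option.

import asyncio
import os

import aiohttp

async def set_own_options():
    headers = {"X-Hassio-Key": os.environ["HASSIO_TOKEN"]}  # assumed token header
    async with aiohttp.ClientSession(headers=headers) as session:
        async with session.post(
            "http://hassio/addons/self/options",
            json={"options": {"ssl": True}},  # hypothetical option
        ) as resp:
            print(await resp.json())  # api_process wraps answers in a result/data envelope

asyncio.run(set_own_options())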
88
hassio/api/auth.py
Normal file
@@ -0,0 +1,88 @@
"""Init file for Hass.io auth/SSO RESTful API."""
import asyncio
import logging
from typing import Awaitable, Dict

from aiohttp import BasicAuth, web
from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE, WWW_AUTHENTICATE
from aiohttp.web_exceptions import HTTPUnauthorized
import voluptuous as vol

from ..addons.addon import Addon
from ..const import (
    ATTR_PASSWORD,
    ATTR_USERNAME,
    CONTENT_TYPE_JSON,
    CONTENT_TYPE_URL,
    REQUEST_FROM,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIForbidden
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

SCHEMA_PASSWORD_RESET = vol.Schema(
    {
        vol.Required(ATTR_USERNAME): vol.Coerce(str),
        vol.Required(ATTR_PASSWORD): vol.Coerce(str),
    }
)


class APIAuth(CoreSysAttributes):
    """Handle RESTful API for auth functions."""

    def _process_basic(self, request: web.Request, addon: Addon) -> Awaitable[bool]:
        """Process login request with basic auth.

        Return a coroutine.
        """
        auth = BasicAuth.decode(request.headers[AUTHORIZATION])
        return self.sys_auth.check_login(addon, auth.login, auth.password)

    def _process_dict(
        self, request: web.Request, addon: Addon, data: Dict[str, str]
    ) -> Awaitable[bool]:
        """Process login with dict data.

        Return a coroutine.
        """
        username = data.get("username") or data.get("user")
        password = data.get("password")

        return self.sys_auth.check_login(addon, username, password)

    @api_process
    async def auth(self, request: web.Request) -> bool:
        """Process login request."""
        addon = request[REQUEST_FROM]

        if not addon.access_auth_api:
            raise APIForbidden("Can't use Home Assistant auth!")

        # BasicAuth
        if AUTHORIZATION in request.headers:
            return await self._process_basic(request, addon)

        # Json
        if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON:
            data = await request.json()
            return await self._process_dict(request, addon, data)

        # URL encoded
        if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_URL:
            data = await request.post()
            return await self._process_dict(request, addon, data)

        raise HTTPUnauthorized(
            headers={WWW_AUTHENTICATE: 'Basic realm="Hass.io Authentication"'}
        )

    @api_process
    async def reset(self, request: web.Request) -> None:
        """Process reset password request."""
        body: Dict[str, str] = await api_validate(SCHEMA_PASSWORD_RESET, request)
        await asyncio.shield(
            self.sys_auth.change_password(body[ATTR_USERNAME], body[ATTR_PASSWORD])
        )
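A sketch of the BasicAuth branch above from the client side: an add-on granted auth_api access can verify Home Assistant credentials like this (hostname and token header assumed as in the previous sketch).

import asyncio
import os

import aiohttp

async def check_credentials(username: str, password: str) -> bool:
    async with aiohttp.ClientSession() as session:
        async with session.post(
            "http://hassio/auth",
            auth=aiohttp.BasicAuth(username, password),
            headers={"X-Hassio-Key": os.environ["HASSIO_TOKEN"]},  # assumed
        ) as resp:
            return resp.status == 200

print(asyncio.run(check_credentials("demo", "secret")))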
100
hassio/api/discovery.py
Normal file
@@ -0,0 +1,100 @@
"""Init file for Hass.io network RESTful API."""
import voluptuous as vol

from ..const import (
    ATTR_ADDON,
    ATTR_UUID,
    ATTR_CONFIG,
    ATTR_DISCOVERY,
    ATTR_SERVICE,
    REQUEST_FROM,
)
from ..coresys import CoreSysAttributes
from ..discovery.validate import valid_discovery_service
from ..exceptions import APIError, APIForbidden
from .utils import api_process, api_validate

SCHEMA_DISCOVERY = vol.Schema(
    {
        vol.Required(ATTR_SERVICE): valid_discovery_service,
        vol.Optional(ATTR_CONFIG): vol.Maybe(dict),
    }
)


class APIDiscovery(CoreSysAttributes):
    """Handle RESTful API for discovery functions."""

    def _extract_message(self, request):
        """Extract discovery message from URL."""
        message = self.sys_discovery.get(request.match_info.get("uuid"))
        if not message:
            raise APIError("Discovery message not found")
        return message

    def _check_permission_ha(self, request):
        """Check permission for API call / Home Assistant."""
        if request[REQUEST_FROM] != self.sys_homeassistant:
            raise APIForbidden("Only HomeAssistant can use this API!")

    @api_process
    async def list(self, request):
        """Show registered discovery services."""
        self._check_permission_ha(request)

        discovery = []
        for message in self.sys_discovery.list_messages:
            discovery.append(
                {
                    ATTR_ADDON: message.addon,
                    ATTR_SERVICE: message.service,
                    ATTR_UUID: message.uuid,
                    ATTR_CONFIG: message.config,
                }
            )

        return {ATTR_DISCOVERY: discovery}

    @api_process
    async def set_discovery(self, request):
        """Write data into a discovery pipeline."""
        body = await api_validate(SCHEMA_DISCOVERY, request)
        addon = request[REQUEST_FROM]

        # Access?
        if body[ATTR_SERVICE] not in addon.discovery:
            raise APIForbidden("Can't use discovery!")

        # Process discovery message
        message = self.sys_discovery.send(addon, **body)

        return {ATTR_UUID: message.uuid}

    @api_process
    async def get_discovery(self, request):
        """Read data from a discovery message."""
        message = self._extract_message(request)

        # HomeAssistant?
        self._check_permission_ha(request)

        return {
            ATTR_ADDON: message.addon,
            ATTR_SERVICE: message.service,
            ATTR_UUID: message.uuid,
            ATTR_CONFIG: message.config,
        }

    @api_process
    async def del_discovery(self, request):
        """Delete a discovery message."""
        message = self._extract_message(request)
        addon = request[REQUEST_FROM]

        # Permission
        if message.addon != addon.slug:
            raise APIForbidden("Can't remove discovery message")

        self.sys_discovery.remove(message)
        return True
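A hypothetical body for POST /discovery, matching SCHEMA_DISCOVERY above: "service" must pass valid_discovery_service and be declared in the add-on's discovery list, while "config" is an optional free-form dict. All values shown are invented.

payload = {
    "service": "mqtt",                                # must be a declared discovery service
    "config": {"host": "172.30.32.1", "port": 1883},  # hypothetical broker details
}
# set_discovery answers with {"uuid": ...} identifying the stored message.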
102
hassio/api/dns.py
Normal file
@@ -0,0 +1,102 @@
"""Init file for Hass.io DNS RESTful API."""
import asyncio
import logging
from typing import Any, Awaitable, Dict

from aiohttp import web
import voluptuous as vol

from ..const import (
    ATTR_BLK_READ,
    ATTR_BLK_WRITE,
    ATTR_CPU_PERCENT,
    ATTR_HOST,
    ATTR_LATEST_VERSION,
    ATTR_LOCALS,
    ATTR_MEMORY_LIMIT,
    ATTR_MEMORY_PERCENT,
    ATTR_MEMORY_USAGE,
    ATTR_NETWORK_RX,
    ATTR_NETWORK_TX,
    ATTR_SERVERS,
    ATTR_VERSION,
    CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import dns_server_list
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_SERVERS): dns_server_list})

SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})


class APICoreDNS(CoreSysAttributes):
    """Handle RESTful API for DNS functions."""

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return DNS information."""
        return {
            ATTR_VERSION: self.sys_dns.version,
            ATTR_LATEST_VERSION: self.sys_dns.latest_version,
            ATTR_HOST: str(self.sys_docker.network.dns),
            ATTR_SERVERS: self.sys_dns.servers,
            ATTR_LOCALS: self.sys_host.network.dns_servers,
        }

    @api_process
    async def options(self, request: web.Request) -> None:
        """Set DNS options."""
        body = await api_validate(SCHEMA_OPTIONS, request)

        if ATTR_SERVERS in body:
            self.sys_dns.servers = body[ATTR_SERVERS]
            self.sys_create_task(self.sys_dns.restart())

        self.sys_dns.save_data()

    @api_process
    async def stats(self, request: web.Request) -> Dict[str, Any]:
        """Return resource information."""
        stats = await self.sys_dns.stats()

        return {
            ATTR_CPU_PERCENT: stats.cpu_percent,
            ATTR_MEMORY_USAGE: stats.memory_usage,
            ATTR_MEMORY_LIMIT: stats.memory_limit,
            ATTR_MEMORY_PERCENT: stats.memory_percent,
            ATTR_NETWORK_RX: stats.network_rx,
            ATTR_NETWORK_TX: stats.network_tx,
            ATTR_BLK_READ: stats.blk_read,
            ATTR_BLK_WRITE: stats.blk_write,
        }

    @api_process
    async def update(self, request: web.Request) -> None:
        """Update DNS plugin."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.sys_dns.latest_version)

        if version == self.sys_dns.version:
            raise APIError(f"Version {version} is already in use")
        await asyncio.shield(self.sys_dns.update(version))

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request: web.Request) -> Awaitable[bytes]:
        """Return DNS Docker logs."""
        return self.sys_dns.logs()

    @api_process
    def restart(self, request: web.Request) -> Awaitable[None]:
        """Restart CoreDNS plugin."""
        return asyncio.shield(self.sys_dns.restart())

    @api_process
    def reset(self, request: web.Request) -> Awaitable[None]:
        """Reset CoreDNS plugin."""
        return asyncio.shield(self.sys_dns.reset())
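An example body for POST /dns/options; dns_server_list in this codebase expects dns:// URLs (the resolver addresses below are illustrative). Setting servers persists via save_data() and schedules a CoreDNS restart, per options() above.

payload = {"servers": ["dns://1.1.1.1", "dns://8.8.8.8"]}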
51
hassio/api/hardware.py
Normal file
@@ -0,0 +1,51 @@
"""Init file for Hass.io hardware RESTful API."""
import asyncio
import logging
from typing import Any, Awaitable, Dict

from aiohttp import web

from ..const import (
    ATTR_SERIAL,
    ATTR_DISK,
    ATTR_GPIO,
    ATTR_AUDIO,
    ATTR_INPUT,
    ATTR_OUTPUT,
)
from ..coresys import CoreSysAttributes
from .utils import api_process

_LOGGER: logging.Logger = logging.getLogger(__name__)


class APIHardware(CoreSysAttributes):
    """Handle RESTful API for hardware functions."""

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Show hardware info."""
        return {
            ATTR_SERIAL: list(
                self.sys_hardware.serial_devices | self.sys_hardware.serial_by_id
            ),
            ATTR_INPUT: list(self.sys_hardware.input_devices),
            ATTR_DISK: list(self.sys_hardware.disk_devices),
            ATTR_GPIO: list(self.sys_hardware.gpio_devices),
            ATTR_AUDIO: self.sys_hardware.audio_devices,
        }

    @api_process
    async def audio(self, request: web.Request) -> Dict[str, Any]:
        """Show ALSA audio devices."""
        return {
            ATTR_AUDIO: {
                ATTR_INPUT: self.sys_host.alsa.input_devices,
                ATTR_OUTPUT: self.sys_host.alsa.output_devices,
            }
        }

    @api_process
    def trigger(self, request: web.Request) -> Awaitable[None]:
        """Trigger a udev device reload."""
        return asyncio.shield(self.sys_hardware.udev_trigger())
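For orientation, a hypothetical data payload for GET /hardware/info as assembled by info() above; every device name below is invented, and api_process wraps the dict in the usual result/data envelope.

example_data = {
    "serial": ["/dev/ttyUSB0", "/dev/ttyACM0"],
    "input": [],
    "disk": ["/dev/sda"],
    "gpio": ["gpiochip0", "gpiochip100"],
    "audio": {},
}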
59
hassio/api/hassos.py
Normal file
@@ -0,0 +1,59 @@
"""Init file for Hass.io HassOS RESTful API."""
import asyncio
import logging
from typing import Any, Awaitable, Dict

from aiohttp import web
import voluptuous as vol

from ..const import (
    ATTR_BOARD,
    ATTR_BOOT,
    ATTR_VERSION,
    ATTR_VERSION_CLI,
    ATTR_VERSION_CLI_LATEST,
    ATTR_VERSION_LATEST,
)
from ..coresys import CoreSysAttributes
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})


class APIHassOS(CoreSysAttributes):
    """Handle RESTful API for HassOS functions."""

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return HassOS information."""
        return {
            ATTR_VERSION: self.sys_hassos.version,
            ATTR_VERSION_CLI: self.sys_hassos.version_cli,
            ATTR_VERSION_LATEST: self.sys_hassos.version_latest,
            ATTR_VERSION_CLI_LATEST: self.sys_hassos.version_cli_latest,
            ATTR_BOARD: self.sys_hassos.board,
            ATTR_BOOT: self.sys_dbus.rauc.boot_slot,
        }

    @api_process
    async def update(self, request: web.Request) -> None:
        """Update HassOS."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.sys_hassos.version_latest)

        await asyncio.shield(self.sys_hassos.update(version))

    @api_process
    async def update_cli(self, request: web.Request) -> None:
        """Update HassOS CLI."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.sys_hassos.version_cli_latest)

        await asyncio.shield(self.sys_hassos.update_cli(version))

    @api_process
    def config_sync(self, request: web.Request) -> Awaitable[None]:
        """Trigger config reload on HassOS."""
        return asyncio.shield(self.sys_hassos.config_sync())
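Example bodies for the update endpoints above; omitting "version" falls back to the latest known version via body.get(ATTR_VERSION, ...). The version string is hypothetical.

update_os = {"version": "3.7"}  # POST /hassos/update
update_cli = {}                 # POST /hassos/update/cli -> defaults to version_cli_latest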
163
hassio/api/homeassistant.py
Normal file
@@ -0,0 +1,163 @@
"""Init file for Hass.io Home Assistant RESTful API."""
import asyncio
import logging
from typing import Any, Coroutine, Dict

from aiohttp import web
import voluptuous as vol

from ..const import (
    ATTR_ARCH,
    ATTR_BLK_READ,
    ATTR_BLK_WRITE,
    ATTR_BOOT,
    ATTR_CPU_PERCENT,
    ATTR_CUSTOM,
    ATTR_IMAGE,
    ATTR_LAST_VERSION,
    ATTR_MACHINE,
    ATTR_MEMORY_LIMIT,
    ATTR_MEMORY_USAGE,
    ATTR_MEMORY_PERCENT,
    ATTR_NETWORK_RX,
    ATTR_NETWORK_TX,
    ATTR_PORT,
    ATTR_REFRESH_TOKEN,
    ATTR_SSL,
    ATTR_VERSION,
    ATTR_WAIT_BOOT,
    ATTR_WATCHDOG,
    ATTR_IP_ADDRESS,
    CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import docker_image, network_port
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema(
    {
        vol.Optional(ATTR_BOOT): vol.Boolean(),
        vol.Inclusive(ATTR_IMAGE, "custom_hass"): vol.Maybe(docker_image),
        vol.Inclusive(ATTR_LAST_VERSION, "custom_hass"): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_PORT): network_port,
        vol.Optional(ATTR_SSL): vol.Boolean(),
        vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
        vol.Optional(ATTR_WAIT_BOOT): vol.All(vol.Coerce(int), vol.Range(min=60)),
        vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(vol.Coerce(str)),
    }
)

SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})


class APIHomeAssistant(CoreSysAttributes):
    """Handle RESTful API for Home Assistant functions."""

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return Home Assistant information."""
        return {
            ATTR_VERSION: self.sys_homeassistant.version,
            ATTR_LAST_VERSION: self.sys_homeassistant.latest_version,
            ATTR_MACHINE: self.sys_homeassistant.machine,
            ATTR_IP_ADDRESS: str(self.sys_homeassistant.ip_address),
            ATTR_ARCH: self.sys_homeassistant.arch,
            ATTR_IMAGE: self.sys_homeassistant.image,
            ATTR_CUSTOM: self.sys_homeassistant.is_custom_image,
            ATTR_BOOT: self.sys_homeassistant.boot,
            ATTR_PORT: self.sys_homeassistant.api_port,
            ATTR_SSL: self.sys_homeassistant.api_ssl,
            ATTR_WATCHDOG: self.sys_homeassistant.watchdog,
            ATTR_WAIT_BOOT: self.sys_homeassistant.wait_boot,
        }

    @api_process
    async def options(self, request: web.Request) -> None:
        """Set Home Assistant options."""
        body = await api_validate(SCHEMA_OPTIONS, request)

        if ATTR_IMAGE in body and ATTR_LAST_VERSION in body:
            self.sys_homeassistant.image = body[ATTR_IMAGE]
            self.sys_homeassistant.latest_version = body[ATTR_LAST_VERSION]

        if ATTR_BOOT in body:
            self.sys_homeassistant.boot = body[ATTR_BOOT]

        if ATTR_PORT in body:
            self.sys_homeassistant.api_port = body[ATTR_PORT]

        if ATTR_SSL in body:
            self.sys_homeassistant.api_ssl = body[ATTR_SSL]

        if ATTR_WATCHDOG in body:
            self.sys_homeassistant.watchdog = body[ATTR_WATCHDOG]

        if ATTR_WAIT_BOOT in body:
            self.sys_homeassistant.wait_boot = body[ATTR_WAIT_BOOT]

        if ATTR_REFRESH_TOKEN in body:
            self.sys_homeassistant.refresh_token = body[ATTR_REFRESH_TOKEN]

        self.sys_homeassistant.save_data()

    @api_process
    async def stats(self, request: web.Request) -> Dict[str, Any]:
        """Return resource information."""
        stats = await self.sys_homeassistant.stats()
        if not stats:
            raise APIError("No stats available")

        return {
            ATTR_CPU_PERCENT: stats.cpu_percent,
            ATTR_MEMORY_USAGE: stats.memory_usage,
            ATTR_MEMORY_LIMIT: stats.memory_limit,
            ATTR_MEMORY_PERCENT: stats.memory_percent,
            ATTR_NETWORK_RX: stats.network_rx,
            ATTR_NETWORK_TX: stats.network_tx,
            ATTR_BLK_READ: stats.blk_read,
            ATTR_BLK_WRITE: stats.blk_write,
        }

    @api_process
    async def update(self, request: web.Request) -> None:
        """Update Home Assistant."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.sys_homeassistant.latest_version)

        await asyncio.shield(self.sys_homeassistant.update(version))

    @api_process
    def stop(self, request: web.Request) -> Coroutine:
        """Stop Home Assistant."""
        return asyncio.shield(self.sys_homeassistant.stop())

    @api_process
    def start(self, request: web.Request) -> Coroutine:
        """Start Home Assistant."""
        return asyncio.shield(self.sys_homeassistant.start())

    @api_process
    def restart(self, request: web.Request) -> Coroutine:
        """Restart Home Assistant."""
        return asyncio.shield(self.sys_homeassistant.restart())

    @api_process
    def rebuild(self, request: web.Request) -> Coroutine:
        """Rebuild Home Assistant."""
        return asyncio.shield(self.sys_homeassistant.rebuild())

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request: web.Request) -> Coroutine:
        """Return Home Assistant Docker logs."""
        return self.sys_homeassistant.logs()

    @api_process
    async def check(self, request: web.Request) -> None:
        """Check configuration of Home Assistant."""
        result = await self.sys_homeassistant.check_config()
        if not result.valid:
            raise APIError(result.log)
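An example body for POST /homeassistant/options. Note the vol.Inclusive pairing above: "image" and "last_version" must be supplied together (or both omitted). All values are hypothetical.

payload = {
    "image": "myorg/custom-homeassistant",  # paired with last_version
    "last_version": "0.100.2",
    "port": 8123,
    "ssl": False,
    "watchdog": True,
    "wait_boot": 600,  # vol.Range enforces a minimum of 60 seconds
}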
109
hassio/api/host.py
Normal file
@@ -0,0 +1,109 @@
"""Init file for Hass.io host RESTful API."""
import asyncio
import logging

import voluptuous as vol

from .utils import api_process, api_validate
from ..const import (
    ATTR_HOSTNAME,
    ATTR_FEATURES,
    ATTR_KERNEL,
    ATTR_OPERATING_SYSTEM,
    ATTR_CHASSIS,
    ATTR_DEPLOYMENT,
    ATTR_STATE,
    ATTR_NAME,
    ATTR_DESCRIPTON,
    ATTR_SERVICES,
    ATTR_CPE,
)
from ..coresys import CoreSysAttributes

_LOGGER: logging.Logger = logging.getLogger(__name__)

SERVICE = "service"

SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_HOSTNAME): vol.Coerce(str)})


class APIHost(CoreSysAttributes):
    """Handle RESTful API for host functions."""

    @api_process
    async def info(self, request):
        """Return host information."""
        return {
            ATTR_CHASSIS: self.sys_host.info.chassis,
            ATTR_CPE: self.sys_host.info.cpe,
            ATTR_FEATURES: self.sys_host.supperted_features,
            ATTR_HOSTNAME: self.sys_host.info.hostname,
            ATTR_OPERATING_SYSTEM: self.sys_host.info.operating_system,
            ATTR_DEPLOYMENT: self.sys_host.info.deployment,
            ATTR_KERNEL: self.sys_host.info.kernel,
        }

    @api_process
    async def options(self, request):
        """Edit host settings."""
        body = await api_validate(SCHEMA_OPTIONS, request)

        # hostname
        if ATTR_HOSTNAME in body:
            await asyncio.shield(
                self.sys_host.control.set_hostname(body[ATTR_HOSTNAME])
            )

    @api_process
    def reboot(self, request):
        """Reboot host."""
        return asyncio.shield(self.sys_host.control.reboot())

    @api_process
    def shutdown(self, request):
        """Poweroff host."""
        return asyncio.shield(self.sys_host.control.shutdown())

    @api_process
    def reload(self, request):
        """Reload host data."""
        return asyncio.shield(self.sys_host.reload())

    @api_process
    async def services(self, request):
        """Return list of available services."""
        services = []
        for unit in self.sys_host.services:
            services.append(
                {
                    ATTR_NAME: unit.name,
                    ATTR_DESCRIPTON: unit.description,
                    ATTR_STATE: unit.state,
                }
            )

        return {ATTR_SERVICES: services}

    @api_process
    def service_start(self, request):
        """Start a service."""
        unit = request.match_info.get(SERVICE)
        return asyncio.shield(self.sys_host.services.start(unit))

    @api_process
    def service_stop(self, request):
        """Stop a service."""
        unit = request.match_info.get(SERVICE)
        return asyncio.shield(self.sys_host.services.stop(unit))

    @api_process
    def service_reload(self, request):
        """Reload a service."""
        unit = request.match_info.get(SERVICE)
        return asyncio.shield(self.sys_host.services.reload(unit))

    @api_process
    def service_restart(self, request):
        """Restart a service."""
        unit = request.match_info.get(SERVICE)
        return asyncio.shield(self.sys_host.services.restart(unit))
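The handlers above each read the service name from `request.match_info.get(SERVICE)`, so the routes must expose a `{service}` path parameter. The actual registration lives elsewhere in hassio/api/ and is not part of this diff; the following is a minimal wiring sketch under that assumption, with the route paths themselves being illustrative:

# Illustrative wiring sketch; not the repository's actual registration code.
from aiohttp import web

def register_host_routes(app: web.Application, api_host) -> None:
    """Map APIHost handlers onto aiohttp routes (paths are assumptions)."""
    app.add_routes(
        [
            web.get("/host/info", api_host.info),
            web.post("/host/options", api_host.options),
            web.post("/host/reboot", api_host.reboot),
            web.post("/host/shutdown", api_host.shutdown),
            web.post("/host/reload", api_host.reload),
            web.get("/host/services", api_host.services),
            web.post("/host/services/{service}/start", api_host.service_start),
            web.post("/host/services/{service}/stop", api_host.service_stop),
            web.post("/host/services/{service}/reload", api_host.service_reload),
            web.post("/host/services/{service}/restart", api_host.service_restart),
        ]
    )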
42  hassio/api/info.py  Normal file
@@ -0,0 +1,42 @@
"""Init file for Hass.io info RESTful API."""
import logging
from typing import Any, Dict

from aiohttp import web

from ..const import (
    ATTR_ARCH,
    ATTR_CHANNEL,
    ATTR_HASSOS,
    ATTR_HOMEASSISTANT,
    ATTR_HOSTNAME,
    ATTR_LOGGING,
    ATTR_MACHINE,
    ATTR_SUPERVISOR,
    ATTR_SUPPORTED_ARCH,
    ATTR_TIMEZONE,
)
from ..coresys import CoreSysAttributes
from .utils import api_process

_LOGGER: logging.Logger = logging.getLogger(__name__)


class APIInfo(CoreSysAttributes):
    """Handle RESTful API for info functions."""

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Show system info."""
        return {
            ATTR_SUPERVISOR: self.sys_supervisor.version,
            ATTR_HOMEASSISTANT: self.sys_homeassistant.version,
            ATTR_HASSOS: self.sys_hassos.version,
            ATTR_HOSTNAME: self.sys_host.info.hostname,
            ATTR_MACHINE: self.sys_machine,
            ATTR_ARCH: self.sys_arch.default,
            ATTR_SUPPORTED_ARCH: self.sys_arch.supported,
            ATTR_CHANNEL: self.sys_updater.channel,
            ATTR_LOGGING: self.sys_config.logging,
            ATTR_TIMEZONE: self.sys_timezone,
        }
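A minimal consumer sketch for this endpoint follows; the base URL is a placeholder, and the `data` envelope reflects the usual `api_process` wrapping, which is assumed here rather than shown in this diff:

# Illustrative only: URL is a placeholder, envelope shape is assumed.
import asyncio
import aiohttp

async def fetch_info(base_url: str = "http://hassio") -> dict:
    """Fetch the system info payload from a Supervisor-style API."""
    async with aiohttp.ClientSession() as session:
        async with session.get(f"{base_url}/info") as resp:
            body = await resp.json()
    return body.get("data", body)  # tolerate both wrapped and bare payloads

print(asyncio.run(fetch_info()))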
271  hassio/api/ingress.py  Normal file
@@ -0,0 +1,271 @@
"""Hass.io Add-on ingress service."""
import asyncio
from ipaddress import ip_address
import logging
from typing import Any, Dict, Union

import aiohttp
from aiohttp import hdrs, web
from aiohttp.web_exceptions import (
    HTTPBadGateway,
    HTTPServiceUnavailable,
    HTTPUnauthorized,
)
from multidict import CIMultiDict, istr

from ..addons.addon import Addon
from ..const import (
    ATTR_ADMIN,
    ATTR_ICON,
    ATTR_SESSION,
    ATTR_TITLE,
    ATTR_PANELS,
    ATTR_ENABLE,
    COOKIE_INGRESS,
    HEADER_TOKEN,
    REQUEST_FROM,
)
from ..coresys import CoreSysAttributes
from .utils import api_process

_LOGGER: logging.Logger = logging.getLogger(__name__)


class APIIngress(CoreSysAttributes):
    """Ingress view to handle add-on webui routing."""

    def _extract_addon(self, request: web.Request) -> Addon:
        """Return addon, throw an exception if it doesn't exist."""
        token = request.match_info.get("token")

        # Find correct add-on
        addon = self.sys_ingress.get(token)
        if not addon:
            _LOGGER.warning("Ingress for %s not available", token)
            raise HTTPServiceUnavailable()

        return addon

    def _check_ha_access(self, request: web.Request) -> None:
        if request[REQUEST_FROM] != self.sys_homeassistant:
            _LOGGER.warning("Ingress is only available behind Home Assistant")
            raise HTTPUnauthorized()

    def _create_url(self, addon: Addon, path: str) -> str:
        """Create URL to container."""
        return f"http://{addon.ip_address}:{addon.ingress_port}/{path}"

    @api_process
    async def panels(self, request: web.Request) -> Dict[str, Any]:
        """Create a list of panel data."""
        addons = {}
        for addon in self.sys_ingress.addons:
            addons[addon.slug] = {
                ATTR_TITLE: addon.panel_title,
                ATTR_ICON: addon.panel_icon,
                ATTR_ADMIN: addon.panel_admin,
                ATTR_ENABLE: addon.ingress_panel,
            }

        return {ATTR_PANELS: addons}

    @api_process
    async def create_session(self, request: web.Request) -> Dict[str, Any]:
        """Create a new session."""
        self._check_ha_access(request)

        session = self.sys_ingress.create_session()
        return {ATTR_SESSION: session}

    async def handler(
        self, request: web.Request
    ) -> Union[web.Response, web.StreamResponse, web.WebSocketResponse]:
        """Route data to Hass.io ingress service."""
        self._check_ha_access(request)

        # Check Ingress Session
        session = request.cookies.get(COOKIE_INGRESS)
        if not self.sys_ingress.validate_session(session):
            _LOGGER.warning("No valid ingress session %s", session)
            raise HTTPUnauthorized()

        # Process requests
        addon = self._extract_addon(request)
        path = request.match_info.get("path")
        try:
            # Websocket
            if _is_websocket(request):
                return await self._handle_websocket(request, addon, path)

            # Request
            return await self._handle_request(request, addon, path)

        except aiohttp.ClientError as err:
            _LOGGER.error("Ingress error: %s", err)

        raise HTTPBadGateway() from None

    async def _handle_websocket(
        self, request: web.Request, addon: Addon, path: str
    ) -> web.WebSocketResponse:
        """Ingress route for websocket."""
        if hdrs.SEC_WEBSOCKET_PROTOCOL in request.headers:
            req_protocols = [
                str(proto.strip())
                for proto in request.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
            ]
        else:
            req_protocols = ()

        ws_server = web.WebSocketResponse(
            protocols=req_protocols, autoclose=False, autoping=False
        )
        await ws_server.prepare(request)

        # Preparing
        url = self._create_url(addon, path)
        source_header = _init_header(request, addon)

        # Support GET query
        if request.query_string:
            url = "{}?{}".format(url, request.query_string)

        # Start proxy
        async with self.sys_websession.ws_connect(
            url,
            headers=source_header,
            protocols=req_protocols,
            autoclose=False,
            autoping=False,
        ) as ws_client:
            # Proxy requests
            await asyncio.wait(
                [
                    _websocket_forward(ws_server, ws_client),
                    _websocket_forward(ws_client, ws_server),
                ],
                return_when=asyncio.FIRST_COMPLETED,
            )

        return ws_server

    async def _handle_request(
        self, request: web.Request, addon: Addon, path: str
    ) -> Union[web.Response, web.StreamResponse]:
        """Ingress route for request."""
        url = self._create_url(addon, path)
        data = await request.read()
        source_header = _init_header(request, addon)

        async with self.sys_websession.request(
            request.method,
            url,
            headers=source_header,
            params=request.query,
            allow_redirects=False,
            data=data,
        ) as result:
            headers = _response_header(result)

            # Simple request
            if (
                hdrs.CONTENT_LENGTH in result.headers
                and int(result.headers.get(hdrs.CONTENT_LENGTH, 0)) < 4_194_000
            ):
                # Return Response
                body = await result.read()
                return web.Response(
                    headers=headers,
                    status=result.status,
                    content_type=result.content_type,
                    body=body,
                )

            # Stream response
            response = web.StreamResponse(status=result.status, headers=headers)
            response.content_type = result.content_type

            try:
                await response.prepare(request)
                async for data in result.content.iter_chunked(4096):
                    await response.write(data)

            except (aiohttp.ClientError, aiohttp.ClientPayloadError) as err:
                _LOGGER.error("Stream error with %s: %s", url, err)

            return response


def _init_header(
    request: web.Request, addon: str
) -> Union[CIMultiDict, Dict[str, str]]:
    """Create initial header."""
    headers = {}

    # filter flags
    for name, value in request.headers.items():
        if name in (
            hdrs.CONTENT_LENGTH,
            hdrs.CONTENT_ENCODING,
            hdrs.SEC_WEBSOCKET_EXTENSIONS,
            hdrs.SEC_WEBSOCKET_PROTOCOL,
            hdrs.SEC_WEBSOCKET_VERSION,
            hdrs.SEC_WEBSOCKET_KEY,
            istr(HEADER_TOKEN),
        ):
            continue
        headers[name] = value

    # Update X-Forwarded-For
    forward_for = request.headers.get(hdrs.X_FORWARDED_FOR)
    connected_ip = ip_address(request.transport.get_extra_info("peername")[0])
    headers[hdrs.X_FORWARDED_FOR] = f"{forward_for}, {connected_ip!s}"

    return headers


def _response_header(response: aiohttp.ClientResponse) -> Dict[str, str]:
    """Create response header."""
    headers = {}

    for name, value in response.headers.items():
        if name in (
            hdrs.TRANSFER_ENCODING,
            hdrs.CONTENT_LENGTH,
            hdrs.CONTENT_TYPE,
            hdrs.CONTENT_ENCODING,
        ):
            continue
        headers[name] = value

    return headers


def _is_websocket(request: web.Request) -> bool:
    """Return True if request is a websocket."""
    headers = request.headers

    if (
        "upgrade" in headers.get(hdrs.CONNECTION, "").lower()
        and headers.get(hdrs.UPGRADE, "").lower() == "websocket"
    ):
        return True
    return False


async def _websocket_forward(ws_from, ws_to):
    """Handle websocket message directly."""
    try:
        async for msg in ws_from:
            if msg.type == aiohttp.WSMsgType.TEXT:
                await ws_to.send_str(msg.data)
            elif msg.type == aiohttp.WSMsgType.BINARY:
                await ws_to.send_bytes(msg.data)
            elif msg.type == aiohttp.WSMsgType.PING:
                await ws_to.ping()
            elif msg.type == aiohttp.WSMsgType.PONG:
                await ws_to.pong()
            elif ws_to.closed:
                await ws_to.close(code=ws_to.close_code, message=msg.extra)
    except RuntimeError:
        _LOGGER.warning("Ingress Websocket runtime error")
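The paired `_websocket_forward` calls above implement a bidirectional pump that runs until either direction finishes. A self-contained sketch of the same pattern, reduced to TEXT/BINARY frames, is shown below; the upstream URL is a placeholder and this is not code from the diff:

# Minimal sketch of the bidirectional websocket-pump pattern (placeholder upstream URL).
import asyncio
import aiohttp
from aiohttp import web

async def pump(ws_from, ws_to):
    """Copy messages one way, a simplified analogue of _websocket_forward."""
    async for msg in ws_from:
        if msg.type == aiohttp.WSMsgType.TEXT:
            await ws_to.send_str(msg.data)
        elif msg.type == aiohttp.WSMsgType.BINARY:
            await ws_to.send_bytes(msg.data)

async def proxy(request: web.Request) -> web.WebSocketResponse:
    """Accept a client websocket, dial upstream, and pump both directions."""
    server_ws = web.WebSocketResponse()
    await server_ws.prepare(request)
    async with aiohttp.ClientSession() as session:
        async with session.ws_connect("ws://localhost:9000/ws") as client_ws:
            await asyncio.wait(
                [
                    asyncio.create_task(pump(server_ws, client_ws)),
                    asyncio.create_task(pump(client_ws, server_ws)),
                ],
                return_when=asyncio.FIRST_COMPLETED,
            )
    return server_ws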
2    hassio/api/panel/201359fd5a526afe13ef.worker.js  Normal file
File diff suppressed because one or more lines are too long

BIN  hassio/api/panel/201359fd5a526afe13ef.worker.js.gz  Normal file
Binary file not shown.

1    hassio/api/panel/201359fd5a526afe13ef.worker.js.map  Normal file
File diff suppressed because one or more lines are too long

3    hassio/api/panel/chunk.00de7352e51443687ebb.js  Normal file
File diff suppressed because one or more lines are too long

10   hassio/api/panel/chunk.00de7352e51443687ebb.js.LICENSE  Normal file
@@ -0,0 +1,10 @@
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
BIN  hassio/api/panel/chunk.00de7352e51443687ebb.js.gz  Normal file
Binary file not shown.

1    hassio/api/panel/chunk.00de7352e51443687ebb.js.map  Normal file
File diff suppressed because one or more lines are too long

2    hassio/api/panel/chunk.0b82745c7bdffe5c1404.js  Normal file
File diff suppressed because one or more lines are too long

BIN  hassio/api/panel/chunk.0b82745c7bdffe5c1404.js.gz  Normal file
Binary file not shown.

1    hassio/api/panel/chunk.0b82745c7bdffe5c1404.js.map  Normal file
File diff suppressed because one or more lines are too long

3    hassio/api/panel/chunk.0c4f6887f9b7e7b11ef5.js  Normal file
File diff suppressed because one or more lines are too long

10   hassio/api/panel/chunk.0c4f6887f9b7e7b11ef5.js.LICENSE  Normal file
@@ -0,0 +1,10 @@
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
BIN  hassio/api/panel/chunk.0c4f6887f9b7e7b11ef5.js.gz  Normal file
Binary file not shown.

1    hassio/api/panel/chunk.0c4f6887f9b7e7b11ef5.js.map  Normal file
File diff suppressed because one or more lines are too long

3    hassio/api/panel/chunk.170381dce1aef5f33cec.js  Normal file
File diff suppressed because one or more lines are too long

10   hassio/api/panel/chunk.170381dce1aef5f33cec.js.LICENSE  Normal file
@@ -0,0 +1,10 @@
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
BIN  hassio/api/panel/chunk.170381dce1aef5f33cec.js.gz  Normal file
Binary file not shown.

1    hassio/api/panel/chunk.170381dce1aef5f33cec.js.map  Normal file
File diff suppressed because one or more lines are too long

2    hassio/api/panel/chunk.2412396b4c6d55f3dec7.js  Normal file
File diff suppressed because one or more lines are too long

BIN  hassio/api/panel/chunk.2412396b4c6d55f3dec7.js.gz  Normal file
Binary file not shown.

1    hassio/api/panel/chunk.2412396b4c6d55f3dec7.js.map  Normal file
File diff suppressed because one or more lines are too long

2    hassio/api/panel/chunk.8527374a266cecf93aa9.js  Normal file
File diff suppressed because one or more lines are too long

BIN  hassio/api/panel/chunk.8527374a266cecf93aa9.js.gz  Normal file
Binary file not shown.

1    hassio/api/panel/chunk.8527374a266cecf93aa9.js.map  Normal file
File diff suppressed because one or more lines are too long

3    hassio/api/panel/chunk.87b1d37fc9b8a6f7e2a6.js  Normal file
File diff suppressed because one or more lines are too long

16   hassio/api/panel/chunk.87b1d37fc9b8a6f7e2a6.js.LICENSE  Normal file
@@ -0,0 +1,16 @@
/**
@license
Copyright 2018 Google Inc. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
BIN  hassio/api/panel/chunk.87b1d37fc9b8a6f7e2a6.js.gz  Normal file
Binary file not shown.

1    hassio/api/panel/chunk.87b1d37fc9b8a6f7e2a6.js.map  Normal file
File diff suppressed because one or more lines are too long

2    hassio/api/panel/chunk.92a11ac1b80e0d7839d2.js  Normal file
@@ -0,0 +1,2 @@
(self.webpackJsonp=self.webpackJsonp||[]).push([[2],{176:function(e,r,n){"use strict";n.r(r),n.d(r,"codeMirror",function(){return c}),n.d(r,"codeMirrorCss",function(){return i});var a=n(54),o=n.n(a),s=n(169),t=(n(170),n(171),n(11));o.a.commands.save=function(e){Object(t.a)(e.getWrapperElement(),"editor-save")};var c=o.a,i=s.a}}]);
//# sourceMappingURL=chunk.92a11ac1b80e0d7839d2.js.map
BIN  hassio/api/panel/chunk.92a11ac1b80e0d7839d2.js.gz  Normal file
Binary file not shown.

1    hassio/api/panel/chunk.92a11ac1b80e0d7839d2.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"sources":["webpack:///./src/resources/codemirror.ts"],"names":["__webpack_require__","r","__webpack_exports__","d","codeMirror","codeMirrorCss","codemirror__WEBPACK_IMPORTED_MODULE_0__","codemirror__WEBPACK_IMPORTED_MODULE_0___default","n","codemirror_lib_codemirror_css__WEBPACK_IMPORTED_MODULE_1__","_common_dom_fire_event__WEBPACK_IMPORTED_MODULE_4__","_CodeMirror","commands","save","cm","fireEvent","getWrapperElement","_codeMirrorCss"],"mappings":"sFAAAA,EAAAC,EAAAC,GAAAF,EAAAG,EAAAD,EAAA,+BAAAE,IAAAJ,EAAAG,EAAAD,EAAA,kCAAAG,IAAA,IAAAC,EAAAN,EAAA,IAAAO,EAAAP,EAAAQ,EAAAF,GAAAG,EAAAT,EAAA,KAAAU,GAAAV,EAAA,KAAAA,EAAA,KAAAA,EAAA,KAQAW,IAAYC,SAASC,KAAO,SAACC,GAC3BC,YAAUD,EAAGE,oBAAqB,gBAE7B,IAAMZ,EAAkBO,IAClBN,EAAqBY","file":"chunk.92a11ac1b80e0d7839d2.js","sourcesContent":["// @ts-ignore\nimport _CodeMirror, { Editor } from \"codemirror\";\n// @ts-ignore\nimport _codeMirrorCss from \"codemirror/lib/codemirror.css\";\nimport \"codemirror/mode/yaml/yaml\";\nimport \"codemirror/mode/jinja2/jinja2\";\nimport { fireEvent } from \"../common/dom/fire_event\";\n\n_CodeMirror.commands.save = (cm: Editor) => {\n fireEvent(cm.getWrapperElement(), \"editor-save\");\n};\nexport const codeMirror: any = _CodeMirror;\nexport const codeMirrorCss: any = _codeMirrorCss;\n"],"sourceRoot":""}
2    hassio/api/panel/chunk.990ee58006b248f55d23.js  Normal file
File diff suppressed because one or more lines are too long
Some files were not shown because too many files have changed in this diff.