Compare commits


No commits in common. "main" and "2023.09.0" have entirely different histories.

4423 changed files with 510504 additions and 34544 deletions


@@ -1,51 +1,40 @@
 {
   "name": "Supervisor dev",
-  "image": "ghcr.io/home-assistant/devcontainer:2-supervisor",
+  "image": "ghcr.io/home-assistant/devcontainer:supervisor",
   "containerEnv": {
     "WORKSPACE_DIRECTORY": "${containerWorkspaceFolder}"
   },
-  "remoteEnv": {
-    "PATH": "${containerEnv:VIRTUAL_ENV}/bin:${containerEnv:PATH}"
-  },
   "appPort": ["9123:8123", "7357:4357"],
-  "postCreateCommand": "bash devcontainer_setup",
-  "postStartCommand": "bash devcontainer_bootstrap",
+  "postCreateCommand": "bash devcontainer_bootstrap",
   "runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
-  "customizations": {
-    "vscode": {
-      "extensions": [
-        "charliermarsh.ruff",
-        "ms-python.pylint",
-        "ms-python.vscode-pylance",
-        "visualstudioexptteam.vscodeintellicode",
-        "redhat.vscode-yaml",
-        "esbenp.prettier-vscode",
-        "GitHub.vscode-pull-request-github"
-      ],
-      "settings": {
-        "python.defaultInterpreterPath": "/home/vscode/.local/ha-venv/bin/python",
-        "python.pythonPath": "/home/vscode/.local/ha-venv/bin/python",
-        "python.terminal.activateEnvInCurrentTerminal": true,
-        "python.testing.pytestArgs": ["--no-cov"],
-        "pylint.importStrategy": "fromEnvironment",
-        "editor.formatOnPaste": false,
-        "editor.formatOnSave": true,
-        "editor.formatOnType": true,
-        "files.trimTrailingWhitespace": true,
-        "terminal.integrated.profiles.linux": {
-          "zsh": {
-            "path": "/usr/bin/zsh"
-          }
-        },
-        "terminal.integrated.defaultProfile.linux": "zsh",
-        "[python]": {
-          "editor.defaultFormatter": "charliermarsh.ruff"
-        }
-      }
-    }
-  },
-  "mounts": [
-    "type=volume,target=/var/lib/docker",
-    "type=volume,target=/mnt/supervisor"
-  ]
+  "extensions": [
+    "ms-python.python",
+    "ms-python.vscode-pylance",
+    "visualstudioexptteam.vscodeintellicode",
+    "esbenp.prettier-vscode"
+  ],
+  "mounts": ["type=volume,target=/var/lib/docker"],
+  "settings": {
+    "terminal.integrated.profiles.linux": {
+      "zsh": {
+        "path": "/usr/bin/zsh"
+      }
+    },
+    "terminal.integrated.defaultProfile.linux": "zsh",
+    "editor.formatOnPaste": false,
+    "editor.formatOnSave": true,
+    "editor.formatOnType": true,
+    "files.trimTrailingWhitespace": true,
+    "python.pythonPath": "/usr/local/bin/python3",
+    "python.linting.pylintEnabled": true,
+    "python.linting.enabled": true,
+    "python.formatting.provider": "black",
+    "python.formatting.blackArgs": ["--target-version", "py310"],
+    "python.formatting.blackPath": "/usr/local/bin/black",
+    "python.linting.banditPath": "/usr/local/bin/bandit",
+    "python.linting.flake8Path": "/usr/local/bin/flake8",
+    "python.linting.mypyPath": "/usr/local/bin/mypy",
+    "python.linting.pylintPath": "/usr/local/bin/pylint",
+    "python.linting.pydocstylePath": "/usr/local/bin/pydocstyle"
+  }
 }

.github/ISSUE_TEMPLATE.md (new file)

@@ -0,0 +1,69 @@
---
name: Report a bug with the Supervisor on a supported System
about: Report an issue related to the Home Assistant Supervisor.
labels: bug
---
<!-- READ THIS FIRST:
- If you need additional help with this template please refer to https://www.home-assistant.io/help/reporting_issues/
- This is for bugs only. Feature and enhancement requests should go in our community forum: https://community.home-assistant.io/c/feature-requests
- Provide as many details as possible. Paste logs, configuration sample and code into the backticks. Do not delete any text from this template!
- If you have a problem with an add-on, make an issue in its repository.
-->
<!--
Important: You can only file a bug report for a supported system! If you run an unsupported installation, this report will be closed without comment.
-->
### Describe the issue
<!-- Provide as many details as possible. -->
### Steps to reproduce
<!-- What do you do to encounter the issue? -->
1. ...
2. ...
3. ...
### Environment details
<!-- You can find these details in the system tab of the supervisor panel, or by using the `ha` CLI. -->
- **Operating System**: xxx
- **Supervisor version**: xxx
- **Home Assistant version**: xxx
### Supervisor logs
<details>
<summary>Supervisor logs</summary>
<!--
- Frontend -> Supervisor -> System
- Or use this command: ha supervisor logs
- Logs are more than just errors; even if you don't think a line is important, it is.
-->
```
Paste supervisor logs here
```
</details>
### System Information
<details>
<summary>System Information</summary>
<!--
- Use this command: ha info
-->
```
Paste system info here
```
</details>


@@ -1,5 +1,6 @@
-name: Report an issue with Home Assistant Supervisor
+name: Bug Report Form
 description: Report an issue related to the Home Assistant Supervisor.
+labels: bug
 body:
   - type: markdown
     attributes:
@@ -8,7 +9,7 @@ body:
         If you have a feature or enhancement request, please use the [feature request][fr] section of our [Community Forum][fr].
-        [fr]: https://github.com/orgs/home-assistant/discussions
+        [fr]: https://community.home-assistant.io/c/feature-requests
   - type: textarea
     validations:
       required: true
@@ -25,7 +26,7 @@ body:
     attributes:
       label: What type of installation are you running?
       description: >
-        If you don't know, can be found in [Settings -> System -> Repairs -> (three dot menu) -> System Information](https://my.home-assistant.io/redirect/system_health/).
+        If you don't know, can be found in [Settings -> System -> Repairs -> System Information](https://my.home-assistant.io/redirect/system_health/).
         It is listed as the `Installation Type` value.
       options:
         - Home Assistant OS
@@ -71,21 +72,20 @@ body:
     validations:
       required: true
     attributes:
-      label: System information
+      label: System Health information
       description: >
-        The System information can be found in [Settings -> System -> Repairs -> (three dot menu) -> System Information](https://my.home-assistant.io/redirect/system_health/).
+        System Health information can be found in the top right menu in [Settings -> System -> Repairs](https://my.home-assistant.io/redirect/repairs/).
         Click the copy button at the bottom of the pop-up and paste it here.
         [![Open your Home Assistant instance and show health information about your system.](https://my.home-assistant.io/badges/system_health.svg)](https://my.home-assistant.io/redirect/system_health/)
   - type: textarea
     attributes:
       label: Supervisor diagnostics
       placeholder: "drag-and-drop the diagnostics data file here (do not copy-and-paste the content)"
       description: >-
-        Supervisor diagnostics can be found in [Settings -> Devices & services](https://my.home-assistant.io/redirect/integrations/).
-        Find the card that says `Home Assistant Supervisor`, open it, and select the three dot menu of the Supervisor integration entry
-        and select 'Download diagnostics'.
+        Supervisor diagnostics can be found in [Settings -> Integrations](https://my.home-assistant.io/redirect/integrations/).
+        Find the card that says `Home Assistant Supervisor`, open its menu and select 'Download diagnostics'.
         **Please drag-and-drop the downloaded file into the textbox below. Do not copy and paste its contents.**
   - type: textarea
     attributes:


@@ -13,7 +13,7 @@ contact_links:
     about: Our documentation has its own issue tracker. Please report issues with the website there.
   - name: Request a feature for the Supervisor
-    url: https://github.com/orgs/home-assistant/discussions
+    url: https://community.home-assistant.io/c/feature-requests
     about: Request a new feature for the Supervisor.
   - name: I have a question or need support


@@ -1,53 +0,0 @@
name: Task
description: For staff only - Create a task
type: Task
body:
- type: markdown
attributes:
value: |
## ⚠️ RESTRICTED ACCESS
**This form is restricted to Open Home Foundation staff and authorized contributors only.**
If you are a community member wanting to contribute, please:
- For bug reports: Use the [bug report form](https://github.com/home-assistant/supervisor/issues/new?template=bug_report.yml)
- For feature requests: Submit to [Feature Requests](https://github.com/orgs/home-assistant/discussions)
---
### For authorized contributors
Use this form to create tasks for development work, improvements, or other actionable items that need to be tracked.
- type: textarea
id: description
attributes:
label: Description
description: |
Provide a clear and detailed description of the task that needs to be accomplished.
Be specific about what needs to be done, why it's important, and any constraints or requirements.
placeholder: |
Describe the task, including:
- What needs to be done
- Why this task is needed
- Expected outcome
- Any constraints or requirements
validations:
required: true
- type: textarea
id: additional_context
attributes:
label: Additional context
description: |
Any additional information, links, research, or context that would be helpful.
Include links to related issues, research, prototypes, roadmap opportunities etc.
placeholder: |
- Roadmap opportunity: [link]
- Epic: [link]
- Feature request: [link]
- Technical design documents: [link]
- Prototype/mockup: [link]
- Dependencies: [links]
validations:
required: false


@@ -38,7 +38,6 @@
 - This PR is related to issue:
 - Link to documentation pull request:
 - Link to cli pull request:
-- Link to client library pull request:
 ## Checklist
@@ -53,14 +52,12 @@
 - [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
 - [ ] There is no commented out code in this PR.
 - [ ] I have followed the [development checklist][dev-checklist]
-- [ ] The code has been formatted using Ruff (`ruff format supervisor tests`)
+- [ ] The code has been formatted using Black (`black --fast supervisor tests`)
 - [ ] Tests have been added to verify that the new code works.
-If API endpoints or add-on configuration are added/changed:
+If API endpoints of add-on configuration are added/changed:
 - [ ] Documentation added/updated for [developers.home-assistant.io][docs-repository]
-- [ ] [CLI][cli-repository] updated (if necessary)
-- [ ] [Client library][client-library-repository] updated (if necessary)
 <!--
 Thank you for contributing <3
@@ -70,5 +67,3 @@ If API endpoints or add-on configuration are added/changed:
 [dev-checklist]: https://developers.home-assistant.io/docs/en/development_checklist.html
 [docs-repository]: https://github.com/home-assistant/developers.home-assistant
-[cli-repository]: https://github.com/home-assistant/cli
-[client-library-repository]: https://github.com/home-assistant-libs/python-supervisor-client/


@@ -1,288 +0,0 @@
# GitHub Copilot & Claude Code Instructions
This repository contains the Home Assistant Supervisor, a Python 3 based container
orchestration and management system for Home Assistant.
## Supervisor Capabilities & Features
### Architecture Overview
Home Assistant Supervisor is a Python-based container orchestration system that
communicates with the Docker daemon to manage containerized components. It is tightly
integrated with the underlying Operating System and core Operating System components
through D-Bus.
**Managed Components:**
- **Home Assistant Core**: The main home automation application running in its own
container (also provides the web interface)
- **Add-ons**: Third-party applications and services (each add-on runs in its own
container)
- **Plugins**: Built-in system services like DNS, Audio, CLI, Multicast, and Observer
- **Host System Integration**: OS-level operations and hardware access via D-Bus
- **Container Networking**: Internal Docker network management and external
connectivity
- **Storage & Backup**: Data persistence and backup management across all containers
**Key Dependencies:**
- **Docker Engine**: Required for all container operations
- **D-Bus**: System-level communication with the host OS
- **systemd**: Service management for host system operations
- **NetworkManager**: Network configuration and management
### Add-on System
**Add-on Architecture**: Add-ons are containerized applications available through
add-on stores. Each store contains multiple add-ons, and each add-on includes metadata
that tells Supervisor the version, startup configuration (permissions), and available
user configurable options. Add-on metadata typically references a container image that
Supervisor fetches during installation. Otherwise, Supervisor builds the container
image from a Dockerfile.
**Built-in Stores**: Supervisor comes with several pre-configured stores:
- **Core Add-ons**: Official add-ons maintained by the Home Assistant team
- **Community Add-ons**: Popular third-party add-ons repository
- **ESPHome**: Add-ons for ESPHome ecosystem integration
- **Music Assistant**: Audio and music-related add-ons
- **Local Development**: Local folder for testing custom add-ons during development
**Store Management**: Stores are Git-based repositories that are periodically updated.
When updates are available, users receive notifications.
**Add-on Lifecycle**:
- **Installation**: Supervisor fetches or builds container images based on add-on
metadata
- **Configuration**: Schema-validated options with integrated UI management
- **Runtime**: Full container lifecycle management, health monitoring
- **Updates**: Automatic or manual version management
### Update System
**Core Components**: Supervisor, Home Assistant Core, HAOS, and built-in plugins
receive version information from a central JSON file fetched from
`https://version.home-assistant.io/{channel}.json`. The `Updater` class handles
fetching this data, validating signatures, and updating internal version tracking.
**Update Channels**: Three channels (`stable`/`beta`/`dev`) determine which version
JSON file is fetched, allowing users to opt into different release streams.
**Add-on Updates**: Add-on version information comes from store repository updates, not
the central JSON file. When repositories are refreshed via the store system, add-ons
compare their local versions against repository versions to determine update
availability.
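As a rough illustration of the flow above, here is a minimal sketch of fetching a channel's version file. It assumes `aiohttp` is available and skips the signature validation the real `Updater` performs; the JSON key shown is an assumption about the file's layout.
```python
import asyncio

import aiohttp

# Channel file described above; channel is "stable", "beta", or "dev".
VERSION_URL = "https://version.home-assistant.io/{channel}.json"


async def fetch_versions(channel: str = "stable") -> dict:
    """Fetch the published component versions for an update channel."""
    async with aiohttp.ClientSession() as session:
        async with session.get(VERSION_URL.format(channel=channel)) as resp:
            resp.raise_for_status()
            return await resp.json()


if __name__ == "__main__":
    data = asyncio.run(fetch_versions())
    # "supervisor" is an assumed key; the real Updater also validates the
    # file's signature before trusting any value in it.
    print(data.get("supervisor"))
```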
### Backup & Recovery System
**Backup Capabilities**:
- **Full Backups**: Complete system state capture including all add-ons,
configuration, and data
- **Partial Backups**: Selective backup of specific components (Home Assistant,
add-ons, folders)
- **Encrypted Backups**: Optional backup encryption with user-provided passwords
- **Multiple Storage Locations**: Local storage and remote backup destinations
**Recovery Features**:
- **One-click Restore**: Simple restoration from backup files
- **Selective Restore**: Choose specific components to restore
- **Automatic Recovery**: Self-healing for common system issues
---
## Supervisor Development
### Python Requirements
- **Compatibility**: Python 3.13+
- **Language Features**: Use modern Python features:
- Type hints with `typing` module
- f-strings (preferred over `%` or `.format()`)
- Dataclasses and enum classes
- Async/await patterns
- Pattern matching where appropriate
### Code Quality Standards
- **Formatting**: Ruff
- **Linting**: PyLint and Ruff
- **Type Checking**: MyPy
- **Testing**: pytest with asyncio support
- **Language**: American English for all code, comments, and documentation
### Code Organization
**Core Structure**:
```
supervisor/
├── __init__.py # Package initialization
├── const.py # Constants and enums
├── coresys.py # Core system management
├── bootstrap.py # System initialization
├── exceptions.py # Custom exception classes
├── api/ # REST API endpoints
├── addons/ # Add-on management
├── backups/ # Backup system
├── docker/ # Docker integration
├── host/ # Host system interface
├── homeassistant/ # Home Assistant Core management
├── dbus/ # D-Bus system integration
├── hardware/ # Hardware detection and management
├── plugins/ # Plugin system
├── resolution/ # Issue detection and resolution
├── security/ # Security management
├── services/ # Service discovery and management
├── store/ # Add-on store management
└── utils/ # Utility functions
```
**Shared Constants**: Use constants from `supervisor/const.py` instead of hardcoding
values. Define new constants following existing patterns and group related constants
together.
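For instance, a new constant or enum would live alongside the existing ones and be imported where needed; this is a hypothetical excerpt, not the actual contents of `supervisor/const.py`:
```python
# supervisor/const.py (hypothetical excerpt)
from enum import StrEnum

ATTR_VERSION = "version"


class MyFeatureState(StrEnum):
    """States for a hypothetical feature, grouped with related constants."""

    ENABLED = "enabled"
    DISABLED = "disabled"
```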
### Supervisor Architecture Patterns
**CoreSysAttributes Inheritance Pattern**: Nearly all major classes in Supervisor
inherit from `CoreSysAttributes`, providing access to the centralized system state
via `self.coresys` and convenient `sys_*` properties.
```python
# Standard Supervisor class pattern
class MyManager(CoreSysAttributes):
"""Manage my functionality."""
def __init__(self, coresys: CoreSys):
"""Initialize manager."""
self.coresys: CoreSys = coresys
self._component: MyComponent = MyComponent(coresys)
@property
def component(self) -> MyComponent:
"""Return component handler."""
return self._component
# Access system components via inherited properties
async def do_something(self):
await self.sys_docker.containers.get("my_container")
self.sys_bus.fire_event(BusEvent.MY_EVENT, {"data": "value"})
```
**Key Inherited Properties from CoreSysAttributes**:
- `self.sys_docker` - Docker API access
- `self.sys_run_in_executor()` - Execute blocking operations
- `self.sys_create_task()` - Create async tasks
- `self.sys_bus` - Event bus for system events
- `self.sys_config` - System configuration
- `self.sys_homeassistant` - Home Assistant Core management
- `self.sys_addons` - Add-on management
- `self.sys_host` - Host system access
- `self.sys_dbus` - D-Bus system interface
**Load Pattern**: Many components implement a `load()` method that initializes
the component from external sources (containers, files, D-Bus services), as sketched below.
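A minimal sketch of that pattern, reusing the hypothetical manager from above (the state file helper is illustrative):
```python
class MyManager(CoreSysAttributes):
    """Manage my functionality."""

    def __init__(self, coresys: CoreSys):
        """Initialize manager."""
        self.coresys: CoreSys = coresys
        self._state: dict[str, str] = {}

    async def load(self) -> None:
        """Initialize component state from an external source."""
        # Blocking file I/O must go through the executor.
        self._state = await self.sys_run_in_executor(self._read_state)

    def _read_state(self) -> dict[str, str]:
        """Read persisted state from disk (illustrative helper)."""
        return {}
```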
### API Development
**REST API Structure**:
- **Base Path**: `/api/` for all endpoints
- **Authentication**: Bearer token authentication
- **Consistent Response Format**: `{"result": "ok", "data": {...}}` or
`{"result": "error", "message": "..."}`
- **Validation**: Use voluptuous schemas with `api_validate()`
**Use `@api_process` Decorator**: This decorator handles all standard error handling
and response formatting automatically. The decorator catches `APIError`, `HassioError`,
and other exceptions, returning appropriate HTTP responses.
```python
from ..api.utils import api_process, api_validate
@api_process
async def backup_full(self, request: web.Request) -> dict[str, Any]:
"""Create full backup."""
body = await api_validate(SCHEMA_BACKUP_FULL, request)
job = await self.sys_backups.do_backup_full(**body)
return {ATTR_JOB_ID: job.uuid}
```
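A matching schema sketch for the handler above; the field names here are assumptions for illustration, not the real `SCHEMA_BACKUP_FULL` definition:
```python
import voluptuous as vol

# Hypothetical request schema consumed by api_validate() above.
SCHEMA_BACKUP_FULL = vol.Schema(
    {
        vol.Optional("name"): str,
        vol.Optional("password"): vol.Maybe(str),
    }
)
```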
### Docker Integration
- **Container Management**: Use Supervisor's Docker manager instead of direct
Docker API
- **Networking**: Supervisor manages internal Docker networks with predefined IP
ranges
- **Security**: AppArmor profiles, capability restrictions, and user namespace
isolation
- **Health Checks**: Implement health monitoring for all managed containers
### D-Bus Integration
- **Use dbus-fast**: Async D-Bus library for system integration
- **Service Management**: systemd, NetworkManager, hostname management
- **Error Handling**: Wrap D-Bus exceptions in Supervisor-specific exceptions
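A sketch combining these three points, assuming dbus-fast's high-level asyncio API; `HostError` stands in for whatever Supervisor-specific exception the real wrapper would raise:
```python
from dbus_fast import BusType, DBusError
from dbus_fast.aio import MessageBus


async def read_hostname() -> str:
    """Read the Hostname property from systemd-hostnamed."""
    bus = await MessageBus(bus_type=BusType.SYSTEM).connect()
    try:
        introspection = await bus.introspect(
            "org.freedesktop.hostname1", "/org/freedesktop/hostname1"
        )
        proxy = bus.get_proxy_object(
            "org.freedesktop.hostname1", "/org/freedesktop/hostname1", introspection
        )
        interface = proxy.get_interface("org.freedesktop.hostname1")
        return await interface.get_hostname()
    except DBusError as err:
        # Wrap the D-Bus exception in a Supervisor-specific one
        # (HostError is illustrative here).
        raise HostError("Can't read hostname") from err
    finally:
        bus.disconnect()
```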
### Async Programming
- **All I/O operations must be async**: File operations, network calls, subprocess
execution
- **Use asyncio patterns**: Prefer `asyncio.gather()` over sequential awaits
- **Executor jobs**: Use `self.sys_run_in_executor()` for blocking operations
- **Two-phase initialization**: `__init__` for sync setup, `post_init()` for async
initialization
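A small sketch of these rules taken together, building on the manager pattern above (`addons` is an assumed iterable of add-on objects with an async `reload()`):
```python
import asyncio
import hashlib
from pathlib import Path


class MyManager(CoreSysAttributes):
    """Demonstrate concurrent awaits plus executor offloading."""

    async def reload_all(self, addons) -> None:
        """Reload independent add-ons concurrently, not sequentially."""
        results = await asyncio.gather(
            *(addon.reload() for addon in addons), return_exceptions=True
        )
        for addon, result in zip(addons, results):
            if isinstance(result, Exception):
                # Log and continue; one failure should not abort the rest.
                ...

    async def checksum(self, path: Path) -> str:
        """Hash a file without blocking the event loop."""
        return await self.sys_run_in_executor(self._blocking_checksum, path)

    def _blocking_checksum(self, path: Path) -> str:
        """Blocking helper that runs inside the executor."""
        return hashlib.sha256(path.read_bytes()).hexdigest()
```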
### Testing
- **Location**: `tests/` directory with module mirroring
- **Fixtures**: Extensive use of pytest fixtures for CoreSys setup
- **Mocking**: Mock external dependencies (Docker, D-Bus, network calls)
- **Coverage**: Minimum 90% test coverage, 100% for security-sensitive code
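A test sketch under those conventions; the manager, the `coresys` fixture wiring, and the mocked attribute path are assumptions for illustration rather than exact test-suite code:
```python
from unittest.mock import AsyncMock, patch

import pytest


@pytest.mark.asyncio
async def test_do_something(coresys):
    """Verify do_something() looks up its container, with Docker mocked."""
    manager = MyManager(coresys)
    with patch.object(
        coresys.docker.containers, "get", new=AsyncMock()
    ) as mock_get:
        await manager.do_something()
        mock_get.assert_awaited_once_with("my_container")
```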
### Error Handling
- **Custom Exceptions**: Defined in `exceptions.py` with clear inheritance hierarchy
- **Error Propagation**: Use `from` clause for exception chaining
- **API Errors**: Use `APIError` with appropriate HTTP status codes
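For example, chaining a low-level Docker error into a custom exception; the exception class and method here are illustrative:
```python
from docker.errors import DockerException

from ..exceptions import HassioError


class MyManagerError(HassioError):
    """Raise when MyManager operations fail (illustrative subclass)."""


class MyManager(CoreSysAttributes):
    """Manage my functionality."""

    async def pull_image(self, name: str) -> None:
        """Pull an image, chaining the low-level error for full context."""
        try:
            await self.sys_run_in_executor(self.sys_docker.images.pull, name)
        except DockerException as err:
            raise MyManagerError(f"Can't pull image {name}") from err
```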
### Security Considerations
- **Container Security**: AppArmor profiles mandatory for add-ons, minimal
capabilities
- **Authentication**: Token-based API authentication with role-based access
- **Data Protection**: Backup encryption, secure secret management, comprehensive
input validation
### Development Commands
```bash
# Run tests, adjust paths as necessary
pytest -qsx tests/
# Linting and formatting
ruff check supervisor/
ruff format supervisor/
# Type checking
mypy --ignore-missing-imports supervisor/
# Pre-commit hooks
pre-commit run --all-files
```
Always run the pre-commit hooks at the end of code editing.
### Common Patterns to Follow
**✅ Use These Patterns**:
- Inherit from `CoreSysAttributes` for system access
- Use `@api_process` decorator for API endpoints
- Use `self.sys_run_in_executor()` for blocking operations
- Access Docker via `self.sys_docker` not direct Docker API
- Use constants from `const.py` instead of hardcoding
- Store types in (per-module) `const.py` (e.g. supervisor/store/const.py)
**❌ Avoid These Patterns**:
- Direct Docker API usage - use Supervisor's Docker manager
- Blocking operations in async context (use asyncio alternatives)
- Hardcoded values - use constants from `const.py`
- Manual error handling in API endpoints - let `@api_process` handle it
This guide provides the foundation for contributing to Home Assistant Supervisor.
Follow these patterns and guidelines to ensure code quality, security, and
maintainability.


@@ -33,7 +33,7 @@ on:
       - setup.py
 env:
-  DEFAULT_PYTHON: "3.13"
+  DEFAULT_PYTHON: "3.11"
   BUILD_NAME: supervisor
   BUILD_TYPE: supervisor
@@ -53,7 +53,7 @@ jobs:
       requirements: ${{ steps.requirements.outputs.changed }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.0.0
         with:
           fetch-depth: 0
@@ -70,13 +70,13 @@ jobs:
       - name: Get changed files
         id: changed_files
         if: steps.version.outputs.publish == 'false'
-        uses: masesgroup/retrieve-changed-files@v3.0.0
+        uses: jitterbit/get-changed-files@v1
       - name: Check if requirements files changed
         id: requirements
         run: |
-          if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.yaml) ]]; then
-            echo "changed=true" >> "$GITHUB_OUTPUT"
+          if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.json) ]]; then
+            echo "::set-output name=changed::true"
           fi
   build:
@@ -92,7 +92,7 @@ jobs:
         arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.0.0
         with:
           fetch-depth: 0
@@ -106,13 +106,13 @@ jobs:
       - name: Build wheels
         if: needs.init.outputs.requirements == 'true'
-        uses: home-assistant/wheels@2025.03.0
+        uses: home-assistant/wheels@2023.04.0
         with:
-          abi: cp313
+          abi: cp311
           tag: musllinux_1_2
           arch: ${{ matrix.arch }}
           wheels-key: ${{ secrets.WHEELS_KEY }}
-          apk: "libffi-dev;openssl-dev;yaml-dev"
+          apk: "libffi-dev;openssl-dev"
           skip-binary: aiohttp
           env-file: true
           requirements: "requirements.txt"
@@ -125,20 +125,20 @@ jobs:
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         if: needs.init.outputs.publish == 'true'
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v4.7.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Install Cosign
         if: needs.init.outputs.publish == 'true'
-        uses: sigstore/cosign-installer@v3.9.2
+        uses: sigstore/cosign-installer@v3.1.2
         with:
-          cosign-release: "v2.4.3"
+          cosign-release: "v2.0.2"
       - name: Install dirhash and calc hash
         if: needs.init.outputs.publish == 'true'
         run: |
-          pip3 install setuptools dirhash
+          pip3 install dirhash
           dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")"
           echo "${dir_hash}" > rootfs/supervisor.sha256
@@ -149,7 +149,7 @@ jobs:
       - name: Login to GitHub Container Registry
         if: needs.init.outputs.publish == 'true'
-        uses: docker/login-action@v3.4.0
+        uses: docker/login-action@v2.2.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -160,7 +160,7 @@ jobs:
         run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV
       - name: Build supervisor
-        uses: home-assistant/builder@2025.03.0
+        uses: home-assistant/builder@2023.08.0
         with:
           args: |
             $BUILD_ARGS \
@@ -178,7 +178,7 @@ jobs:
     steps:
       - name: Checkout the repository
         if: needs.init.outputs.publish == 'true'
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.0.0
       - name: Initialize git
         if: needs.init.outputs.publish == 'true'
@@ -203,11 +203,11 @@ jobs:
     timeout-minutes: 60
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.0.0
       - name: Build the Supervisor
         if: needs.init.outputs.publish != 'true'
-        uses: home-assistant/builder@2025.03.0
+        uses: home-assistant/builder@2023.08.0
         with:
           args: |
             --test \
@@ -324,7 +324,7 @@ jobs:
           if [ "$(echo $test | jq -r '.result')" != "ok" ]; then
             exit 1
           fi
-          echo "slug=$(echo $test | jq -r '.data.slug')" >> "$GITHUB_OUTPUT"
+          echo "::set-output name=slug::$(echo $test | jq -r '.data.slug')"
       - name: Uninstall SSH add-on
         run: |

@@ -8,9 +8,8 @@ on:
   pull_request: ~
 env:
-  DEFAULT_PYTHON: "3.13"
-  PRE_COMMIT_CACHE: ~/.cache/pre-commit
-  MYPY_CACHE_VERSION: 1
+  DEFAULT_PYTHON: "3.11"
+  PRE_COMMIT_HOME: ~/.cache/pre-commit
 concurrency:
   group: "${{ github.workflow }}-${{ github.ref }}"
@@ -26,15 +25,15 @@ jobs:
     name: Prepare Python dependencies
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.0.0
       - name: Set up Python
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v4.7.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v3.3.1
         with:
           path: venv
           key: |
@@ -48,10 +47,9 @@ jobs:
           pip install -r requirements.txt -r requirements_tests.txt
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v3.3.1
         with:
-          path: ${{ env.PRE_COMMIT_CACHE }}
-          lookup-only: true
+          path: ${{ env.PRE_COMMIT_HOME }}
           key: |
             ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
           restore-keys: |
@@ -62,21 +60,21 @@ jobs:
           . venv/bin/activate
           pre-commit install-hooks
-  lint-ruff-format:
-    name: Check ruff-format
+  lint-black:
+    name: Check black
     runs-on: ubuntu-latest
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.0.0
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v4.7.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v3.3.1
         with:
           path: venv
           key: |
@@ -86,67 +84,10 @@ jobs:
         run: |
           echo "Failed to restore Python virtual environment from cache"
           exit 1
-      - name: Restore pre-commit environment from cache
-        id: cache-precommit
-        uses: actions/cache@v4.2.3
-        with:
-          path: ${{ env.PRE_COMMIT_CACHE }}
-          key: |
-            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
-      - name: Fail job if cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Run ruff-format
+      - name: Run black
         run: |
           . venv/bin/activate
-          pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure
-        env:
-          RUFF_OUTPUT_FORMAT: github
-  lint-ruff:
-    name: Check ruff
-    runs-on: ubuntu-latest
-    needs: prepare
-    steps:
-      - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.6.0
-        id: python
-        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Restore Python virtual environment
-        id: cache-venv
-        uses: actions/cache@v4.2.3
-        with:
-          path: venv
-          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
-      - name: Fail job if Python cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Restore pre-commit environment from cache
-        id: cache-precommit
-        uses: actions/cache@v4.2.3
-        with:
-          path: ${{ env.PRE_COMMIT_CACHE }}
-          key: |
-            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
-      - name: Fail job if cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Run ruff
-        run: |
-          . venv/bin/activate
-          pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
-        env:
-          RUFF_OUTPUT_FORMAT: github
+          black --target-version py38 --check supervisor tests setup.py
   lint-dockerfile:
     name: Check Dockerfile
@@ -154,7 +95,7 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.0.0
       - name: Register hadolint problem matcher
         run: |
           echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -169,15 +110,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.0.0
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v4.7.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v3.3.1
         with:
           path: venv
           key: |
@@ -189,9 +130,9 @@ jobs:
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v3.3.1
         with:
-          path: ${{ env.PRE_COMMIT_CACHE }}
+          path: ${{ env.PRE_COMMIT_HOME }}
           key: |
             ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
       - name: Fail job if cache restore failed
@@ -207,21 +148,53 @@ jobs:
           . venv/bin/activate
           pre-commit run --hook-stage manual check-executables-have-shebangs --all-files
-  lint-json:
-    name: Check JSON
+  lint-flake8:
+    name: Check flake8
     runs-on: ubuntu-latest
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.0.0
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v4.7.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v3.3.1
+        with:
+          path: venv
+          key: |
+            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+      - name: Fail job if Python cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Register flake8 problem matcher
+        run: |
+          echo "::add-matcher::.github/workflows/matchers/flake8.json"
+      - name: Run flake8
+        run: |
+          . venv/bin/activate
+          flake8 supervisor tests
+  lint-isort:
+    name: Check isort
+    runs-on: ubuntu-latest
+    needs: prepare
+    steps:
+      - name: Check out code from GitHub
+        uses: actions/checkout@v4.0.0
+      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
+        uses: actions/setup-python@v4.7.0
+        id: python
+        with:
+          python-version: ${{ needs.prepare.outputs.python-version }}
+      - name: Restore Python virtual environment
+        id: cache-venv
+        uses: actions/cache@v3.3.1
         with:
           path: venv
           key: |
@@ -233,9 +206,50 @@
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v3.3.1
         with:
-          path: ${{ env.PRE_COMMIT_CACHE }}
+          path: ${{ env.PRE_COMMIT_HOME }}
+          key: |
+            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
+      - name: Fail job if cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Run isort
+        run: |
+          . venv/bin/activate
+          pre-commit run --hook-stage manual isort --all-files --show-diff-on-failure
+  lint-json:
+    name: Check JSON
+    runs-on: ubuntu-latest
+    needs: prepare
+    steps:
+      - name: Check out code from GitHub
+        uses: actions/checkout@v4.0.0
+      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
+        uses: actions/setup-python@v4.7.0
+        id: python
+        with:
+          python-version: ${{ needs.prepare.outputs.python-version }}
+      - name: Restore Python virtual environment
+        id: cache-venv
+        uses: actions/cache@v3.3.1
+        with:
+          path: venv
+          key: |
+            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+      - name: Fail job if Python cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Restore pre-commit environment from cache
+        id: cache-precommit
+        uses: actions/cache@v3.3.1
+        with:
+          path: ${{ env.PRE_COMMIT_HOME }}
           key: |
             ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
       - name: Fail job if cache restore failed
@@ -257,15 +271,15 @@
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.0.0
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v4.7.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v3.3.1
         with:
           path: venv
           key: |
@@ -275,10 +289,6 @@
         run: |
           echo "Failed to restore Python virtual environment from cache"
           exit 1
-      - name: Install additional system dependencies
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y --no-install-recommends libpulse0
       - name: Register pylint problem matcher
         run: |
           echo "::add-matcher::.github/workflows/matchers/pylint.json"
@@ -287,51 +297,46 @@
           . venv/bin/activate
           pylint supervisor tests
-  mypy:
-    name: Check mypy
+  lint-pyupgrade:
+    name: Check pyupgrade
     runs-on: ubuntu-latest
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.0.0
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v4.7.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Generate partial mypy restore key
-        id: generate-mypy-key
-        run: |
-          mypy_version=$(cat requirements_test.txt | grep mypy | cut -d '=' -f 3)
-          echo "version=$mypy_version" >> $GITHUB_OUTPUT
-          echo "key=mypy-${{ env.MYPY_CACHE_VERSION }}-$mypy_version-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v3.3.1
         with:
           path: venv
-          key: >-
+          key: |
             ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
       - name: Fail job if Python cache restore failed
         if: steps.cache-venv.outputs.cache-hit != 'true'
         run: |
           echo "Failed to restore Python virtual environment from cache"
           exit 1
-      - name: Restore mypy cache
-        uses: actions/cache@v4.2.3
+      - name: Restore pre-commit environment from cache
+        id: cache-precommit
+        uses: actions/cache@v3.3.1
         with:
-          path: .mypy_cache
-          key: >-
-            ${{ runner.os }}-mypy-${{ needs.prepare.outputs.python-version }}-${{ steps.generate-mypy-key.outputs.key }}
-          restore-keys: >-
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-mypy-${{ env.MYPY_CACHE_VERSION }}-${{ steps.generate-mypy-key.outputs.version }}
-      - name: Register mypy problem matcher
+          path: ${{ env.PRE_COMMIT_HOME }}
+          key: |
+            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
+      - name: Fail job if cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
         run: |
-          echo "::add-matcher::.github/workflows/matchers/mypy.json"
-      - name: Run mypy
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Run pyupgrade
         run: |
           . venv/bin/activate
-          mypy --ignore-missing-imports supervisor
+          pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure
   pytest:
     runs-on: ubuntu-latest
@@ -339,19 +344,19 @@
     name: Run tests Python ${{ needs.prepare.outputs.python-version }}
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.0.0
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v4.7.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Install Cosign
-        uses: sigstore/cosign-installer@v3.9.2
+        uses: sigstore/cosign-installer@v3.1.2
         with:
-          cosign-release: "v2.4.3"
+          cosign-release: "v2.0.2"
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v3.3.1
         with:
           path: venv
           key: |
@@ -364,7 +369,7 @@
       - name: Install additional system dependencies
         run: |
           sudo apt-get update
-          sudo apt-get install -y --no-install-recommends libpulse0 libudev1 dbus-daemon
+          sudo apt-get install -y --no-install-recommends libpulse0 libudev1 dbus dbus-x11
       - name: Register Python problem matcher
         run: |
           echo "::add-matcher::.github/workflows/matchers/python.json"
@@ -386,11 +391,10 @@
             -o console_output_style=count \
             tests
       - name: Upload coverage artifact
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@v3.1.2
         with:
           name: coverage-${{ matrix.python-version }}
           path: .coverage
-          include-hidden-files: true
   coverage:
     name: Process test coverage
@@ -398,15 +402,15 @@
     needs: ["pytest", "prepare"]
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.0.0
      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v4.7.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v3.3.1
         with:
           path: venv
           key: |
@@ -417,7 +421,7 @@
           echo "Failed to restore Python virtual environment from cache"
           exit 1
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v3
       - name: Combine coverage results
         run: |
           . venv/bin/activate
@@ -425,4 +429,4 @@
           coverage report
           coverage xml
       - name: Upload coverage to Codecov
-        uses: codecov/codecov-action@v5.4.3
+        uses: codecov/codecov-action@v3.1.4


@@ -9,7 +9,7 @@ jobs:
   lock:
     runs-on: ubuntu-latest
     steps:
-      - uses: dessant/lock-threads@v5.0.1
+      - uses: dessant/lock-threads@v4.0.1
        with:
          github-token: ${{ github.token }}
          issue-inactive-days: "30"

.github/workflows/matchers/flake8.json (new file)

@@ -0,0 +1,30 @@
{
"problemMatcher": [
{
"owner": "flake8-error",
"severity": "error",
"pattern": [
{
"regexp": "^(.*):(\\d+):(\\d+):\\s(E\\d{3}\\s.*)$",
"file": 1,
"line": 2,
"column": 3,
"message": 4
}
]
},
{
"owner": "flake8-warning",
"severity": "warning",
"pattern": [
{
"regexp": "^(.*):(\\d+):(\\d+):\\s([CDFNW]\\d{3}\\s.*)$",
"file": 1,
"line": 2,
"column": 3,
"message": 4
}
]
}
]
}


@@ -1,16 +0,0 @@
{
"problemMatcher": [
{
"owner": "mypy",
"pattern": [
{
"regexp": "^(.+):(\\d+):\\s(error|warning):\\s(.+)$",
"file": 1,
"line": 2,
"severity": 3,
"message": 4
}
]
}
]
}


@@ -11,7 +11,7 @@ jobs:
     name: Release Drafter
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.0.0
         with:
           fetch-depth: 0
@@ -33,10 +33,10 @@ jobs:
           echo Current version: $latest
           echo New target version: $datepre.$newpost
-          echo "version=$datepre.$newpost" >> "$GITHUB_OUTPUT"
+          echo "::set-output name=version::$datepre.$newpost"
       - name: Run Release Drafter
-        uses: release-drafter/release-drafter@v6.1.0
+        uses: release-drafter/release-drafter@v5.24.0
         with:
           tag: ${{ steps.version.outputs.version }}
           name: ${{ steps.version.outputs.version }}


@@ -1,58 +0,0 @@
name: Restrict task creation
# yamllint disable-line rule:truthy
on:
issues:
types: [opened]
jobs:
check-authorization:
runs-on: ubuntu-latest
# Only run if this is a Task issue type (from the issue form)
if: github.event.issue.issue_type == 'Task'
steps:
- name: Check if user is authorized
uses: actions/github-script@v7
with:
script: |
const issueAuthor = context.payload.issue.user.login;
// Check if user is an organization member
try {
await github.rest.orgs.checkMembershipForUser({
org: 'home-assistant',
username: issueAuthor
});
console.log(`✅ ${issueAuthor} is an organization member`);
return; // Authorized
} catch (error) {
console.log(`❌ ${issueAuthor} is not authorized to create Task issues`);
}
// Close the issue with a comment
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
body: `Hi @${issueAuthor}, thank you for your contribution!\n\n` +
`Task issues are restricted to Open Home Foundation staff and authorized contributors.\n\n` +
`If you would like to:\n` +
`- Report a bug: Please use the [bug report form](https://github.com/home-assistant/supervisor/issues/new?template=bug_report.yml)\n` +
`- Request a feature: Please submit to [Feature Requests](https://github.com/orgs/home-assistant/discussions)\n\n` +
`If you believe you should have access to create Task issues, please contact the maintainers.`
});
await github.rest.issues.update({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
state: 'closed'
});
// Add a label to indicate this was auto-closed
await github.rest.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
labels: ['auto-closed']
});


@@ -10,9 +10,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.0.0
       - name: Sentry Release
-        uses: getsentry/action-release@v3.2.0
+        uses: getsentry/action-release@v1.4.1
        env:
          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
          SENTRY_ORG: ${{ secrets.SENTRY_ORG }}


@@ -9,7 +9,7 @@ jobs:
   stale:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/stale@v9.1.0
+      - uses: actions/stale@v8.0.0
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          days-before-stale: 30


@@ -1,82 +0,0 @@
name: Update frontend
on:
schedule: # once a day
- cron: "0 0 * * *"
workflow_dispatch:
jobs:
check-version:
runs-on: ubuntu-latest
outputs:
skip: ${{ steps.check_version.outputs.skip || steps.check_existing_pr.outputs.skip }}
current_version: ${{ steps.check_version.outputs.current_version }}
latest_version: ${{ steps.latest_frontend_version.outputs.latest_tag }}
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Get latest frontend release
id: latest_frontend_version
uses: abatilo/release-info-action@v1.3.3
with:
owner: home-assistant
repo: frontend
- name: Check if version is up to date
id: check_version
run: |
current_version="$(cat .ha-frontend-version)"
latest_version="${{ steps.latest_frontend_version.outputs.latest_tag }}"
echo "current_version=${current_version}" >> $GITHUB_OUTPUT
echo "LATEST_VERSION=${latest_version}" >> $GITHUB_ENV
if [[ ! "$current_version" < "$latest_version" ]]; then
echo "Frontend version is up to date"
echo "skip=true" >> $GITHUB_OUTPUT
fi
- name: Check if there is no open PR with this version
if: steps.check_version.outputs.skip != 'true'
id: check_existing_pr
env:
GH_TOKEN: ${{ github.token }}
run: |
PR=$(gh pr list --state open --base main --json title --search "Update frontend to version $LATEST_VERSION")
if [[ "$PR" != "[]" ]]; then
echo "Skipping - There is already a PR open for version $LATEST_VERSION"
echo "skip=true" >> $GITHUB_OUTPUT
fi
create-pr:
runs-on: ubuntu-latest
needs: check-version
if: needs.check-version.outputs.skip != 'true'
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Clear www folder
run: |
rm -rf supervisor/api/panel/*
- name: Update version file
run: |
echo "${{ needs.check-version.outputs.latest_version }}" > .ha-frontend-version
- name: Download release assets
uses: robinraju/release-downloader@v1
with:
repository: 'home-assistant/frontend'
tag: ${{ needs.check-version.outputs.latest_version }}
fileName: home_assistant_frontend_supervisor-${{ needs.check-version.outputs.latest_version }}.tar.gz
extract: true
out-file-path: supervisor/api/panel/
- name: Remove release assets archive
run: |
rm -f supervisor/api/panel/home_assistant_frontend_supervisor-*.tar.gz
- name: Create PR
uses: peter-evans/create-pull-request@v7
with:
commit-message: "Update frontend to version ${{ needs.check-version.outputs.latest_version }}"
branch: autoupdate-frontend
base: main
draft: true
sign-commits: true
title: "Update frontend to version ${{ needs.check-version.outputs.latest_version }}"
body: >
Update frontend from ${{ needs.check-version.outputs.current_version }} to
[${{ needs.check-version.outputs.latest_version }}](https://github.com/home-assistant/frontend/releases/tag/${{ needs.check-version.outputs.latest_version }})

.gitmodules (new file)

@@ -0,0 +1,4 @@
[submodule "home-assistant-polymer"]
path = home-assistant-polymer
url = https://github.com/home-assistant/home-assistant-polymer
branch = dev


@@ -1 +0,0 @@
20250401.0


@@ -3,5 +3,4 @@ ignored:
   - DL3006
   - DL3013
   - DL3018
-  - DL3042
   - SC2155


@@ -1,27 +1,34 @@
 repos:
-  - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.11.10
+  - repo: https://github.com/psf/black
+    rev: 23.1.0
     hooks:
-      - id: ruff
+      - id: black
         args:
-          - --fix
-      - id: ruff-format
+          - --safe
+          - --quiet
+          - --target-version
+          - py310
         files: ^((supervisor|tests)/.+)?[^/]+\.py$
+  - repo: https://github.com/PyCQA/flake8
+    rev: 6.0.0
+    hooks:
+      - id: flake8
+        additional_dependencies:
+          - flake8-docstrings==1.7.0
+          - pydocstyle==6.3.0
+        files: ^(supervisor|script|tests)/.+\.py$
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v5.0.0
+    rev: v4.3.0
     hooks:
       - id: check-executables-have-shebangs
         stages: [manual]
       - id: check-json
-  - repo: local
+  - repo: https://github.com/PyCQA/isort
+    rev: 5.12.0
     hooks:
-      # Run mypy through our wrapper script in order to get the possible
-      # pyenv and/or virtualenv activated; it may not have been e.g. if
-      # committing from a GUI tool that was not launched from an activated
-      # shell.
-      - id: mypy
-        name: mypy
-        entry: script/run-in-env.sh mypy --ignore-missing-imports
-        language: script
-        types_or: [python, pyi]
-        files: ^supervisor/.+\.(py|pyi)$
+      - id: isort
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v3.4.0
+    hooks:
+      - id: pyupgrade
+        args: [--py310-plus]

.vscode/tasks.json

@@ -58,23 +58,9 @@
       "problemMatcher": []
     },
     {
-      "label": "Ruff Check",
+      "label": "Flake8",
       "type": "shell",
-      "command": "ruff check --fix supervisor tests",
-      "group": {
-        "kind": "test",
-        "isDefault": true
-      },
-      "presentation": {
-        "reveal": "always",
-        "panel": "new"
-      },
-      "problemMatcher": []
-    },
-    {
-      "label": "Ruff Format",
-      "type": "shell",
-      "command": "ruff format supervisor tests",
+      "command": "flake8 supervisor tests",
       "group": {
         "kind": "test",
         "isDefault": true


@@ -1 +0,0 @@
.github/copilot-instructions.md


@@ -4,20 +4,17 @@ FROM ${BUILD_FROM}
 ENV \
     S6_SERVICES_GRACETIME=10000 \
     SUPERVISOR_API=http://localhost \
-    CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1 \
-    UV_SYSTEM_PYTHON=true
+    CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1
 ARG \
     COSIGN_VERSION \
-    BUILD_ARCH \
-    QEMU_CPU
+    BUILD_ARCH
 # Install base
 WORKDIR /usr/src
 RUN \
     set -x \
     && apk add --no-cache \
-        findutils \
         eudev \
         eudev-libs \
         git \
@@ -25,27 +22,23 @@ RUN \
         libpulse \
         musl \
         openssl \
-        yaml \
     \
     && curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \
-    && chmod a+x /usr/bin/cosign \
-    && pip3 install uv==0.6.17
+    && chmod a+x /usr/bin/cosign
 # Install requirements
 COPY requirements.txt .
 RUN \
-    if [ "${BUILD_ARCH}" = "i386" ]; then \
-        setarch="linux32"; \
-    else \
-        setarch=""; \
-    fi \
-    && ${setarch} uv pip install --compile-bytecode --no-cache --no-build -r requirements.txt \
+    export MAKEFLAGS="-j$(nproc)" \
+    && pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links \
+        "https://wheels.home-assistant.io/musllinux/" \
+        -r ./requirements.txt \
     && rm -f requirements.txt
 # Install Home Assistant Supervisor
 COPY . supervisor
 RUN \
-    uv pip install --no-cache -e ./supervisor \
+    pip3 install --no-cache-dir -e ./supervisor \
     && python3 -m compileall ./supervisor/supervisor


@@ -30,5 +30,3 @@ Releases are done in 3 stages (channels) with this structure:
 [development]: https://developers.home-assistant.io/docs/supervisor/development
 [stable]: https://github.com/home-assistant/version/blob/master/stable.json
-[![Home Assistant - A project from the Open Home Foundation](https://www.openhomefoundation.org/badges/home-assistant.png)](https://www.openhomefoundation.org/)


@@ -1,10 +1,10 @@
 image: ghcr.io/home-assistant/{arch}-hassio-supervisor
 build_from:
-  aarch64: ghcr.io/home-assistant/aarch64-base-python:3.13-alpine3.21
-  armhf: ghcr.io/home-assistant/armhf-base-python:3.13-alpine3.21
-  armv7: ghcr.io/home-assistant/armv7-base-python:3.13-alpine3.21
-  amd64: ghcr.io/home-assistant/amd64-base-python:3.13-alpine3.21
-  i386: ghcr.io/home-assistant/i386-base-python:3.13-alpine3.21
+  aarch64: ghcr.io/home-assistant/aarch64-base-python:3.11-alpine3.16
+  armhf: ghcr.io/home-assistant/armhf-base-python:3.11-alpine3.16
+  armv7: ghcr.io/home-assistant/armv7-base-python:3.11-alpine3.16
+  amd64: ghcr.io/home-assistant/amd64-base-python:3.11-alpine3.16
+  i386: ghcr.io/home-assistant/i386-base-python:3.11-alpine3.16
 codenotary:
   signer: notary@home-assistant.io
   base_image: notary@home-assistant.io
@@ -12,7 +12,7 @@ cosign:
   base_identity: https://github.com/home-assistant/docker-base/.*
   identity: https://github.com/home-assistant/supervisor/.*
 args:
-  COSIGN_VERSION: 2.4.3
+  COSIGN_VERSION: 2.0.2
 labels:
   io.hass.type: supervisor
   org.opencontainers.image.title: Home Assistant Supervisor

@@ -0,0 +1 @@
Subproject commit 9d457d52e80fc936491f0e8ece773d90748889c4

pylintrc Normal file

@@ -0,0 +1,45 @@
[MASTER]
reports=no
jobs=2
good-names=id,i,j,k,ex,Run,_,fp,T,os
extension-pkg-whitelist=
ciso8601
# Reasons disabled:
# format - handled by black
# locally-disabled - it spams too much
# duplicate-code - unavoidable
# cyclic-import - doesn't test if both import on load
# abstract-class-not-used - is flaky, should not show up but does
# unused-argument - generic callbacks and setup methods create a lot of warnings
# too-many-* - are not enforced for the sake of readability
# too-few-* - same as too-many-*
# abstract-method - with intro of async there are always methods missing
disable=
format,
abstract-method,
cyclic-import,
duplicate-code,
locally-disabled,
no-else-return,
not-context-manager,
too-few-public-methods,
too-many-arguments,
too-many-branches,
too-many-instance-attributes,
too-many-lines,
too-many-locals,
too-many-public-methods,
too-many-return-statements,
too-many-statements,
unused-argument,
consider-using-with
[EXCEPTIONS]
overgeneral-exceptions=builtins.Exception
[TYPECHECK]
ignored-modules = distutils


@@ -1,376 +0,0 @@
[build-system]
requires = ["setuptools~=80.9.0", "wheel~=0.46.1"]
build-backend = "setuptools.build_meta"
[project]
name = "Supervisor"
dynamic = ["version", "dependencies"]
license = { text = "Apache-2.0" }
description = "Open-source private cloud os for Home-Assistant based on HassOS"
readme = "README.md"
authors = [
{ name = "The Home Assistant Authors", email = "hello@home-assistant.io" },
]
keywords = ["docker", "home-assistant", "api"]
requires-python = ">=3.13.0"
[project.urls]
"Homepage" = "https://www.home-assistant.io/"
"Source Code" = "https://github.com/home-assistant/supervisor"
"Bug Reports" = "https://github.com/home-assistant/supervisor/issues"
"Docs: Dev" = "https://developers.home-assistant.io/"
"Discord" = "https://www.home-assistant.io/join-chat/"
"Forum" = "https://community.home-assistant.io/"
[tool.setuptools]
platforms = ["any"]
zip-safe = false
include-package-data = true
[tool.setuptools.packages.find]
include = ["supervisor*"]
[tool.pylint.MAIN]
py-version = "3.13"
# Use a conservative default here; 2 should speed up most setups and not hurt
# any too bad. Override on command line as appropriate.
jobs = 2
persistent = false
extension-pkg-allow-list = ["ciso8601"]
[tool.pylint.BASIC]
class-const-naming-style = "any"
good-names = ["id", "i", "j", "k", "ex", "Run", "_", "fp", "T", "os"]
[tool.pylint."MESSAGES CONTROL"]
# Reasons disabled:
# format - handled by ruff
# abstract-method - with intro of async there are always methods missing
# cyclic-import - doesn't test if both import on load
# duplicate-code - unavoidable
# locally-disabled - it spams too much
# too-many-* - are not enforced for the sake of readability
# too-few-* - same as too-many-*
# unused-argument - generic callbacks and setup methods create a lot of warnings
disable = [
"format",
"abstract-method",
"cyclic-import",
"duplicate-code",
"locally-disabled",
"no-else-return",
"not-context-manager",
"too-few-public-methods",
"too-many-arguments",
"too-many-branches",
"too-many-instance-attributes",
"too-many-lines",
"too-many-locals",
"too-many-public-methods",
"too-many-return-statements",
"too-many-statements",
"unused-argument",
"consider-using-with",
# Handled by ruff
# Ref: <https://github.com/astral-sh/ruff/issues/970>
"await-outside-async", # PLE1142
"bad-str-strip-call", # PLE1310
"bad-string-format-type", # PLE1307
"bidirectional-unicode", # PLE2502
"continue-in-finally", # PLE0116
"duplicate-bases", # PLE0241
"format-needs-mapping", # F502
"function-redefined", # F811
# Needed because ruff does not understand type of __all__ generated by a function
# "invalid-all-format", # PLE0605
"invalid-all-object", # PLE0604
"invalid-character-backspace", # PLE2510
"invalid-character-esc", # PLE2513
"invalid-character-nul", # PLE2514
"invalid-character-sub", # PLE2512
"invalid-character-zero-width-space", # PLE2515
"logging-too-few-args", # PLE1206
"logging-too-many-args", # PLE1205
"missing-format-string-key", # F524
"mixed-format-string", # F506
"no-method-argument", # N805
"no-self-argument", # N805
"nonexistent-operator", # B002
"nonlocal-without-binding", # PLE0117
"not-in-loop", # F701, F702
"notimplemented-raised", # F901
"return-in-init", # PLE0101
"return-outside-function", # F706
"syntax-error", # E999
"too-few-format-args", # F524
"too-many-format-args", # F522
"too-many-star-expressions", # F622
"truncated-format-string", # F501
"undefined-all-variable", # F822
"undefined-variable", # F821
"used-prior-global-declaration", # PLE0118
"yield-inside-async-function", # PLE1700
"yield-outside-function", # F704
"anomalous-backslash-in-string", # W605
"assert-on-string-literal", # PLW0129
"assert-on-tuple", # F631
"bad-format-string", # W1302, F
"bad-format-string-key", # W1300, F
"bare-except", # E722
"binary-op-exception", # PLW0711
"cell-var-from-loop", # B023
# "dangerous-default-value", # B006, ruff catches new occurrences, needs more work
"duplicate-except", # B014
"duplicate-key", # F601
"duplicate-string-formatting-argument", # F
"duplicate-value", # F
"eval-used", # PGH001
"exec-used", # S102
# "expression-not-assigned", # B018, ruff catches new occurrences, needs more work
"f-string-without-interpolation", # F541
"forgotten-debug-statement", # T100
"format-string-without-interpolation", # F
# "global-statement", # PLW0603, ruff catches new occurrences, needs more work
"global-variable-not-assigned", # PLW0602
"implicit-str-concat", # ISC001
"import-self", # PLW0406
"inconsistent-quotes", # Q000
"invalid-envvar-default", # PLW1508
"keyword-arg-before-vararg", # B026
"logging-format-interpolation", # G
"logging-fstring-interpolation", # G
"logging-not-lazy", # G
"misplaced-future", # F404
"named-expr-without-context", # PLW0131
"nested-min-max", # PLW3301
# "pointless-statement", # B018, ruff catches new occurrences, needs more work
"raise-missing-from", # TRY200
# "redefined-builtin", # A001, ruff is way more stricter, needs work
"try-except-raise", # TRY203
"unused-argument", # ARG001, we don't use it
"unused-format-string-argument", #F507
"unused-format-string-key", # F504
"unused-import", # F401
"unused-variable", # F841
"useless-else-on-loop", # PLW0120
"wildcard-import", # F403
"bad-classmethod-argument", # N804
"consider-iterating-dictionary", # SIM118
"empty-docstring", # D419
"invalid-name", # N815
"line-too-long", # E501, disabled globally
"missing-class-docstring", # D101
"missing-final-newline", # W292
"missing-function-docstring", # D103
"missing-module-docstring", # D100
"multiple-imports", #E401
"singleton-comparison", # E711, E712
"subprocess-run-check", # PLW1510
"superfluous-parens", # UP034
"ungrouped-imports", # I001
"unidiomatic-typecheck", # E721
"unnecessary-direct-lambda-call", # PLC3002
"unnecessary-lambda-assignment", # PLC3001
"unneeded-not", # SIM208
"useless-import-alias", # PLC0414
"wrong-import-order", # I001
"wrong-import-position", # E402
"comparison-of-constants", # PLR0133
"comparison-with-itself", # PLR0124
# "consider-alternative-union-syntax", # UP007, typing extension
"consider-merging-isinstance", # PLR1701
# "consider-using-alias", # UP006, typing extension
"consider-using-dict-comprehension", # C402
"consider-using-generator", # C417
"consider-using-get", # SIM401
"consider-using-set-comprehension", # C401
"consider-using-sys-exit", # PLR1722
"consider-using-ternary", # SIM108
"literal-comparison", # F632
"property-with-parameters", # PLR0206
"super-with-arguments", # UP008
"too-many-branches", # PLR0912
"too-many-return-statements", # PLR0911
"too-many-statements", # PLR0915
"trailing-comma-tuple", # COM818
"unnecessary-comprehension", # C416
"use-a-generator", # C417
"use-dict-literal", # C406
"use-list-literal", # C405
"useless-object-inheritance", # UP004
"useless-return", # PLR1711
# "no-self-use", # PLR6301 # Optional plugin, not enabled
]
[tool.pylint.REPORTS]
score = false
[tool.pylint.TYPECHECK]
ignored-modules = ["distutils"]
[tool.pylint.FORMAT]
expected-line-ending-format = "LF"
[tool.pylint.EXCEPTIONS]
overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"]
[tool.pylint.DESIGN]
max-positional-arguments = 10
[tool.pytest.ini_options]
testpaths = ["tests"]
norecursedirs = [".git"]
log_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(threadName)s %(name)s:%(filename)s:%(lineno)s %(message)s"
log_date_format = "%Y-%m-%d %H:%M:%S"
asyncio_default_fixture_loop_scope = "function"
asyncio_mode = "auto"
filterwarnings = [
"error",
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:dirhash",
"ignore::pytest.PytestUnraisableExceptionWarning",
]
markers = [
"no_mock_init_websession: disable the autouse mock of init_websession for this test",
]
[tool.ruff]
lint.select = [
"B002", # Python does not support the unary prefix increment
"B007", # Loop control variable {name} not used within loop body
"B014", # Exception handler with duplicate exception
"B023", # Function definition does not bind loop variable {name}
"B026", # Star-arg unpacking after a keyword argument is strongly discouraged
"B904", # Use raise from to specify exception cause
"C", # complexity
"COM818", # Trailing comma on bare tuple prohibited
"D", # docstrings
"DTZ003", # Use datetime.now(tz=) instead of datetime.utcnow()
"DTZ004", # Use datetime.fromtimestamp(ts, tz=) instead of datetime.utcfromtimestamp(ts)
"E", # pycodestyle
"F", # pyflakes/autoflake
"G", # flake8-logging-format
"I", # isort
"ICN001", # import concentions; {name} should be imported as {asname}
"N804", # First argument of a class method should be named cls
"N805", # First argument of a method should be named self
"N815", # Variable {name} in class scope should not be mixedCase
"PGH004", # Use specific rule codes when using noqa
"PLC0414", # Useless import alias. Import alias does not rename original package.
"PLC", # pylint
"PLE", # pylint
"PLR", # pylint
"PLW", # pylint
"Q000", # Double quotes found but single quotes preferred
"RUF006", # Store a reference to the return value of asyncio.create_task
"S102", # Use of exec detected
"S103", # bad-file-permissions
"S108", # hardcoded-temp-file
"S306", # suspicious-mktemp-usage
"S307", # suspicious-eval-usage
"S313", # suspicious-xmlc-element-tree-usage
"S314", # suspicious-xml-element-tree-usage
"S315", # suspicious-xml-expat-reader-usage
"S316", # suspicious-xml-expat-builder-usage
"S317", # suspicious-xml-sax-usage
"S318", # suspicious-xml-mini-dom-usage
"S319", # suspicious-xml-pull-dom-usage
"S601", # paramiko-call
"S602", # subprocess-popen-with-shell-equals-true
"S604", # call-with-shell-equals-true
"S608", # hardcoded-sql-expression
"S609", # unix-command-wildcard-injection
"SIM105", # Use contextlib.suppress({exception}) instead of try-except-pass
"SIM117", # Merge with-statements that use the same scope
"SIM118", # Use {key} in {dict} instead of {key} in {dict}.keys()
"SIM201", # Use {left} != {right} instead of not {left} == {right}
"SIM208", # Use {expr} instead of not (not {expr})
"SIM212", # Use {a} if {a} else {b} instead of {b} if not {a} else {a}
"SIM300", # Yoda conditions. Use 'age == 42' instead of '42 == age'.
"SIM401", # Use get from dict with default instead of an if block
"T100", # Trace found: {name} used
"T20", # flake8-print
"TID251", # Banned imports
"TRY004", # Prefer TypeError exception for invalid type
"TRY203", # Remove exception handler; error is immediately re-raised
"UP", # pyupgrade
"W", # pycodestyle
]
lint.ignore = [
"D202", # No blank lines allowed after function docstring
"D203", # 1 blank line required before class docstring
"D213", # Multi-line docstring summary should start at the second line
"D406", # Section name should end with a newline
"D407", # Section name underlining
"E501", # line too long
"E731", # do not assign a lambda expression, use a def
# Ignore ignored, as the rule is now back in preview/nursery, which cannot
# be ignored anymore without warnings.
# https://github.com/astral-sh/ruff/issues/7491
# "PLC1901", # Lots of false positives
# False positives https://github.com/astral-sh/ruff/issues/5386
"PLC0208", # Use a sequence type instead of a `set` when iterating over values
"PLR0911", # Too many return statements ({returns} > {max_returns})
"PLR0912", # Too many branches ({branches} > {max_branches})
"PLR0913", # Too many arguments to function call ({c_args} > {max_args})
"PLR0915", # Too many statements ({statements} > {max_statements})
"PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable
"PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target
"UP006", # keep type annotation style as is
"UP007", # keep type annotation style as is
# Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923
"UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)`
# May conflict with the formatter, https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
"W191",
"E111",
"E114",
"E117",
"D206",
"D300",
"Q000",
"Q001",
"Q002",
"Q003",
"COM812",
"COM819",
"ISC001",
"ISC002",
# Disabled because ruff does not understand type of __all__ generated by a function
"PLE0605",
]
[tool.ruff.lint.flake8-import-conventions.extend-aliases]
voluptuous = "vol"
[tool.ruff.lint.flake8-pytest-style]
fixture-parentheses = false
[tool.ruff.lint.flake8-tidy-imports.banned-api]
"pytz".msg = "use zoneinfo instead"
[tool.ruff.lint.isort]
force-sort-within-sections = true
section-order = [
"future",
"standard-library",
"third-party",
"first-party",
"local-folder",
]
forced-separate = ["tests"]
known-first-party = ["supervisor", "tests"]
combine-as-imports = true
split-on-trailing-comma = false
[tool.ruff.lint.per-file-ignores]
# DBus Service Mocks must use typing and names understood by dbus-fast
"tests/dbus_service_mocks/*.py" = ["F722", "F821", "N815"]
[tool.ruff.lint.mccabe]
max-complexity = 25
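As an illustration of the ruff rules selected above, SIM105 asks for contextlib.suppress in place of a try/except/pass block (example code, not from the repository):

from contextlib import suppress

# Flagged by SIM105:
try:
    int("not a number")
except ValueError:
    pass

# Preferred form:
with suppress(ValueError):
    int("not a number")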

pytest.ini Normal file

@@ -0,0 +1,2 @@
[pytest]
asyncio_mode = auto


@@ -1,30 +1,26 @@
-aiodns==3.5.0
-aiohttp==3.12.14
+aiodns==3.0.0
+aiohttp==3.8.5
+async_timeout==4.0.3
 atomicwrites-homeassistant==1.4.1
-attrs==25.3.0
-awesomeversion==25.5.0
-blockbuster==1.5.25
-brotli==1.1.0
-ciso8601==2.3.2
-colorlog==6.9.0
-cpe==1.3.1
-cryptography==45.0.5
-debugpy==1.8.15
-deepmerge==2.0
-dirhash==0.5.0
-docker==7.1.0
+attrs==23.1.0
+awesomeversion==23.8.0
+brotli==1.0.9
+ciso8601==2.3.0
+colorlog==6.7.0
+cpe==1.2.1
+cryptography==41.0.3
+debugpy==1.6.7
+deepmerge==1.1.0
+dirhash==0.2.1
+docker==6.1.3
 faust-cchardet==2.1.19
-gitpython==3.1.44
-jinja2==3.1.6
-log-rate-limit==1.4.2
-orjson==3.11.0
-pulsectl==24.12.0
-pyudev==0.24.3
-PyYAML==6.0.2
-requests==2.32.4
-securetar==2025.2.1
-sentry-sdk==2.33.2
-setuptools==80.9.0
-voluptuous==0.15.2
-dbus-fast==2.44.2
-zlib-fast==0.2.1
+gitpython==3.1.34
+jinja2==3.1.2
+pulsectl==23.5.2
+pyudev==0.24.1
+ruamel.yaml==0.17.21
+securetar==2023.3.0
+sentry-sdk==1.30.0
+voluptuous==0.13.1
+dbus-fast==1.94.1
+typing_extensions==4.7.1


@@ -1,16 +1,16 @@
-astroid==3.3.11
-coverage==7.9.2
-mypy==1.17.0
-pre-commit==4.2.0
-pylint==3.3.7
-pytest-aiohttp==1.1.0
-pytest-asyncio==0.25.2
-pytest-cov==6.2.1
-pytest-timeout==2.4.0
-pytest==8.4.1
-ruff==0.12.4
-time-machine==2.16.0
-types-docker==7.1.0.20250705
-types-pyyaml==6.0.12.20250516
-types-requests==2.32.4.20250611
-urllib3==2.5.0
+black==23.7.0
+coverage==7.3.0
+flake8-docstrings==1.7.0
+flake8==6.1.0
+pre-commit==3.4.0
+pydocstyle==6.3.0
+pylint==2.17.5
+pytest-aiohttp==1.0.4
+pytest-asyncio==0.18.3
+pytest-cov==4.1.0
+pytest-timeout==2.1.0
+pytest==7.4.1
+pyupgrade==3.10.1
+time-machine==2.12.0
+typing_extensions==4.7.1
+urllib3==2.0.4


@@ -15,7 +15,7 @@ do
     if [[ "${supervisor_state}" = "running" ]]; then

         # Check API
-        if bashio::supervisor.ping > /dev/null; then
+        if bashio::supervisor.ping; then
             failed_count=0
         else
             bashio::log.warning "Maybe found an issue on API healthy"


@@ -1,30 +0,0 @@
#!/usr/bin/env sh
set -eu
# Used in venv activate script.
# Would be an error if undefined.
OSTYPE="${OSTYPE-}"
# Activate pyenv and virtualenv if present, then run the specified command
# pyenv, pyenv-virtualenv
if [ -s .python-version ]; then
PYENV_VERSION=$(head -n 1 .python-version)
export PYENV_VERSION
fi
if [ -n "${VIRTUAL_ENV-}" ] && [ -f "${VIRTUAL_ENV}/bin/activate" ]; then
. "${VIRTUAL_ENV}/bin/activate"
else
# other common virtualenvs
my_path=$(git rev-parse --show-toplevel)
for venv in venv .venv .; do
if [ -f "${my_path}/${venv}/bin/activate" ]; then
. "${my_path}/${venv}/bin/activate"
break
fi
done
fi
exec "$@"

scripts/update-frontend.sh Executable file

@@ -0,0 +1,30 @@
#!/bin/bash
source "/etc/supervisor_scripts/common"
set -e
# Update frontend
git submodule update --init --recursive --remote
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
cd home-assistant-polymer
nvm install
script/bootstrap
# Download translations
start_docker
./script/translations_download
# build frontend
cd hassio
./script/build_hassio
# Copy frontend
rm -rf ../../supervisor/api/panel/*
cp -rf build/* ../../supervisor/api/panel/
# Reset frontend git
cd ..
git reset --hard HEAD
stop_docker

setup.cfg Normal file

@@ -0,0 +1,31 @@
[isort]
multi_line_output = 3
include_trailing_comma=True
force_grid_wrap=0
line_length=88
indent = " "
force_sort_within_sections = true
sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
default_section = THIRDPARTY
forced_separate = tests
combine_as_imports = true
use_parentheses = true
known_first_party = supervisor,tests
[flake8]
exclude = .venv,.git,.tox,docs,venv,bin,lib,deps,build
doctests = True
max-line-length = 88
# E501: line too long
# W503: Line break occurred before a binary operator
# E203: Whitespace before ':'
# D202 No blank lines allowed after function docstring
# W504 line break after binary operator
ignore =
E501,
W503,
E203,
D202,
W504
per-file-ignores =
tests/dbus_service_mocks/*.py: F821,F722


@@ -1,28 +1,60 @@
 """Home Assistant Supervisor setup."""
-from pathlib import Path
-import re
 from setuptools import setup

-RE_SUPERVISOR_VERSION = re.compile(r"^SUPERVISOR_VERSION =\s*(.+)$")
+from supervisor.const import SUPERVISOR_VERSION

-SUPERVISOR_DIR = Path(__file__).parent
-REQUIREMENTS_FILE = SUPERVISOR_DIR / "requirements.txt"
-CONST_FILE = SUPERVISOR_DIR / "supervisor/const.py"
-
-REQUIREMENTS = REQUIREMENTS_FILE.read_text(encoding="utf-8")
-CONSTANTS = CONST_FILE.read_text(encoding="utf-8")
-
-
-def _get_supervisor_version():
-    for line in CONSTANTS.split("\n"):
-        if match := RE_SUPERVISOR_VERSION.match(line):
-            return match.group(1)
-    return "9999.09.9.dev9999"
-
 setup(
-    version=_get_supervisor_version(),
-    dependencies=REQUIREMENTS.split("\n"),
+    name="Supervisor",
+    version=SUPERVISOR_VERSION,
+    license="BSD License",
+    author="The Home Assistant Authors",
+    author_email="hello@home-assistant.io",
+    url="https://home-assistant.io/",
+    description=("Open-source private cloud os for Home-Assistant" " based on HassOS"),
+    long_description=(
+        "A maintainless private cloud operator system that"
+        "setup a Home-Assistant instance. Based on HassOS"
+    ),
+    classifiers=[
+        "Intended Audience :: End Users/Desktop",
+        "Intended Audience :: Developers",
+        "License :: OSI Approved :: Apache Software License",
+        "Operating System :: OS Independent",
+        "Topic :: Home Automation",
+        "Topic :: Software Development :: Libraries :: Python Modules",
+        "Topic :: Scientific/Engineering :: Atmospheric Science",
+        "Development Status :: 5 - Production/Stable",
+        "Intended Audience :: Developers",
+        "Programming Language :: Python :: 3.8",
+    ],
+    keywords=["docker", "home-assistant", "api"],
+    zip_safe=False,
+    platforms="any",
+    packages=[
+        "supervisor.addons",
+        "supervisor.api",
+        "supervisor.backups",
+        "supervisor.dbus.network",
+        "supervisor.dbus.network.setting",
+        "supervisor.dbus",
+        "supervisor.discovery.services",
+        "supervisor.discovery",
+        "supervisor.docker",
+        "supervisor.homeassistant",
+        "supervisor.host",
+        "supervisor.jobs",
+        "supervisor.misc",
+        "supervisor.plugins",
+        "supervisor.resolution.checks",
+        "supervisor.resolution.evaluations",
+        "supervisor.resolution.fixups",
+        "supervisor.resolution",
+        "supervisor.security",
+        "supervisor.services.modules",
+        "supervisor.services",
+        "supervisor.store",
+        "supervisor.utils",
+        "supervisor",
+    ],
+    include_package_data=True,
 )
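A standalone sketch of the version lookup that main's setup.py performs above: the regex greps const.py instead of importing the package at build time. The constant line here is illustrative rather than read from the real supervisor/const.py:

import re

RE_SUPERVISOR_VERSION = re.compile(r"^SUPERVISOR_VERSION =\s*(.+)$")
constants = 'SUPERVISOR_VERSION = "2024.01.0"'  # hypothetical const.py line

for line in constants.split("\n"):
    if match := RE_SUPERVISOR_VERSION.match(line):
        print(match.group(1))  # prints the value including its quotes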


@@ -1,22 +1,11 @@
 """Main file for Supervisor."""
 import asyncio
 from concurrent.futures import ThreadPoolExecutor
 import logging
 from pathlib import Path
 import sys

-import zlib_fast
-
-# Enable fast zlib before importing supervisor
-zlib_fast.enable()
-
-# pylint: disable=wrong-import-position
-from supervisor import bootstrap  # noqa: E402
-from supervisor.utils.blockbuster import BlockBusterManager  # noqa: E402
-from supervisor.utils.logging import activate_log_queue_handler  # noqa: E402
-
-# pylint: enable=wrong-import-position
+from supervisor import bootstrap

 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -49,16 +38,13 @@ if __name__ == "__main__":
     executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
     loop.set_default_executor(executor)

-    activate_log_queue_handler()
-
     _LOGGER.info("Initializing Supervisor setup")
     coresys = loop.run_until_complete(bootstrap.initialize_coresys())
     loop.set_debug(coresys.config.debug)
-    if coresys.config.detect_blocking_io:
-        BlockBusterManager.activate()
     loop.run_until_complete(coresys.core.connect())

-    loop.run_until_complete(bootstrap.supervisor_debugger(coresys))
+    bootstrap.supervisor_debugger(coresys)
+    bootstrap.migrate_system_env(coresys)

     # Signal health startup for container
     run_os_startup_check_cleanup()
@@ -66,15 +52,8 @@ if __name__ == "__main__":
     _LOGGER.info("Setting up Supervisor")
     loop.run_until_complete(coresys.core.setup())

-    bootstrap.register_signal_handlers(loop, coresys)
-
-    try:
-        loop.run_until_complete(coresys.core.start())
-    except Exception as err:  # pylint: disable=broad-except
-        # Supervisor itself is running at this point, just something didn't
-        # start as expected. Log with traceback to get more insights for
-        # such cases.
-        _LOGGER.critical("Supervisor start failed: %s", err, exc_info=True)
+    loop.call_soon_threadsafe(loop.create_task, coresys.core.start())
+    loop.call_soon_threadsafe(bootstrap.reg_signal, loop, coresys)

     try:
         _LOGGER.info("Running Supervisor")


@@ -1 +1,477 @@
 """Init file for Supervisor add-ons."""
import asyncio
from collections.abc import Awaitable
from contextlib import suppress
import logging
import tarfile
from typing import Union
from ..const import AddonBoot, AddonStartup, AddonState
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import (
AddonConfigurationError,
AddonsError,
AddonsJobError,
AddonsNotSupportedError,
CoreDNSError,
DockerAPIError,
DockerError,
DockerNotFound,
HomeAssistantAPIError,
HostAppArmorError,
)
from ..jobs.decorator import Job, JobCondition
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..store.addon import AddonStore
from ..utils import check_exception_chain
from ..utils.sentry import capture_exception
from .addon import Addon
from .const import ADDON_UPDATE_CONDITIONS
from .data import AddonsData
_LOGGER: logging.Logger = logging.getLogger(__name__)
AnyAddon = Union[Addon, AddonStore]
class AddonManager(CoreSysAttributes):
"""Manage add-ons inside Supervisor."""
def __init__(self, coresys: CoreSys):
"""Initialize Docker base wrapper."""
self.coresys: CoreSys = coresys
self.data: AddonsData = AddonsData(coresys)
self.local: dict[str, Addon] = {}
self.store: dict[str, AddonStore] = {}
@property
def all(self) -> list[AnyAddon]:
"""Return a list of all add-ons."""
addons: dict[str, AnyAddon] = {**self.store, **self.local}
return list(addons.values())
@property
def installed(self) -> list[Addon]:
"""Return a list of all installed add-ons."""
return list(self.local.values())
def get(self, addon_slug: str, local_only: bool = False) -> AnyAddon | None:
"""Return an add-on from slug.
Prio:
1 - Local
2 - Store
"""
if addon_slug in self.local:
return self.local[addon_slug]
if not local_only:
return self.store.get(addon_slug)
return None
def from_token(self, token: str) -> Addon | None:
"""Return an add-on from Supervisor token."""
for addon in self.installed:
if token == addon.supervisor_token:
return addon
return None
async def load(self) -> None:
"""Start up add-on management."""
tasks = []
for slug in self.data.system:
addon = self.local[slug] = Addon(self.coresys, slug)
tasks.append(self.sys_create_task(addon.load()))
# Run initial tasks
_LOGGER.info("Found %d installed add-ons", len(tasks))
if tasks:
await asyncio.wait(tasks)
# Sync DNS
await self.sync_dns()
async def boot(self, stage: AddonStartup) -> None:
"""Boot add-ons with mode auto."""
tasks: list[Addon] = []
for addon in self.installed:
if addon.boot != AddonBoot.AUTO or addon.startup != stage:
continue
tasks.append(addon)
# Evaluate add-ons which need to be started
_LOGGER.info("Phase '%s' starting %d add-ons", stage, len(tasks))
if not tasks:
return
# Start Add-ons sequential
# avoid issue on slow IO
# Config.wait_boot is deprecated. Until addons update with healthchecks,
# add a sleep task for it to keep the same minimum amount of wait time
wait_boot: list[Awaitable[None]] = [asyncio.sleep(self.sys_config.wait_boot)]
for addon in tasks:
try:
if start_task := await addon.start():
wait_boot.append(start_task)
except AddonsError as err:
# Check if there is an system/user issue
if check_exception_chain(
err, (DockerAPIError, DockerNotFound, AddonConfigurationError)
):
addon.boot = AddonBoot.MANUAL
addon.save_persist()
except Exception as err: # pylint: disable=broad-except
capture_exception(err)
else:
continue
_LOGGER.warning("Can't start Add-on %s", addon.slug)
# Ignore exceptions from waiting for addon startup, addon errors handled elsewhere
await asyncio.gather(*wait_boot, return_exceptions=True)
async def shutdown(self, stage: AddonStartup) -> None:
"""Shutdown addons."""
tasks: list[Addon] = []
for addon in self.installed:
if addon.state != AddonState.STARTED or addon.startup != stage:
continue
tasks.append(addon)
# Evaluate add-ons which need to be stopped
_LOGGER.info("Phase '%s' stopping %d add-ons", stage, len(tasks))
if not tasks:
return
# Stop Add-ons sequential
# avoid issue on slow IO
for addon in tasks:
try:
await addon.stop()
except Exception as err: # pylint: disable=broad-except
_LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err)
capture_exception(err)
@Job(
name="addon_manager_install",
conditions=ADDON_UPDATE_CONDITIONS,
on_condition=AddonsJobError,
)
async def install(self, slug: str) -> None:
"""Install an add-on."""
self.sys_jobs.current.reference = slug
if slug in self.local:
raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning)
store = self.store.get(slug)
if not store:
raise AddonsError(f"Add-on {slug} does not exist", _LOGGER.error)
store.validate_availability()
self.data.install(store)
addon = Addon(self.coresys, slug)
await addon.load()
if not addon.path_data.is_dir():
_LOGGER.info(
"Creating Home Assistant add-on data folder %s", addon.path_data
)
addon.path_data.mkdir()
# Setup/Fix AppArmor profile
await addon.install_apparmor()
try:
await addon.instance.install(store.version, store.image, arch=addon.arch)
except DockerError as err:
self.data.uninstall(addon)
raise AddonsError() from err
self.local[slug] = addon
# Reload ingress tokens
if addon.with_ingress:
await self.sys_ingress.reload()
_LOGGER.info("Add-on '%s' successfully installed", slug)
async def uninstall(self, slug: str) -> None:
"""Remove an add-on."""
if slug not in self.local:
_LOGGER.warning("Add-on %s is not installed", slug)
return
addon = self.local[slug]
try:
await addon.instance.remove()
except DockerError as err:
raise AddonsError() from err
addon.state = AddonState.UNKNOWN
await addon.unload()
# Cleanup audio settings
if addon.path_pulse.exists():
with suppress(OSError):
addon.path_pulse.unlink()
# Cleanup AppArmor profile
with suppress(HostAppArmorError):
await addon.uninstall_apparmor()
# Cleanup Ingress panel from sidebar
if addon.ingress_panel:
addon.ingress_panel = False
with suppress(HomeAssistantAPIError):
await self.sys_ingress.update_hass_panel(addon)
# Cleanup Ingress dynamic port assignment
if addon.with_ingress:
self.sys_create_task(self.sys_ingress.reload())
self.sys_ingress.del_dynamic_port(slug)
# Cleanup discovery data
for message in self.sys_discovery.list_messages:
if message.addon != addon.slug:
continue
self.sys_discovery.remove(message)
# Cleanup services data
for service in self.sys_services.list_services:
if addon.slug not in service.active:
continue
service.del_service_data(addon)
self.data.uninstall(addon)
self.local.pop(slug)
_LOGGER.info("Add-on '%s' successfully removed", slug)
@Job(
name="addon_manager_update",
conditions=ADDON_UPDATE_CONDITIONS,
on_condition=AddonsJobError,
)
async def update(
self, slug: str, backup: bool | None = False
) -> Awaitable[None] | None:
"""Update add-on.
Returns a coroutine that completes when addon has state 'started' (see addon.start)
if addon is started after update. Else nothing is returned.
"""
self.sys_jobs.current.reference = slug
if slug not in self.local:
raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
addon = self.local[slug]
if addon.is_detached:
raise AddonsError(
f"Add-on {slug} is not available inside store", _LOGGER.error
)
store = self.store[slug]
if addon.version == store.version:
raise AddonsError(f"No update available for add-on {slug}", _LOGGER.warning)
# Check if available, Maybe something have changed
store.validate_availability()
if backup:
await self.sys_backups.do_backup_partial(
name=f"addon_{addon.slug}_{addon.version}",
homeassistant=False,
addons=[addon.slug],
)
# Update instance
last_state: AddonState = addon.state
old_image = addon.image
try:
await addon.instance.update(store.version, store.image)
except DockerError as err:
raise AddonsError() from err
_LOGGER.info("Add-on '%s' successfully updated", slug)
self.data.update(store)
# Cleanup
with suppress(DockerError):
await addon.instance.cleanup(old_image=old_image)
# Setup/Fix AppArmor profile
await addon.install_apparmor()
# restore state
return (
await addon.start()
if last_state in [AddonState.STARTED, AddonState.STARTUP]
else None
)
@Job(
name="addon_manager_rebuild",
conditions=[
JobCondition.FREE_SPACE,
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
],
on_condition=AddonsJobError,
)
async def rebuild(self, slug: str) -> Awaitable[None] | None:
"""Perform a rebuild of local build add-on.
Returns a coroutine that completes when addon has state 'started' (see addon.start)
if addon is started after rebuild. Else nothing is returned.
"""
self.sys_jobs.current.reference = slug
if slug not in self.local:
raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
addon = self.local[slug]
if addon.is_detached:
raise AddonsError(
f"Add-on {slug} is not available inside store", _LOGGER.error
)
store = self.store[slug]
# Check if a rebuild is possible now
if addon.version != store.version:
raise AddonsError(
"Version changed, use Update instead Rebuild", _LOGGER.error
)
if not addon.need_build:
raise AddonsNotSupportedError(
"Can't rebuild a image based add-on", _LOGGER.error
)
# remove docker container but not addon config
last_state: AddonState = addon.state
try:
await addon.instance.remove()
await addon.instance.install(addon.version)
except DockerError as err:
raise AddonsError() from err
self.data.update(store)
_LOGGER.info("Add-on '%s' successfully rebuilt", slug)
# restore state
return (
await addon.start()
if last_state in [AddonState.STARTED, AddonState.STARTUP]
else None
)
@Job(
name="addon_manager_restore",
conditions=[
JobCondition.FREE_SPACE,
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
],
on_condition=AddonsJobError,
)
async def restore(
self, slug: str, tar_file: tarfile.TarFile
) -> Awaitable[None] | None:
"""Restore state of an add-on.
Returns a coroutine that completes when addon has state 'started' (see addon.start)
if addon is started after restore. Else nothing is returned.
"""
self.sys_jobs.current.reference = slug
if slug not in self.local:
_LOGGER.debug("Add-on %s is not local available for restore", slug)
addon = Addon(self.coresys, slug)
else:
_LOGGER.debug("Add-on %s is local available for restore", slug)
addon = self.local[slug]
wait_for_start = await addon.restore(tar_file)
# Check if new
if slug not in self.local:
_LOGGER.info("Detect new Add-on after restore %s", slug)
self.local[slug] = addon
# Update ingress
if addon.with_ingress:
await self.sys_ingress.reload()
with suppress(HomeAssistantAPIError):
await self.sys_ingress.update_hass_panel(addon)
return wait_for_start
@Job(
name="addon_manager_repair",
conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST],
)
async def repair(self) -> None:
"""Repair local add-ons."""
needs_repair: list[Addon] = []
# Evaluate Add-ons to repair
for addon in self.installed:
if await addon.instance.exists():
continue
needs_repair.append(addon)
_LOGGER.info("Found %d add-ons to repair", len(needs_repair))
if not needs_repair:
return
for addon in needs_repair:
_LOGGER.info("Repairing for add-on: %s", addon.slug)
with suppress(DockerError, KeyError):
# Need pull a image again
if not addon.need_build:
await addon.instance.install(addon.version, addon.image)
continue
# Need local lookup
if addon.need_build and not addon.is_detached:
store = self.store[addon.slug]
# If this add-on is available for rebuild
if addon.version == store.version:
await addon.instance.install(addon.version, addon.image)
continue
_LOGGER.error("Can't repair %s", addon.slug)
with suppress(AddonsError):
await self.uninstall(addon.slug)
async def sync_dns(self) -> None:
"""Sync add-ons DNS names."""
# Update hosts
add_host_coros: list[Awaitable[None]] = []
for addon in self.installed:
try:
if not await addon.instance.is_running():
continue
except DockerError as err:
_LOGGER.warning("Add-on %s is corrupt: %s", addon.slug, err)
self.sys_resolution.create_issue(
IssueType.CORRUPT_DOCKER,
ContextType.ADDON,
reference=addon.slug,
suggestions=[SuggestionType.EXECUTE_REPAIR],
)
capture_exception(err)
else:
add_host_coros.append(
self.sys_plugins.dns.add_host(
ipv4=addon.ip_address, names=[addon.hostname], write=False
)
)
await asyncio.gather(*add_host_coros)
# Write hosts files
with suppress(CoreDNSError):
await self.sys_plugins.dns.write_hosts()
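The lookup order documented in AddonManager.get above (installed add-on first, then store entry) can be sketched with plain dicts standing in for the Addon and AddonStore objects:

def get(local: dict, store: dict, addon_slug: str, local_only: bool = False):
    if addon_slug in local:   # 1 - installed add-on wins
        return local[addon_slug]
    if not local_only:        # 2 - fall back to the store entry
        return store.get(addon_slug)
    return None

local = {"core_ssh": "installed"}
store = {"core_ssh": "store", "core_mosquitto": "store"}
assert get(local, store, "core_ssh") == "installed"
assert get(local, store, "core_mosquitto") == "store"
assert get(local, store, "core_mosquitto", local_only=True) is None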

File diff suppressed because it is too large


@@ -1,10 +1,9 @@
 """Supervisor add-on build environment."""
 from __future__ import annotations

 from functools import cached_property
 from pathlib import Path
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING

 from awesomeversion import AwesomeVersion

@@ -15,7 +14,6 @@ from ..const import (
     ATTR_SQUASH,
     FILE_SUFFIX_CONFIGURATION,
     META_ADDON,
-    SOCKET_DOCKER,
 )
 from ..coresys import CoreSys, CoreSysAttributes
 from ..docker.interface import MAP_ARCH
@@ -24,7 +22,7 @@ from ..utils.common import FileConfiguration, find_one_filetype
 from .validate import SCHEMA_BUILD_CONFIG

 if TYPE_CHECKING:
-    from .manager import AnyAddon
+    from . import AnyAddon


 class AddonBuild(FileConfiguration, CoreSysAttributes):
@@ -35,36 +33,23 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
         self.coresys: CoreSys = coresys
         self.addon = addon

-        # Search for build file later in executor
-        super().__init__(None, SCHEMA_BUILD_CONFIG)
-
-    def _get_build_file(self) -> Path:
-        """Get build file.
-
-        Must be run in executor.
-        """
         try:
-            return find_one_filetype(
+            build_file = find_one_filetype(
                 self.addon.path_location, "build", FILE_SUFFIX_CONFIGURATION
             )
         except ConfigurationFileError:
-            return self.addon.path_location / "build.json"
+            build_file = self.addon.path_location / "build.json"

-    async def read_data(self) -> None:
-        """Load data from file."""
-        if not self._file:
-            self._file = await self.sys_run_in_executor(self._get_build_file)
+        super().__init__(build_file, SCHEMA_BUILD_CONFIG)

-        await super().read_data()
-
-    async def save_data(self):
+    def save_data(self):
         """Ignore save function."""
         raise RuntimeError()

     @cached_property
     def arch(self) -> str:
         """Return arch of the add-on."""
-        return self.sys_arch.match([self.addon.arch])
+        return self.sys_arch.match(self.addon.arch)

     @property
     def base_image(self) -> str:
@@ -82,6 +67,13 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
         )
         return self._data[ATTR_BUILD_FROM][self.arch]

+    @property
+    def dockerfile(self) -> Path:
+        """Return Dockerfile path."""
+        if self.addon.path_location.joinpath(f"Dockerfile.{self.arch}").exists():
+            return self.addon.path_location.joinpath(f"Dockerfile.{self.arch}")
+        return self.addon.path_location.joinpath("Dockerfile")
+
     @property
     def squash(self) -> bool:
         """Return True or False if squash is active."""
@@ -97,89 +89,49 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
         """Return additional Docker labels."""
         return self._data[ATTR_LABELS]

-    def get_dockerfile(self) -> Path:
-        """Return Dockerfile path.
-
-        Must be run in executor.
-        """
-        if self.addon.path_location.joinpath(f"Dockerfile.{self.arch}").exists():
-            return self.addon.path_location.joinpath(f"Dockerfile.{self.arch}")
-        return self.addon.path_location.joinpath("Dockerfile")
-
-    async def is_valid(self) -> bool:
+    @property
+    def is_valid(self) -> bool:
         """Return true if the build env is valid."""
-
-        def build_is_valid() -> bool:
+        try:
             return all(
                 [
                     self.addon.path_location.is_dir(),
-                    self.get_dockerfile().is_file(),
+                    self.dockerfile.is_file(),
                 ]
             )
-
-        try:
-            return await self.sys_run_in_executor(build_is_valid)
         except HassioArchNotFound:
             return False

-    def get_docker_args(
-        self, version: AwesomeVersion, image_tag: str
-    ) -> dict[str, Any]:
-        """Create a dict with Docker run args."""
-        dockerfile_path = self.get_dockerfile().relative_to(self.addon.path_location)
-
-        build_cmd = [
-            "docker",
-            "buildx",
-            "build",
-            ".",
-            "--tag",
-            image_tag,
-            "--file",
-            str(dockerfile_path),
-            "--platform",
-            MAP_ARCH[self.arch],
-            "--pull",
-        ]
-
-        labels = {
-            "io.hass.version": version,
-            "io.hass.arch": self.arch,
-            "io.hass.type": META_ADDON,
-            "io.hass.name": self._fix_label("name"),
-            "io.hass.description": self._fix_label("description"),
-            **self.additional_labels,
-        }
-
-        if self.addon.url:
-            labels["io.hass.url"] = self.addon.url
-
-        for key, value in labels.items():
-            build_cmd.extend(["--label", f"{key}={value}"])
-
-        build_args = {
-            "BUILD_FROM": self.base_image,
-            "BUILD_VERSION": version,
-            "BUILD_ARCH": self.sys_arch.default,
-            **self.additional_args,
-        }
-
-        for key, value in build_args.items():
-            build_cmd.extend(["--build-arg", f"{key}={value}"])
-
-        # The addon path will be mounted from the host system
-        addon_extern_path = self.sys_config.local_to_extern_path(
-            self.addon.path_location
-        )
-
-        return {
-            "command": build_cmd,
-            "volumes": {
-                SOCKET_DOCKER: {"bind": "/var/run/docker.sock", "mode": "rw"},
-                addon_extern_path: {"bind": "/addon", "mode": "ro"},
-            },
-            "working_dir": "/addon",
-        }
+    def get_docker_args(self, version: AwesomeVersion):
+        """Create a dict with Docker build arguments."""
+        args = {
+            "path": str(self.addon.path_location),
+            "tag": f"{self.addon.image}:{version!s}",
+            "dockerfile": str(self.dockerfile),
+            "pull": True,
+            "forcerm": not self.sys_dev,
+            "squash": self.squash,
+            "platform": MAP_ARCH[self.arch],
+            "labels": {
+                "io.hass.version": version,
+                "io.hass.arch": self.arch,
+                "io.hass.type": META_ADDON,
+                "io.hass.name": self._fix_label("name"),
+                "io.hass.description": self._fix_label("description"),
+                **self.additional_labels,
+            },
+            "buildargs": {
+                "BUILD_FROM": self.base_image,
+                "BUILD_VERSION": version,
+                "BUILD_ARCH": self.sys_arch.default,
+                **self.additional_args,
+            },
+        }
+
+        if self.addon.url:
+            args["labels"]["io.hass.url"] = self.addon.url
+
+        return args

     def _fix_label(self, label_name: str) -> str:
         """Remove characters they are not supported."""


@@ -1,11 +0,0 @@
"""Confgiuration Objects for Addon Config."""
from dataclasses import dataclass
@dataclass(slots=True)
class FolderMapping:
"""Represent folder mapping configuration."""
path: str | None
read_only: bool


@@ -1,5 +1,4 @@
 """Add-on static data."""
 from datetime import timedelta
 from enum import StrEnum

@@ -13,26 +12,8 @@ class AddonBackupMode(StrEnum):
     COLD = "cold"


-class MappingType(StrEnum):
-    """Mapping type of an Add-on Folder."""
-
-    DATA = "data"
-    CONFIG = "config"
-    SSL = "ssl"
-    ADDONS = "addons"
-    BACKUP = "backup"
-    SHARE = "share"
-    MEDIA = "media"
-    HOMEASSISTANT_CONFIG = "homeassistant_config"
-    ALL_ADDON_CONFIGS = "all_addon_configs"
-    ADDON_CONFIG = "addon_config"
-
-
 ATTR_BACKUP = "backup"
-ATTR_BREAKING_VERSIONS = "breaking_versions"
 ATTR_CODENOTARY = "codenotary"
-ATTR_READ_ONLY = "read_only"
-ATTR_PATH = "path"

 WATCHDOG_RETRY_SECONDS = 10
 WATCHDOG_MAX_ATTEMPTS = 5
 WATCHDOG_THROTTLE_PERIOD = timedelta(minutes=30)
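MappingType (removed in the diff above) is a StrEnum, so members parse from and compare equal to their raw strings (Python 3.11+). A tiny demo of that behaviour, as example usage rather than Supervisor code:

from enum import StrEnum

class MappingType(StrEnum):
    DATA = "data"
    CONFIG = "config"

assert MappingType("config") is MappingType.CONFIG  # parse from a raw string
assert MappingType.DATA == "data"                   # compares equal to str
print(f"mount type: {MappingType.DATA}")            # formats as plain "data"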


@@ -1,5 +1,4 @@
 """Init file for Supervisor add-on data."""
 from copy import deepcopy
 from typing import Any

@@ -38,7 +37,7 @@ class AddonsData(FileConfiguration, CoreSysAttributes):
         """Return local add-on data."""
         return self._data[ATTR_SYSTEM]

-    async def install(self, addon: AddonStore) -> None:
+    def install(self, addon: AddonStore) -> None:
         """Set addon as installed."""
         self.system[addon.slug] = deepcopy(addon.data)
         self.user[addon.slug] = {
@@ -46,28 +45,26 @@ class AddonsData(FileConfiguration, CoreSysAttributes):
             ATTR_VERSION: addon.version,
             ATTR_IMAGE: addon.image,
         }
-        await self.save_data()
+        self.save_data()

-    async def uninstall(self, addon: Addon) -> None:
+    def uninstall(self, addon: Addon) -> None:
         """Set add-on as uninstalled."""
         self.system.pop(addon.slug, None)
         self.user.pop(addon.slug, None)
-        await self.save_data()
+        self.save_data()

-    async def update(self, addon: AddonStore) -> None:
+    def update(self, addon: AddonStore) -> None:
         """Update version of add-on."""
         self.system[addon.slug] = deepcopy(addon.data)
         self.user[addon.slug].update(
             {ATTR_VERSION: addon.version, ATTR_IMAGE: addon.image}
         )
-        await self.save_data()
+        self.save_data()

-    async def restore(
-        self, slug: str, user: Config, system: Config, image: str
-    ) -> None:
+    def restore(self, slug: str, user: Config, system: Config, image: str) -> None:
         """Restore data to add-on."""
         self.user[slug] = deepcopy(user)
         self.system[slug] = deepcopy(system)
         self.user[slug][ATTR_IMAGE] = image
-        await self.save_data()
+        self.save_data()
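On main these persistence methods are coroutines (await self.save_data()), which suggests the blocking file write is pushed off the event loop. A hedged sketch of that pattern, with a simplified stand-in for FileConfiguration (path and schema handling omitted):

import asyncio
import json
from pathlib import Path

class AddonsDataSketch:
    def __init__(self, path: Path) -> None:
        self._file = path
        self._data: dict = {"user": {}, "system": {}}

    async def save_data(self) -> None:
        """Persist data without blocking the event loop."""
        loop = asyncio.get_running_loop()
        # The synchronous write runs in the loop's default executor.
        await loop.run_in_executor(
            None, self._file.write_text, json.dumps(self._data)
        )

asyncio.run(AddonsDataSketch(Path("/tmp/addons.json")).save_data())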


@@ -1,408 +0,0 @@
"""Supervisor add-on manager."""
import asyncio
from collections.abc import Awaitable
from contextlib import suppress
import logging
import tarfile
from typing import Self, Union
from attr import evolve
from ..const import AddonBoot, AddonStartup, AddonState
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import (
AddonsError,
AddonsJobError,
AddonsNotSupportedError,
CoreDNSError,
DockerError,
HassioError,
HomeAssistantAPIError,
)
from ..jobs.decorator import Job, JobCondition
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..store.addon import AddonStore
from ..utils.sentry import async_capture_exception
from .addon import Addon
from .const import ADDON_UPDATE_CONDITIONS
from .data import AddonsData
_LOGGER: logging.Logger = logging.getLogger(__name__)
AnyAddon = Union[Addon, AddonStore]
class AddonManager(CoreSysAttributes):
"""Manage add-ons inside Supervisor."""
def __init__(self, coresys: CoreSys):
"""Initialize Docker base wrapper."""
self.coresys: CoreSys = coresys
self.data: AddonsData = AddonsData(coresys)
self.local: dict[str, Addon] = {}
self.store: dict[str, AddonStore] = {}
@property
def all(self) -> list[AnyAddon]:
"""Return a list of all add-ons."""
addons: dict[str, AnyAddon] = {**self.store, **self.local}
return list(addons.values())
@property
def installed(self) -> list[Addon]:
"""Return a list of all installed add-ons."""
return list(self.local.values())
def get(self, addon_slug: str, local_only: bool = False) -> AnyAddon | None:
"""Return an add-on from slug.
Prio:
1 - Local
2 - Store
"""
if addon_slug in self.local:
return self.local[addon_slug]
if not local_only:
return self.store.get(addon_slug)
return None
def get_local_only(self, addon_slug: str) -> Addon | None:
"""Return an installed add-on from slug."""
return self.local.get(addon_slug)
def from_token(self, token: str) -> Addon | None:
"""Return an add-on from Supervisor token."""
for addon in self.installed:
if token == addon.supervisor_token:
return addon
return None
async def load_config(self) -> Self:
"""Load config in executor."""
await self.data.read_data()
return self
async def load(self) -> None:
"""Start up add-on management."""
# Refresh cache for all store addons
tasks: list[Awaitable[None]] = [
store.refresh_path_cache() for store in self.store.values()
]
# Load all installed addons
for slug in self.data.system:
addon = self.local[slug] = Addon(self.coresys, slug)
tasks.append(addon.load())
# Run initial tasks
_LOGGER.info("Found %d installed add-ons", len(self.data.system))
if tasks:
await asyncio.gather(*tasks)
# Sync DNS
await self.sync_dns()
async def boot(self, stage: AddonStartup) -> None:
"""Boot add-ons with mode auto."""
tasks: list[Addon] = []
for addon in self.installed:
if addon.boot != AddonBoot.AUTO or addon.startup != stage:
continue
tasks.append(addon)
# Evaluate add-ons which need to be started
_LOGGER.info("Phase '%s' starting %d add-ons", stage, len(tasks))
if not tasks:
return
# Start Add-ons sequential
# avoid issue on slow IO
# Config.wait_boot is deprecated. Until addons update with healthchecks,
# add a sleep task for it to keep the same minimum amount of wait time
wait_boot: list[Awaitable[None]] = [asyncio.sleep(self.sys_config.wait_boot)]
for addon in tasks:
try:
if start_task := await addon.start():
wait_boot.append(start_task)
except HassioError:
self.sys_resolution.add_issue(
evolve(addon.boot_failed_issue),
suggestions=[
SuggestionType.EXECUTE_START,
SuggestionType.DISABLE_BOOT,
],
)
else:
continue
_LOGGER.warning("Can't start Add-on %s", addon.slug)
# Ignore exceptions from waiting for addon startup, addon errors handled elsewhere
await asyncio.gather(*wait_boot, return_exceptions=True)
# After waiting for startup, create an issue for boot addons that are error or unknown state
# Ignore stopped as single shot addons can be run at boot and this is successful exit
# Timeout waiting for startup is not a failure, addon is probably just slow
for addon in tasks:
if addon.state in {AddonState.ERROR, AddonState.UNKNOWN}:
self.sys_resolution.add_issue(
evolve(addon.boot_failed_issue),
suggestions=[
SuggestionType.EXECUTE_START,
SuggestionType.DISABLE_BOOT,
],
)
async def shutdown(self, stage: AddonStartup) -> None:
"""Shutdown addons."""
tasks: list[Addon] = []
for addon in self.installed:
if addon.state != AddonState.STARTED or addon.startup != stage:
continue
tasks.append(addon)
# Evaluate add-ons which need to be stopped
_LOGGER.info("Phase '%s' stopping %d add-ons", stage, len(tasks))
if not tasks:
return
# Stop Add-ons sequential
# avoid issue on slow IO
for addon in tasks:
try:
await addon.stop()
except Exception as err: # pylint: disable=broad-except
_LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err)
await async_capture_exception(err)
@Job(
name="addon_manager_install",
conditions=ADDON_UPDATE_CONDITIONS,
on_condition=AddonsJobError,
)
async def install(self, slug: str) -> None:
"""Install an add-on."""
self.sys_jobs.current.reference = slug
if slug in self.local:
raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning)
store = self.store.get(slug)
if not store:
raise AddonsError(f"Add-on {slug} does not exist", _LOGGER.error)
store.validate_availability()
await Addon(self.coresys, slug).install()
_LOGGER.info("Add-on '%s' successfully installed", slug)
@Job(name="addon_manager_uninstall")
async def uninstall(self, slug: str, *, remove_config: bool = False) -> None:
"""Remove an add-on."""
if slug not in self.local:
_LOGGER.warning("Add-on %s is not installed", slug)
return
shared_image = any(
self.local[slug].image == addon.image
and self.local[slug].version == addon.version
for addon in self.installed
if addon.slug != slug
)
await self.local[slug].uninstall(
remove_config=remove_config, remove_image=not shared_image
)
_LOGGER.info("Add-on '%s' successfully removed", slug)
@Job(
name="addon_manager_update",
conditions=ADDON_UPDATE_CONDITIONS,
on_condition=AddonsJobError,
)
async def update(
self, slug: str, backup: bool | None = False
) -> asyncio.Task | None:
"""Update add-on.
Returns a Task that completes when addon has state 'started' (see addon.start)
if addon is started after update. Else nothing is returned.
"""
self.sys_jobs.current.reference = slug
if slug not in self.local:
raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
addon = self.local[slug]
if addon.is_detached:
raise AddonsError(
f"Add-on {slug} is not available inside store", _LOGGER.error
)
store = self.store[slug]
if addon.version == store.version:
raise AddonsError(f"No update available for add-on {slug}", _LOGGER.warning)
# Check if available, Maybe something have changed
store.validate_availability()
if backup:
await self.sys_backups.do_backup_partial(
name=f"addon_{addon.slug}_{addon.version}",
homeassistant=False,
addons=[addon.slug],
)
return await addon.update()
@Job(
name="addon_manager_rebuild",
conditions=[
JobCondition.FREE_SPACE,
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
],
on_condition=AddonsJobError,
)
async def rebuild(self, slug: str, *, force: bool = False) -> asyncio.Task | None:
"""Perform a rebuild of local build add-on.
Returns a Task that completes when addon has state 'started' (see addon.start)
if addon is started after rebuild. Else nothing is returned.
"""
self.sys_jobs.current.reference = slug
if slug not in self.local:
raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
addon = self.local[slug]
if addon.is_detached:
raise AddonsError(
f"Add-on {slug} is not available inside store", _LOGGER.error
)
store = self.store[slug]
# Check if a rebuild is possible now
if addon.version != store.version:
raise AddonsError(
"Version changed, use Update instead Rebuild", _LOGGER.error
)
if not force and not addon.need_build:
raise AddonsNotSupportedError(
"Can't rebuild a image based add-on", _LOGGER.error
)
return await addon.rebuild()
@Job(
name="addon_manager_restore",
conditions=[
JobCondition.FREE_SPACE,
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
],
on_condition=AddonsJobError,
)
async def restore(
self, slug: str, tar_file: tarfile.TarFile
) -> asyncio.Task | None:
"""Restore state of an add-on.
Returns a Task that completes when addon has state 'started' (see addon.start)
if addon is started after restore. Else nothing is returned.
"""
self.sys_jobs.current.reference = slug
if slug not in self.local:
_LOGGER.debug("Add-on %s is not local available for restore", slug)
addon = Addon(self.coresys, slug)
had_ingress: bool | None = False
else:
_LOGGER.debug("Add-on %s is local available for restore", slug)
addon = self.local[slug]
had_ingress = addon.ingress_panel
wait_for_start = await addon.restore(tar_file)
# Check if new
if slug not in self.local:
_LOGGER.info("Detect new Add-on after restore %s", slug)
self.local[slug] = addon
# Update ingress
if had_ingress != addon.ingress_panel:
await self.sys_ingress.reload()
with suppress(HomeAssistantAPIError):
await self.sys_ingress.update_hass_panel(addon)
return wait_for_start
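Restore has to reconcile ingress state: had_ingress is captured before the restore (None/False when the add-on was not installed), and a mismatch afterwards triggers an ingress reload. A tiny sketch of that comparison:

def ingress_needs_reload(had_ingress: bool | None, has_ingress: bool) -> bool:
    # had_ingress is None/False when the add-on was not installed pre-restore.
    return had_ingress != has_ingress

assert ingress_needs_reload(False, True)     # panel appeared: reload
assert not ingress_needs_reload(True, True)  # unchanged: skip the reload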
@Job(
name="addon_manager_repair",
conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST],
)
async def repair(self) -> None:
"""Repair local add-ons."""
needs_repair: list[Addon] = []
# Evaluate Add-ons to repair
for addon in self.installed:
if await addon.instance.exists():
continue
needs_repair.append(addon)
_LOGGER.info("Found %d add-ons to repair", len(needs_repair))
if not needs_repair:
return
for addon in needs_repair:
_LOGGER.info("Repairing for add-on: %s", addon.slug)
with suppress(DockerError, KeyError):
# Need to pull the image again
if not addon.need_build:
await addon.instance.install(addon.version, addon.image)
continue
# Need local lookup
if addon.need_build and not addon.is_detached:
store = self.store[addon.slug]
# If this add-on is available for rebuild
if addon.version == store.version:
await addon.instance.install(addon.version, addon.image)
continue
_LOGGER.error("Can't repair %s", addon.slug)
with suppress(AddonsError):
await self.uninstall(addon.slug)
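The repair loop above effectively implements a three-way decision: re-pull prebuilt images, reinstall local builds whose store version still matches, and uninstall anything unrecoverable. A compact sketch of that decision (names are illustrative, not the real Supervisor API):

def repair_action(
    need_build: bool, is_detached: bool, version: str, store_version: str | None
) -> str:
    if not need_build:
        return "pull-image"          # prebuilt image: just pull it again
    if not is_detached and version == store_version:
        return "rebuild-from-store"  # local build with a matching store version
    return "uninstall"               # nothing left to repair from

assert repair_action(False, False, "1.0", "1.0") == "pull-image"
assert repair_action(True, False, "1.0", "1.0") == "rebuild-from-store"
assert repair_action(True, True, "1.0", None) == "uninstall"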
async def sync_dns(self) -> None:
"""Sync add-ons DNS names."""
# Update hosts
add_host_coros: list[Awaitable[None]] = []
for addon in self.installed:
try:
if not await addon.instance.is_running():
continue
except DockerError as err:
_LOGGER.warning("Add-on %s is corrupt: %s", addon.slug, err)
self.sys_resolution.create_issue(
IssueType.CORRUPT_DOCKER,
ContextType.ADDON,
reference=addon.slug,
suggestions=[SuggestionType.EXECUTE_REPAIR],
)
await async_capture_exception(err)
else:
add_host_coros.append(
self.sys_plugins.dns.add_host(
ipv4=addon.ip_address, names=[addon.hostname], write=False
)
)
await asyncio.gather(*add_host_coros)
# Write hosts files
with suppress(CoreDNSError):
await self.sys_plugins.dns.write_hosts()
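Note the batching pattern in sync_dns: every add_host call passes write=False so the hosts file is written once at the end instead of once per add-on. A minimal sketch of the same pattern, assuming a toy HostsFile class:

import asyncio

class HostsFile:
    # Toy stand-in for the DNS plugin's hosts handling.
    def __init__(self) -> None:
        self.entries: list[tuple[str, list[str]]] = []

    async def add_host(self, ipv4: str, names: list[str], write: bool = True) -> None:
        self.entries.append((ipv4, names))
        if write:
            await self.write_hosts()

    async def write_hosts(self) -> None:
        print(f"writing {len(self.entries)} entries")

async def main() -> None:
    hosts = HostsFile()
    coros = [
        hosts.add_host(f"172.30.33.{i}", [f"addon-{i}.local.hass.io"], write=False)
        for i in range(3)
    ]
    await asyncio.gather(*coros)
    await hosts.write_hosts()  # one write covers all queued add-ons

asyncio.run(main())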

View File

@ -1,18 +1,14 @@
"""Init file for Supervisor add-ons.""" """Init file for Supervisor add-ons."""
from abc import ABC, abstractmethod from abc import ABC, abstractmethod
from collections import defaultdict from collections import defaultdict
from collections.abc import Awaitable, Callable from collections.abc import Awaitable, Callable
from contextlib import suppress from contextlib import suppress
from datetime import datetime
import logging import logging
from pathlib import Path from pathlib import Path
from typing import Any from typing import Any
from awesomeversion import AwesomeVersion, AwesomeVersionException from awesomeversion import AwesomeVersion, AwesomeVersionException
from supervisor.utils.dt import utc_from_timestamp
from ..const import ( from ..const import (
ATTR_ADVANCED, ATTR_ADVANCED,
ATTR_APPARMOR, ATTR_APPARMOR,
@ -47,7 +43,7 @@ from ..const import (
ATTR_JOURNALD, ATTR_JOURNALD,
ATTR_KERNEL_MODULES, ATTR_KERNEL_MODULES,
ATTR_LEGACY, ATTR_LEGACY,
ATTR_LOCATION, ATTR_LOCATON,
ATTR_MACHINE, ATTR_MACHINE,
ATTR_MAP, ATTR_MAP,
ATTR_NAME, ATTR_NAME,
@ -69,13 +65,11 @@ from ..const import (
ATTR_TIMEOUT, ATTR_TIMEOUT,
ATTR_TMPFS, ATTR_TMPFS,
ATTR_TRANSLATIONS, ATTR_TRANSLATIONS,
ATTR_TYPE,
ATTR_UART, ATTR_UART,
ATTR_UDEV, ATTR_UDEV,
ATTR_URL, ATTR_URL,
ATTR_USB, ATTR_USB,
ATTR_VERSION, ATTR_VERSION,
ATTR_VERSION_TIMESTAMP,
ATTR_VIDEO, ATTR_VIDEO,
ATTR_WATCHDOG, ATTR_WATCHDOG,
ATTR_WEBUI, ATTR_WEBUI,
@ -83,7 +77,6 @@ from ..const import (
SECURITY_DISABLE, SECURITY_DISABLE,
SECURITY_PROFILE, SECURITY_PROFILE,
AddonBoot, AddonBoot,
AddonBootConfig,
AddonStage, AddonStage,
AddonStartup, AddonStartup,
) )
@ -92,19 +85,9 @@ from ..docker.const import Capabilities
from ..exceptions import AddonsNotSupportedError from ..exceptions import AddonsNotSupportedError
from ..jobs.const import JOB_GROUP_ADDON from ..jobs.const import JOB_GROUP_ADDON
from ..jobs.job_group import JobGroup from ..jobs.job_group import JobGroup
from ..utils import version_is_new_enough from .const import ATTR_BACKUP, ATTR_CODENOTARY, AddonBackupMode
from .configuration import FolderMapping
from .const import (
ATTR_BACKUP,
ATTR_BREAKING_VERSIONS,
ATTR_CODENOTARY,
ATTR_PATH,
ATTR_READ_ONLY,
AddonBackupMode,
MappingType,
)
from .options import AddonOptions, UiOptions from .options import AddonOptions, UiOptions
from .validate import RE_SERVICE from .validate import RE_SERVICE, RE_VOLUME
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
@ -120,10 +103,6 @@ class AddonModel(JobGroup, ABC):
coresys, JOB_GROUP_ADDON.format_map(defaultdict(str, slug=slug)), slug coresys, JOB_GROUP_ADDON.format_map(defaultdict(str, slug=slug)), slug
) )
self.slug: str = slug self.slug: str = slug
self._path_icon_exists: bool = False
self._path_logo_exists: bool = False
self._path_changelog_exists: bool = False
self._path_documentation_exists: bool = False
@property @property
@abstractmethod @abstractmethod
@ -150,15 +129,10 @@ class AddonModel(JobGroup, ABC):
"""Return options with local changes.""" """Return options with local changes."""
return self.data[ATTR_OPTIONS] return self.data[ATTR_OPTIONS]
@property
def boot_config(self) -> AddonBootConfig:
"""Return boot config."""
return self.data[ATTR_BOOT]
@property @property
def boot(self) -> AddonBoot: def boot(self) -> AddonBoot:
"""Return boot config with prio local settings unless config is forced.""" """Return boot config with prio local settings."""
return AddonBoot(self.data[ATTR_BOOT]) return self.data[ATTR_BOOT]
@property @property
def auto_update(self) -> bool | None: def auto_update(self) -> bool | None:
@ -210,6 +184,18 @@ class AddonModel(JobGroup, ABC):
"""Return description of add-on.""" """Return description of add-on."""
return self.data[ATTR_DESCRIPTON] return self.data[ATTR_DESCRIPTON]
@property
def long_description(self) -> str | None:
"""Return README.md as long_description."""
readme = Path(self.path_location, "README.md")
# If readme not exists
if not readme.exists():
return None
# Return data
return readme.read_text(encoding="utf-8")
@property @property
def repository(self) -> str: def repository(self) -> str:
"""Return repository of add-on.""" """Return repository of add-on."""
@ -225,11 +211,6 @@ class AddonModel(JobGroup, ABC):
"""Return latest version of add-on.""" """Return latest version of add-on."""
return self.data[ATTR_VERSION] return self.data[ATTR_VERSION]
@property
def latest_version_timestamp(self) -> datetime:
"""Return when latest version was first seen."""
return utc_from_timestamp(self.data[ATTR_VERSION_TIMESTAMP])
@property @property
def version(self) -> AwesomeVersion: def version(self) -> AwesomeVersion:
"""Return version of add-on.""" """Return version of add-on."""
@ -294,7 +275,7 @@ class AddonModel(JobGroup, ABC):
return self.data.get(ATTR_WEBUI) return self.data.get(ATTR_WEBUI)
@property @property
def watchdog_url(self) -> str | None: def watchdog(self) -> str | None:
"""Return URL to for watchdog or None.""" """Return URL to for watchdog or None."""
return self.data.get(ATTR_WATCHDOG) return self.data.get(ATTR_WATCHDOG)
@ -510,22 +491,22 @@ class AddonModel(JobGroup, ABC):
@property @property
def with_icon(self) -> bool: def with_icon(self) -> bool:
"""Return True if an icon exists.""" """Return True if an icon exists."""
return self._path_icon_exists return self.path_icon.exists()
@property @property
def with_logo(self) -> bool: def with_logo(self) -> bool:
"""Return True if a logo exists.""" """Return True if a logo exists."""
return self._path_logo_exists return self.path_logo.exists()
@property @property
def with_changelog(self) -> bool: def with_changelog(self) -> bool:
"""Return True if a changelog exists.""" """Return True if a changelog exists."""
return self._path_changelog_exists return self.path_changelog.exists()
@property @property
def with_documentation(self) -> bool: def with_documentation(self) -> bool:
"""Return True if a documentation exists.""" """Return True if a documentation exists."""
return self._path_documentation_exists return self.path_documentation.exists()
@property @property
def supported_arch(self) -> list[str]: def supported_arch(self) -> list[str]:
@ -556,20 +537,21 @@ class AddonModel(JobGroup, ABC):
return ATTR_IMAGE not in self.data return ATTR_IMAGE not in self.data
@property @property
def map_volumes(self) -> dict[MappingType, FolderMapping]: def map_volumes(self) -> dict[str, bool]:
"""Return a dict of {MappingType: FolderMapping} from add-on.""" """Return a dict of {volume: read-only} from add-on."""
volumes = {} volumes = {}
for volume in self.data[ATTR_MAP]: for volume in self.data[ATTR_MAP]:
volumes[MappingType(volume[ATTR_TYPE])] = FolderMapping( result = RE_VOLUME.match(volume)
volume.get(ATTR_PATH), volume[ATTR_READ_ONLY] if not result:
) continue
volumes[result.group(1)] = result.group(2) != "rw"
return volumes return volumes
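For context on the right-hand (2023.09.0) column above: map entries such as config:rw were parsed with RE_VOLUME, and anything without an explicit :rw suffix defaulted to read-only. A self-contained sketch of that old behavior, with the regex copied from validate.py below:

import re

RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share|media)(?::(rw|ro))?$")

def parse_map(entries: list[str]) -> dict[str, bool]:
    """Return {volume: read_only}; anything but an explicit ':rw' is read-only."""
    volumes: dict[str, bool] = {}
    for entry in entries:
        match = RE_VOLUME.match(entry)
        if not match:
            continue  # invalid entries are silently skipped
        volumes[match.group(1)] = match.group(2) != "rw"
    return volumes

assert parse_map(["config:rw", "ssl", "media:ro"]) == {
    "config": False,
    "ssl": True,
    "media": True,
}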
@property @property
def path_location(self) -> Path: def path_location(self) -> Path:
"""Return path to this add-on.""" """Return path to this add-on."""
return Path(self.data[ATTR_LOCATION]) return Path(self.data[ATTR_LOCATON])
@property @property
def path_icon(self) -> Path: def path_icon(self) -> Path:
@ -606,7 +588,7 @@ class AddonModel(JobGroup, ABC):
return AddonOptions(self.coresys, raw_schema, self.name, self.slug) return AddonOptions(self.coresys, raw_schema, self.name, self.slug)
@property @property
def schema_ui(self) -> list[dict[Any, Any]] | None: def schema_ui(self) -> list[dict[any, any]] | None:
"""Create a UI schema for add-on options.""" """Create a UI schema for add-on options."""
raw_schema = self.data[ATTR_SCHEMA] raw_schema = self.data[ATTR_SCHEMA]
@ -629,51 +611,16 @@ class AddonModel(JobGroup, ABC):
"""Return Signer email address for CAS.""" """Return Signer email address for CAS."""
return self.data.get(ATTR_CODENOTARY) return self.data.get(ATTR_CODENOTARY)
@property
def breaking_versions(self) -> list[AwesomeVersion]:
"""Return breaking versions of addon."""
return self.data[ATTR_BREAKING_VERSIONS]
async def long_description(self) -> str | None:
"""Return README.md as long_description."""
def read_readme() -> str | None:
readme = Path(self.path_location, "README.md")
# If readme not exists
if not readme.exists():
return None
# Return data
return readme.read_text(encoding="utf-8")
return await self.sys_run_in_executor(read_readme)
def refresh_path_cache(self) -> Awaitable[None]:
"""Refresh cache of existing paths."""
def check_paths():
self._path_icon_exists = self.path_icon.exists()
self._path_logo_exists = self.path_logo.exists()
self._path_changelog_exists = self.path_changelog.exists()
self._path_documentation_exists = self.path_documentation.exists()
return self.sys_run_in_executor(check_paths)
def validate_availability(self) -> None: def validate_availability(self) -> None:
"""Validate if addon is available for current system.""" """Validate if addon is available for current system."""
return self._validate_availability(self.data, logger=_LOGGER.error) return self._validate_availability(self.data, logger=_LOGGER.error)
def __eq__(self, other: Any) -> bool: def __eq__(self, other):
"""Compare add-on objects.""" """Compaired add-on objects."""
if not isinstance(other, AddonModel): if not isinstance(other, AddonModel):
return False return False
return self.slug == other.slug return self.slug == other.slug
def __hash__(self) -> int:
"""Hash for add-on objects."""
return hash(self.slug)
def _validate_availability( def _validate_availability(
self, config, *, logger: Callable[..., None] | None = None self, config, *, logger: Callable[..., None] | None = None
) -> None: ) -> None:
@ -698,9 +645,7 @@ class AddonModel(JobGroup, ABC):
# Home Assistant # Home Assistant
version: AwesomeVersion | None = config.get(ATTR_HOMEASSISTANT) version: AwesomeVersion | None = config.get(ATTR_HOMEASSISTANT)
with suppress(AwesomeVersionException, TypeError): with suppress(AwesomeVersionException, TypeError):
if version and not version_is_new_enough( if self.sys_homeassistant.version < version:
self.sys_homeassistant.version, version
):
raise AddonsNotSupportedError( raise AddonsNotSupportedError(
f"Add-on {self.slug} not supported on this system, requires Home Assistant version {version} or greater", f"Add-on {self.slug} not supported on this system, requires Home Assistant version {version} or greater",
logger, logger,
@ -724,3 +669,19 @@ class AddonModel(JobGroup, ABC):
# local build # local build
return f"{config[ATTR_REPOSITORY]}/{self.sys_arch.default}-addon-{config[ATTR_SLUG]}" return f"{config[ATTR_REPOSITORY]}/{self.sys_arch.default}-addon-{config[ATTR_SLUG]}"
def install(self) -> Awaitable[None]:
"""Install this add-on."""
return self.sys_addons.install(self.slug)
def uninstall(self) -> Awaitable[None]:
"""Uninstall this add-on."""
return self.sys_addons.uninstall(self.slug)
def update(self, backup: bool | None = False) -> Awaitable[Awaitable[None] | None]:
"""Update this add-on."""
return self.sys_addons.update(self.slug, backup=backup)
def rebuild(self) -> Awaitable[Awaitable[None] | None]:
"""Rebuild this add-on."""
return self.sys_addons.rebuild(self.slug)

View File

@ -1,5 +1,4 @@
"""Add-on Options / UI rendering.""" """Add-on Options / UI rendering."""
import hashlib import hashlib
import logging import logging
from pathlib import Path from pathlib import Path
@ -137,7 +136,7 @@ class AddonOptions(CoreSysAttributes):
) from None ) from None
# prepare range # prepare range
range_args: dict[str, Any] = {} range_args = {}
for group_name in _SCHEMA_LENGTH_PARTS: for group_name in _SCHEMA_LENGTH_PARTS:
group_value = match.group(group_name) group_value = match.group(group_name)
if group_value: if group_value:
@ -390,14 +389,14 @@ class UiOptions(CoreSysAttributes):
multiple: bool = False, multiple: bool = False,
) -> None: ) -> None:
"""UI nested dict items.""" """UI nested dict items."""
ui_node: dict[str, Any] = { ui_node = {
"name": key, "name": key,
"type": "schema", "type": "schema",
"optional": True, "optional": True,
"multiple": multiple, "multiple": multiple,
} }
nested_schema: list[dict[str, Any]] = [] nested_schema = []
for c_key, c_value in option_dict.items(): for c_key, c_value in option_dict.items():
# Nested? # Nested?
if isinstance(c_value, list): if isinstance(c_value, list):
@ -413,7 +412,7 @@ def _create_device_filter(str_filter: str) -> dict[str, Any]:
"""Generate device Filter.""" """Generate device Filter."""
raw_filter = dict(value.split("=") for value in str_filter.split(";")) raw_filter = dict(value.split("=") for value in str_filter.split(";"))
clean_filter: dict[str, Any] = {} clean_filter = {}
for key, value in raw_filter.items(): for key, value in raw_filter.items():
if key == "subsystem": if key == "subsystem":
clean_filter[key] = UdevSubsystem(value) clean_filter[key] = UdevSubsystem(value)

View File

@ -1,10 +1,9 @@
"""Util add-ons functions.""" """Util add-ons functions."""
from __future__ import annotations from __future__ import annotations
import asyncio
import logging import logging
from pathlib import Path from pathlib import Path
import subprocess
from typing import TYPE_CHECKING from typing import TYPE_CHECKING
from ..const import ROLE_ADMIN, ROLE_MANAGER, SECURITY_DISABLE, SECURITY_PROFILE from ..const import ROLE_ADMIN, ROLE_MANAGER, SECURITY_DISABLE, SECURITY_PROFILE
@ -46,7 +45,6 @@ def rating_security(addon: AddonModel) -> int:
privilege in addon.privileged privilege in addon.privileged
for privilege in ( for privilege in (
Capabilities.BPF, Capabilities.BPF,
Capabilities.CHECKPOINT_RESTORE,
Capabilities.DAC_READ_SEARCH, Capabilities.DAC_READ_SEARCH,
Capabilities.NET_ADMIN, Capabilities.NET_ADMIN,
Capabilities.NET_RAW, Capabilities.NET_RAW,
@ -86,20 +84,18 @@ def rating_security(addon: AddonModel) -> int:
return max(min(8, rating), 1) return max(min(8, rating), 1)
def remove_data(folder: Path) -> None: async def remove_data(folder: Path) -> None:
"""Remove folder and reset privileged. """Remove folder and reset privileged."""
Must be run in executor.
"""
try: try:
subprocess.run( proc = await asyncio.create_subprocess_exec(
["rm", "-rf", str(folder)], stdout=subprocess.DEVNULL, text=True, check=True "rm", "-rf", str(folder), stdout=asyncio.subprocess.DEVNULL
) )
_, error_msg = await proc.communicate()
except OSError as err: except OSError as err:
error_msg = str(err) error_msg = str(err)
except subprocess.CalledProcessError as procerr:
error_msg = procerr.stderr.strip()
else: else:
return if proc.returncode == 0:
return
_LOGGER.error("Can't remove Add-on Data: %s", error_msg) _LOGGER.error("Can't remove Add-on Data: %s", error_msg)

View File

@ -1,5 +1,4 @@
"""Validate add-ons options schema.""" """Validate add-ons options schema."""
import logging import logging
import re import re
import secrets import secrets
@ -55,7 +54,7 @@ from ..const import (
ATTR_KERNEL_MODULES, ATTR_KERNEL_MODULES,
ATTR_LABELS, ATTR_LABELS,
ATTR_LEGACY, ATTR_LEGACY,
ATTR_LOCATION, ATTR_LOCATON,
ATTR_MACHINE, ATTR_MACHINE,
ATTR_MAP, ATTR_MAP,
ATTR_NAME, ATTR_NAME,
@ -79,12 +78,9 @@ from ..const import (
ATTR_STATE, ATTR_STATE,
ATTR_STDIN, ATTR_STDIN,
ATTR_SYSTEM, ATTR_SYSTEM,
ATTR_SYSTEM_MANAGED,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
ATTR_TIMEOUT, ATTR_TIMEOUT,
ATTR_TMPFS, ATTR_TMPFS,
ATTR_TRANSLATIONS, ATTR_TRANSLATIONS,
ATTR_TYPE,
ATTR_UART, ATTR_UART,
ATTR_UDEV, ATTR_UDEV,
ATTR_URL, ATTR_URL,
@ -98,11 +94,11 @@ from ..const import (
ROLE_ALL, ROLE_ALL,
ROLE_DEFAULT, ROLE_DEFAULT,
AddonBoot, AddonBoot,
AddonBootConfig,
AddonStage, AddonStage,
AddonStartup, AddonStartup,
AddonState, AddonState,
) )
from ..discovery.validate import valid_discovery_service
from ..docker.const import Capabilities from ..docker.const import Capabilities
from ..validate import ( from ..validate import (
docker_image, docker_image,
@ -113,23 +109,12 @@ from ..validate import (
uuid_match, uuid_match,
version_tag, version_tag,
) )
from .const import ( from .const import ATTR_BACKUP, ATTR_CODENOTARY, RE_SLUG, AddonBackupMode
ATTR_BACKUP,
ATTR_BREAKING_VERSIONS,
ATTR_CODENOTARY,
ATTR_PATH,
ATTR_READ_ONLY,
RE_SLUG,
AddonBackupMode,
MappingType,
)
from .options import RE_SCHEMA_ELEMENT from .options import RE_SCHEMA_ELEMENT
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
RE_VOLUME = re.compile( RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share|media)(?::(rw|ro))?$")
r"^(data|config|ssl|addons|backup|share|media|homeassistant_config|all_addon_configs|addon_config)(?::(rw|ro))?$"
)
RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$") RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$")
@ -158,7 +143,6 @@ RE_MACHINE = re.compile(
r"|raspberrypi3" r"|raspberrypi3"
r"|raspberrypi4-64" r"|raspberrypi4-64"
r"|raspberrypi4" r"|raspberrypi4"
r"|raspberrypi5-64"
r"|yellow" r"|yellow"
r"|green" r"|green"
r"|tinker" r"|tinker"
@ -193,6 +177,20 @@ def _warn_addon_config(config: dict[str, Any]):
name, name,
) )
invalid_services: list[str] = []
for service in config.get(ATTR_DISCOVERY, []):
try:
valid_discovery_service(service)
except vol.Invalid:
invalid_services.append(service)
if invalid_services:
_LOGGER.warning(
"Add-on lists the following unknown services for discovery: %s. Please report this to the maintainer of %s",
", ".join(invalid_services),
name,
)
return config return config
@ -262,48 +260,6 @@ def _migrate_addon_config(protocol=False):
name, name,
) )
# 2023-11 "map" entries can also be dict to allow path configuration
volumes = []
for entry in config.get(ATTR_MAP, []):
if isinstance(entry, dict):
volumes.append(entry)
if isinstance(entry, str):
result = RE_VOLUME.match(entry)
if not result:
continue
volumes.append(
{
ATTR_TYPE: result.group(1),
ATTR_READ_ONLY: result.group(2) != "rw",
}
)
if volumes:
config[ATTR_MAP] = volumes
# 2023-10 "config" became "homeassistant" so /config can be used for addon's public config
if any(volume[ATTR_TYPE] == MappingType.CONFIG for volume in volumes):
if any(
volume
and volume[ATTR_TYPE]
in {MappingType.ADDON_CONFIG, MappingType.HOMEASSISTANT_CONFIG}
for volume in volumes
):
_LOGGER.warning(
"Add-on config using incompatible map options, '%s' and '%s' are ignored if '%s' is included. Please report this to the maintainer of %s",
MappingType.ADDON_CONFIG,
MappingType.HOMEASSISTANT_CONFIG,
MappingType.CONFIG,
name,
)
else:
_LOGGER.debug(
"Add-on config using deprecated map option '%s' instead of '%s'. Please report this to the maintainer of %s",
MappingType.CONFIG,
MappingType.HOMEASSISTANT_CONFIG,
name,
)
return config return config
return _migrate return _migrate
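The 2023-11 migration above normalizes legacy string map entries into dicts while passing dict entries through untouched. A minimal sketch of that conversion; the dict keys mirror ATTR_TYPE/ATTR_READ_ONLY, and the function name is illustrative:

import re

RE_VOLUME = re.compile(
    r"^(data|config|ssl|addons|backup|share|media|homeassistant_config"
    r"|all_addon_configs|addon_config)(?::(rw|ro))?$"
)

def migrate_map(entries: list) -> list[dict]:
    volumes: list[dict] = []
    for entry in entries:
        if isinstance(entry, dict):
            volumes.append(entry)  # already in the new format
        elif isinstance(entry, str) and (result := RE_VOLUME.match(entry)):
            volumes.append(
                {"type": result.group(1), "read_only": result.group(2) != "rw"}
            )
    return volumes

assert migrate_map(["config:rw", {"type": "share"}]) == [
    {"type": "config", "read_only": False},
    {"type": "share"},
]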
@ -322,9 +278,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
vol.Optional(ATTR_STARTUP, default=AddonStartup.APPLICATION): vol.Coerce( vol.Optional(ATTR_STARTUP, default=AddonStartup.APPLICATION): vol.Coerce(
AddonStartup AddonStartup
), ),
vol.Optional(ATTR_BOOT, default=AddonBootConfig.AUTO): vol.Coerce( vol.Optional(ATTR_BOOT, default=AddonBoot.AUTO): vol.Coerce(AddonBoot),
AddonBootConfig
),
vol.Optional(ATTR_INIT, default=True): vol.Boolean(), vol.Optional(ATTR_INIT, default=True): vol.Boolean(),
vol.Optional(ATTR_ADVANCED, default=False): vol.Boolean(), vol.Optional(ATTR_ADVANCED, default=False): vol.Boolean(),
vol.Optional(ATTR_STAGE, default=AddonStage.STABLE): vol.Coerce(AddonStage), vol.Optional(ATTR_STAGE, default=AddonStage.STABLE): vol.Coerce(AddonStage),
@ -354,15 +308,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
vol.Optional(ATTR_DEVICES): [str], vol.Optional(ATTR_DEVICES): [str],
vol.Optional(ATTR_UDEV, default=False): vol.Boolean(), vol.Optional(ATTR_UDEV, default=False): vol.Boolean(),
vol.Optional(ATTR_TMPFS, default=False): vol.Boolean(), vol.Optional(ATTR_TMPFS, default=False): vol.Boolean(),
vol.Optional(ATTR_MAP, default=list): [ vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
vol.Schema(
{
vol.Required(ATTR_TYPE): vol.Coerce(MappingType),
vol.Optional(ATTR_READ_ONLY, default=True): bool,
vol.Optional(ATTR_PATH): str,
}
)
],
vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): str}, vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): str},
vol.Optional(ATTR_PRIVILEGED): [vol.Coerce(Capabilities)], vol.Optional(ATTR_PRIVILEGED): [vol.Coerce(Capabilities)],
vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(), vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
@ -414,7 +360,6 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
vol.Coerce(int), vol.Range(min=10, max=300) vol.Coerce(int), vol.Range(min=10, max=300)
), ),
vol.Optional(ATTR_JOURNALD, default=False): vol.Boolean(), vol.Optional(ATTR_JOURNALD, default=False): vol.Boolean(),
vol.Optional(ATTR_BREAKING_VERSIONS, default=list): [version_tag],
}, },
extra=vol.REMOVE_EXTRA, extra=vol.REMOVE_EXTRA,
) )
@ -473,8 +418,6 @@ SCHEMA_ADDON_USER = vol.Schema(
vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(), vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(), vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
vol.Optional(ATTR_WATCHDOG, default=False): vol.Boolean(), vol.Optional(ATTR_WATCHDOG, default=False): vol.Boolean(),
vol.Optional(ATTR_SYSTEM_MANAGED, default=False): vol.Boolean(),
vol.Optional(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY, default=None): vol.Maybe(str),
}, },
extra=vol.REMOVE_EXTRA, extra=vol.REMOVE_EXTRA,
) )
@ -483,7 +426,7 @@ SCHEMA_ADDON_SYSTEM = vol.All(
_migrate_addon_config(), _migrate_addon_config(),
_SCHEMA_ADDON_CONFIG.extend( _SCHEMA_ADDON_CONFIG.extend(
{ {
vol.Required(ATTR_LOCATION): str, vol.Required(ATTR_LOCATON): str,
vol.Required(ATTR_REPOSITORY): str, vol.Required(ATTR_REPOSITORY): str,
vol.Required(ATTR_TRANSLATIONS, default=dict): { vol.Required(ATTR_TRANSLATIONS, default=dict): {
str: SCHEMA_ADDON_TRANSLATIONS str: SCHEMA_ADDON_TRANSLATIONS

View File

@ -1,23 +1,19 @@
"""Init file for Supervisor RESTful API.""" """Init file for Supervisor RESTful API."""
from dataclasses import dataclass
from functools import partial from functools import partial
import logging import logging
from pathlib import Path from pathlib import Path
from typing import Any from typing import Any
from aiohttp import hdrs, web from aiohttp import web
from ..const import SUPERVISOR_DOCKER_NAME, AddonState from ..const import AddonState
from ..coresys import CoreSys, CoreSysAttributes from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import APIAddonNotInstalled, HostNotSupportedError from ..exceptions import APIAddonNotInstalled
from ..utils.sentry import async_capture_exception
from .addons import APIAddons from .addons import APIAddons
from .audio import APIAudio from .audio import APIAudio
from .auth import APIAuth from .auth import APIAuth
from .backups import APIBackups from .backups import APIBackups
from .cli import APICli from .cli import APICli
from .const import CONTENT_TYPE_TEXT
from .discovery import APIDiscovery from .discovery import APIDiscovery
from .dns import APICoreDNS from .dns import APICoreDNS
from .docker import APIDocker from .docker import APIDocker
@ -39,7 +35,7 @@ from .security import APISecurity
from .services import APIServices from .services import APIServices
from .store import APIStore from .store import APIStore
from .supervisor import APISupervisor from .supervisor import APISupervisor
from .utils import api_process, api_process_raw from .utils import api_process
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
@ -48,14 +44,6 @@ MAX_CLIENT_SIZE: int = 1024**2 * 16
MAX_LINE_SIZE: int = 24570 MAX_LINE_SIZE: int = 24570
@dataclass(slots=True, frozen=True)
class StaticResourceConfig:
"""Configuration for a static resource."""
prefix: str
path: Path
class RestAPI(CoreSysAttributes): class RestAPI(CoreSysAttributes):
"""Handle RESTful API for Supervisor.""" """Handle RESTful API for Supervisor."""
@ -78,17 +66,11 @@ class RestAPI(CoreSysAttributes):
) )
# service stuff # service stuff
self._runner: web.AppRunner = web.AppRunner(self.webapp, shutdown_timeout=5) self._runner: web.AppRunner = web.AppRunner(self.webapp)
self._site: web.TCPSite | None = None self._site: web.TCPSite | None = None
# share single host API handler for reuse in logging endpoints
self._api_host: APIHost = APIHost()
self._api_host.coresys = coresys
async def load(self) -> None: async def load(self) -> None:
"""Register REST API Calls.""" """Register REST API Calls."""
static_resource_configs: list[StaticResourceConfig] = []
self._register_addons() self._register_addons()
self._register_audio() self._register_audio()
self._register_auth() self._register_auth()
@ -107,7 +89,7 @@ class RestAPI(CoreSysAttributes):
self._register_network() self._register_network()
self._register_observer() self._register_observer()
self._register_os() self._register_os()
static_resource_configs.extend(self._register_panel()) self._register_panel()
self._register_proxy() self._register_proxy()
self._register_resolution() self._register_resolution()
self._register_root() self._register_root()
@ -116,54 +98,12 @@ class RestAPI(CoreSysAttributes):
self._register_store() self._register_store()
self._register_supervisor() self._register_supervisor()
if static_resource_configs:
def process_configs() -> list[web.StaticResource]:
return [
web.StaticResource(config.prefix, config.path)
for config in static_resource_configs
]
for resource in await self.sys_run_in_executor(process_configs):
self.webapp.router.register_resource(resource)
await self.start() await self.start()
def _register_advanced_logs(self, path: str, syslog_identifier: str):
"""Register logs endpoint for a given path, returning logs for single syslog identifier."""
self.webapp.add_routes(
[
web.get(
f"{path}/logs",
partial(self._api_host.advanced_logs, identifier=syslog_identifier),
),
web.get(
f"{path}/logs/follow",
partial(
self._api_host.advanced_logs,
identifier=syslog_identifier,
follow=True,
),
),
web.get(
f"{path}/logs/boots/{{bootid}}",
partial(self._api_host.advanced_logs, identifier=syslog_identifier),
),
web.get(
f"{path}/logs/boots/{{bootid}}/follow",
partial(
self._api_host.advanced_logs,
identifier=syslog_identifier,
follow=True,
),
),
]
)
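The helper above builds four journald log routes per component from one handler by binding the syslog identifier (and optionally follow=True) with functools.partial. A runnable sketch of the same wiring against a stub handler (route paths and identifier are assumptions):

from functools import partial

from aiohttp import web

async def advanced_logs(
    request: web.Request, identifier: str, follow: bool = False
) -> web.Response:
    # Stub standing in for APIHost.advanced_logs.
    return web.Response(text=f"logs for {identifier}, follow={follow}")

def log_routes(path: str, identifier: str) -> list[web.RouteDef]:
    handler = partial(advanced_logs, identifier=identifier)
    return [
        web.get(f"{path}/logs", handler),
        web.get(f"{path}/logs/follow", partial(handler, follow=True)),
        web.get(f"{path}/logs/boots/{{bootid}}", handler),
        web.get(f"{path}/logs/boots/{{bootid}}/follow", partial(handler, follow=True)),
    ]

app = web.Application()
app.add_routes(log_routes("/dns", "hassio_dns"))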
def _register_host(self) -> None: def _register_host(self) -> None:
"""Register hostcontrol functions.""" """Register hostcontrol functions."""
api_host = self._api_host api_host = APIHost()
api_host.coresys = self.coresys
self.webapp.add_routes( self.webapp.add_routes(
[ [
@ -237,21 +177,15 @@ class RestAPI(CoreSysAttributes):
[ [
web.get("/os/info", api_os.info), web.get("/os/info", api_os.info),
web.post("/os/update", api_os.update), web.post("/os/update", api_os.update),
web.get("/os/config/swap", api_os.config_swap_info),
web.post("/os/config/swap", api_os.config_swap_options),
web.post("/os/config/sync", api_os.config_sync), web.post("/os/config/sync", api_os.config_sync),
web.post("/os/datadisk/move", api_os.migrate_data), web.post("/os/datadisk/move", api_os.migrate_data),
web.get("/os/datadisk/list", api_os.list_data), web.get("/os/datadisk/list", api_os.list_data),
web.post("/os/datadisk/wipe", api_os.wipe_data),
web.post("/os/boot-slot", api_os.set_boot_slot),
] ]
) )
# Boards endpoints # Boards endpoints
self.webapp.add_routes( self.webapp.add_routes(
[ [
web.get("/os/boards/green", api_os.boards_green_info),
web.post("/os/boards/green", api_os.boards_green_options),
web.get("/os/boards/yellow", api_os.boards_yellow_info), web.get("/os/boards/yellow", api_os.boards_yellow_info),
web.post("/os/boards/yellow", api_os.boards_yellow_options), web.post("/os/boards/yellow", api_os.boards_yellow_options),
web.get("/os/boards/{board}", api_os.boards_other_info), web.get("/os/boards/{board}", api_os.boards_other_info),
@ -281,8 +215,6 @@ class RestAPI(CoreSysAttributes):
web.get("/jobs/info", api_jobs.info), web.get("/jobs/info", api_jobs.info),
web.post("/jobs/options", api_jobs.options), web.post("/jobs/options", api_jobs.options),
web.post("/jobs/reset", api_jobs.reset), web.post("/jobs/reset", api_jobs.reset),
web.get("/jobs/{uuid}", api_jobs.job_info),
web.delete("/jobs/{uuid}", api_jobs.remove_job),
] ]
) )
@ -321,11 +253,11 @@ class RestAPI(CoreSysAttributes):
[ [
web.get("/multicast/info", api_multicast.info), web.get("/multicast/info", api_multicast.info),
web.get("/multicast/stats", api_multicast.stats), web.get("/multicast/stats", api_multicast.stats),
web.get("/multicast/logs", api_multicast.logs),
web.post("/multicast/update", api_multicast.update), web.post("/multicast/update", api_multicast.update),
web.post("/multicast/restart", api_multicast.restart), web.post("/multicast/restart", api_multicast.restart),
] ]
) )
self._register_advanced_logs("/multicast", "hassio_multicast")
def _register_hardware(self) -> None: def _register_hardware(self) -> None:
"""Register hardware functions.""" """Register hardware functions."""
@ -345,9 +277,6 @@ class RestAPI(CoreSysAttributes):
api_root.coresys = self.coresys api_root.coresys = self.coresys
self.webapp.add_routes([web.get("/info", api_root.info)]) self.webapp.add_routes([web.get("/info", api_root.info)])
self.webapp.add_routes([web.post("/reload_updates", api_root.reload_updates)])
# Discouraged
self.webapp.add_routes([web.post("/refresh_updates", api_root.refresh_updates)]) self.webapp.add_routes([web.post("/refresh_updates", api_root.refresh_updates)])
self.webapp.add_routes( self.webapp.add_routes(
[web.get("/available_updates", api_root.available_updates)] [web.get("/available_updates", api_root.available_updates)]
@ -401,7 +330,6 @@ class RestAPI(CoreSysAttributes):
web.post("/auth", api_auth.auth), web.post("/auth", api_auth.auth),
web.post("/auth/reset", api_auth.reset), web.post("/auth/reset", api_auth.reset),
web.delete("/auth/cache", api_auth.cache), web.delete("/auth/cache", api_auth.cache),
web.get("/auth/list", api_auth.list_users),
] ]
) )
@ -415,6 +343,7 @@ class RestAPI(CoreSysAttributes):
web.get("/supervisor/ping", api_supervisor.ping), web.get("/supervisor/ping", api_supervisor.ping),
web.get("/supervisor/info", api_supervisor.info), web.get("/supervisor/info", api_supervisor.info),
web.get("/supervisor/stats", api_supervisor.stats), web.get("/supervisor/stats", api_supervisor.stats),
web.get("/supervisor/logs", api_supervisor.logs),
web.post("/supervisor/update", api_supervisor.update), web.post("/supervisor/update", api_supervisor.update),
web.post("/supervisor/reload", api_supervisor.reload), web.post("/supervisor/reload", api_supervisor.reload),
web.post("/supervisor/restart", api_supervisor.restart), web.post("/supervisor/restart", api_supervisor.restart),
@ -423,39 +352,6 @@ class RestAPI(CoreSysAttributes):
] ]
) )
async def get_supervisor_logs(*args, **kwargs):
try:
return await self._api_host.advanced_logs_handler(
*args, identifier=SUPERVISOR_DOCKER_NAME, **kwargs
)
except Exception as err: # pylint: disable=broad-exception-caught
# Supervisor logs are critical, so catch everything, log the exception
# and try to return Docker container logs as the fallback
_LOGGER.exception(
"Failed to get supervisor logs using advanced_logs API"
)
if not isinstance(err, HostNotSupportedError):
# No need to capture HostNotSupportedError to Sentry, the cause
# is known and reported to the user using the resolution center.
await async_capture_exception(err)
kwargs.pop("follow", None) # Follow is not supported for Docker logs
return await api_supervisor.logs(*args, **kwargs)
self.webapp.add_routes(
[
web.get("/supervisor/logs", get_supervisor_logs),
web.get(
"/supervisor/logs/follow",
partial(get_supervisor_logs, follow=True),
),
web.get("/supervisor/logs/boots/{bootid}", get_supervisor_logs),
web.get(
"/supervisor/logs/boots/{bootid}/follow",
partial(get_supervisor_logs, follow=True),
),
]
)
def _register_homeassistant(self) -> None: def _register_homeassistant(self) -> None:
"""Register Home Assistant functions.""" """Register Home Assistant functions."""
api_hass = APIHomeAssistant() api_hass = APIHomeAssistant()
@ -464,6 +360,7 @@ class RestAPI(CoreSysAttributes):
self.webapp.add_routes( self.webapp.add_routes(
[ [
web.get("/core/info", api_hass.info), web.get("/core/info", api_hass.info),
web.get("/core/logs", api_hass.logs),
web.get("/core/stats", api_hass.stats), web.get("/core/stats", api_hass.stats),
web.post("/core/options", api_hass.options), web.post("/core/options", api_hass.options),
web.post("/core/update", api_hass.update), web.post("/core/update", api_hass.update),
@ -475,12 +372,11 @@ class RestAPI(CoreSysAttributes):
] ]
) )
self._register_advanced_logs("/core", "homeassistant")
# Reroute from legacy # Reroute from legacy
self.webapp.add_routes( self.webapp.add_routes(
[ [
web.get("/homeassistant/info", api_hass.info), web.get("/homeassistant/info", api_hass.info),
web.get("/homeassistant/logs", api_hass.logs),
web.get("/homeassistant/stats", api_hass.stats), web.get("/homeassistant/stats", api_hass.stats),
web.post("/homeassistant/options", api_hass.options), web.post("/homeassistant/options", api_hass.options),
web.post("/homeassistant/restart", api_hass.restart), web.post("/homeassistant/restart", api_hass.restart),
@ -492,8 +388,6 @@ class RestAPI(CoreSysAttributes):
] ]
) )
self._register_advanced_logs("/homeassistant", "homeassistant")
def _register_proxy(self) -> None: def _register_proxy(self) -> None:
"""Register Home Assistant API Proxy.""" """Register Home Assistant API Proxy."""
api_proxy = APIProxy() api_proxy = APIProxy()
@ -529,45 +423,24 @@ class RestAPI(CoreSysAttributes):
self.webapp.add_routes( self.webapp.add_routes(
[ [
web.get("/addons", api_addons.list_addons), web.get("/addons", api_addons.list),
web.post("/addons/{addon}/uninstall", api_addons.uninstall), web.post("/addons/{addon}/uninstall", api_addons.uninstall),
web.post("/addons/{addon}/start", api_addons.start), web.post("/addons/{addon}/start", api_addons.start),
web.post("/addons/{addon}/stop", api_addons.stop), web.post("/addons/{addon}/stop", api_addons.stop),
web.post("/addons/{addon}/restart", api_addons.restart), web.post("/addons/{addon}/restart", api_addons.restart),
web.post("/addons/{addon}/options", api_addons.options), web.post("/addons/{addon}/options", api_addons.options),
web.post("/addons/{addon}/sys_options", api_addons.sys_options),
web.post( web.post(
"/addons/{addon}/options/validate", api_addons.options_validate "/addons/{addon}/options/validate", api_addons.options_validate
), ),
web.get("/addons/{addon}/options/config", api_addons.options_config), web.get("/addons/{addon}/options/config", api_addons.options_config),
web.post("/addons/{addon}/rebuild", api_addons.rebuild), web.post("/addons/{addon}/rebuild", api_addons.rebuild),
web.get("/addons/{addon}/logs", api_addons.logs),
web.post("/addons/{addon}/stdin", api_addons.stdin), web.post("/addons/{addon}/stdin", api_addons.stdin),
web.post("/addons/{addon}/security", api_addons.security), web.post("/addons/{addon}/security", api_addons.security),
web.get("/addons/{addon}/stats", api_addons.stats), web.get("/addons/{addon}/stats", api_addons.stats),
] ]
) )
@api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
async def get_addon_logs(request, *args, **kwargs):
addon = api_addons.get_addon_for_request(request)
kwargs["identifier"] = f"addon_{addon.slug}"
return await self._api_host.advanced_logs(request, *args, **kwargs)
self.webapp.add_routes(
[
web.get("/addons/{addon}/logs", get_addon_logs),
web.get(
"/addons/{addon}/logs/follow",
partial(get_addon_logs, follow=True),
),
web.get("/addons/{addon}/logs/boots/{bootid}", get_addon_logs),
web.get(
"/addons/{addon}/logs/boots/{bootid}/follow",
partial(get_addon_logs, follow=True),
),
]
)
# Legacy routing to support requests for not installed addons # Legacy routing to support requests for not installed addons
api_store = APIStore() api_store = APIStore()
api_store.coresys = self.coresys api_store.coresys = self.coresys
@ -597,9 +470,7 @@ class RestAPI(CoreSysAttributes):
web.post("/ingress/session", api_ingress.create_session), web.post("/ingress/session", api_ingress.create_session),
web.post("/ingress/validate_session", api_ingress.validate_session), web.post("/ingress/validate_session", api_ingress.validate_session),
web.get("/ingress/panels", api_ingress.panels), web.get("/ingress/panels", api_ingress.panels),
web.route( web.view("/ingress/{token}/{path:.*}", api_ingress.handler),
hdrs.METH_ANY, "/ingress/{token}/{path:.*}", api_ingress.handler
),
] ]
) )
@ -610,12 +481,10 @@ class RestAPI(CoreSysAttributes):
self.webapp.add_routes( self.webapp.add_routes(
[ [
web.get("/backups", api_backups.list_backups), web.get("/backups", api_backups.list),
web.get("/backups/info", api_backups.info), web.get("/backups/info", api_backups.info),
web.post("/backups/options", api_backups.options), web.post("/backups/options", api_backups.options),
web.post("/backups/reload", api_backups.reload), web.post("/backups/reload", api_backups.reload),
web.post("/backups/freeze", api_backups.freeze),
web.post("/backups/thaw", api_backups.thaw),
web.post("/backups/new/full", api_backups.backup_full), web.post("/backups/new/full", api_backups.backup_full),
web.post("/backups/new/partial", api_backups.backup_partial), web.post("/backups/new/partial", api_backups.backup_partial),
web.post("/backups/new/upload", api_backups.upload), web.post("/backups/new/upload", api_backups.upload),
@ -637,7 +506,7 @@ class RestAPI(CoreSysAttributes):
self.webapp.add_routes( self.webapp.add_routes(
[ [
web.get("/services", api_services.list_services), web.get("/services", api_services.list),
web.get("/services/{service}", api_services.get_service), web.get("/services/{service}", api_services.get_service),
web.post("/services/{service}", api_services.set_service), web.post("/services/{service}", api_services.set_service),
web.delete("/services/{service}", api_services.del_service), web.delete("/services/{service}", api_services.del_service),
@ -651,7 +520,7 @@ class RestAPI(CoreSysAttributes):
self.webapp.add_routes( self.webapp.add_routes(
[ [
web.get("/discovery", api_discovery.list_discovery), web.get("/discovery", api_discovery.list),
web.get("/discovery/{uuid}", api_discovery.get_discovery), web.get("/discovery/{uuid}", api_discovery.get_discovery),
web.delete("/discovery/{uuid}", api_discovery.del_discovery), web.delete("/discovery/{uuid}", api_discovery.del_discovery),
web.post("/discovery", api_discovery.set_discovery), web.post("/discovery", api_discovery.set_discovery),
@ -667,6 +536,7 @@ class RestAPI(CoreSysAttributes):
[ [
web.get("/dns/info", api_dns.info), web.get("/dns/info", api_dns.info),
web.get("/dns/stats", api_dns.stats), web.get("/dns/stats", api_dns.stats),
web.get("/dns/logs", api_dns.logs),
web.post("/dns/update", api_dns.update), web.post("/dns/update", api_dns.update),
web.post("/dns/options", api_dns.options), web.post("/dns/options", api_dns.options),
web.post("/dns/restart", api_dns.restart), web.post("/dns/restart", api_dns.restart),
@ -674,17 +544,18 @@ class RestAPI(CoreSysAttributes):
] ]
) )
self._register_advanced_logs("/dns", "hassio_dns")
def _register_audio(self) -> None: def _register_audio(self) -> None:
"""Register Audio functions.""" """Register Audio functions."""
api_audio = APIAudio() api_audio = APIAudio()
api_audio.coresys = self.coresys api_audio.coresys = self.coresys
api_host = APIHost()
api_host.coresys = self.coresys
self.webapp.add_routes( self.webapp.add_routes(
[ [
web.get("/audio/info", api_audio.info), web.get("/audio/info", api_audio.info),
web.get("/audio/stats", api_audio.stats), web.get("/audio/stats", api_audio.stats),
web.get("/audio/logs", api_audio.logs),
web.post("/audio/update", api_audio.update), web.post("/audio/update", api_audio.update),
web.post("/audio/restart", api_audio.restart), web.post("/audio/restart", api_audio.restart),
web.post("/audio/reload", api_audio.reload), web.post("/audio/reload", api_audio.reload),
@ -697,8 +568,6 @@ class RestAPI(CoreSysAttributes):
] ]
) )
self._register_advanced_logs("/audio", "hassio_audio")
def _register_mounts(self) -> None: def _register_mounts(self) -> None:
"""Register mounts endpoints.""" """Register mounts endpoints."""
api_mounts = APIMounts() api_mounts = APIMounts()
@ -725,6 +594,7 @@ class RestAPI(CoreSysAttributes):
web.get("/store", api_store.store_info), web.get("/store", api_store.store_info),
web.get("/store/addons", api_store.addons_list), web.get("/store/addons", api_store.addons_list),
web.get("/store/addons/{addon}", api_store.addons_addon_info), web.get("/store/addons/{addon}", api_store.addons_addon_info),
web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
web.get("/store/addons/{addon}/icon", api_store.addons_addon_icon), web.get("/store/addons/{addon}/icon", api_store.addons_addon_icon),
web.get("/store/addons/{addon}/logo", api_store.addons_addon_logo), web.get("/store/addons/{addon}/logo", api_store.addons_addon_logo),
web.get( web.get(
@ -746,8 +616,6 @@ class RestAPI(CoreSysAttributes):
"/store/addons/{addon}/update/{version}", "/store/addons/{addon}/update/{version}",
api_store.addons_addon_update, api_store.addons_addon_update,
), ),
# Must be below others since it has a wildcard in resource path
web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
web.post("/store/reload", api_store.reload), web.post("/store/reload", api_store.reload),
web.get("/store/repositories", api_store.repositories_list), web.get("/store/repositories", api_store.repositories_list),
web.get( web.get(
@ -777,9 +645,10 @@ class RestAPI(CoreSysAttributes):
] ]
) )
def _register_panel(self) -> list[StaticResourceConfig]: def _register_panel(self) -> None:
"""Register panel for Home Assistant.""" """Register panel for Home Assistant."""
return [StaticResourceConfig("/app", Path(__file__).parent.joinpath("panel"))] panel_dir = Path(__file__).parent.joinpath("panel")
self.webapp.add_routes([web.static("/app", panel_dir)])
def _register_docker(self) -> None: def _register_docker(self) -> None:
"""Register docker configuration functions.""" """Register docker configuration functions."""
@ -789,7 +658,6 @@ class RestAPI(CoreSysAttributes):
self.webapp.add_routes( self.webapp.add_routes(
[ [
web.get("/docker/info", api_docker.info), web.get("/docker/info", api_docker.info),
web.post("/docker/options", api_docker.options),
web.get("/docker/registries", api_docker.registries), web.get("/docker/registries", api_docker.registries),
web.post("/docker/registries", api_docker.create_registry), web.post("/docker/registries", api_docker.create_registry),
web.delete("/docker/registries/{hostname}", api_docker.remove_registry), web.delete("/docker/registries/{hostname}", api_docker.remove_registry),
@ -799,7 +667,9 @@ class RestAPI(CoreSysAttributes):
async def start(self) -> None: async def start(self) -> None:
"""Run RESTful API webserver.""" """Run RESTful API webserver."""
await self._runner.setup() await self._runner.setup()
self._site = web.TCPSite(self._runner, host="0.0.0.0", port=80) self._site = web.TCPSite(
self._runner, host="0.0.0.0", port=80, shutdown_timeout=5
)
try: try:
await self._site.start() await self._site.start()

View File

@ -1,14 +1,14 @@
"""Init file for Supervisor Home Assistant RESTful API.""" """Init file for Supervisor Home Assistant RESTful API."""
import asyncio import asyncio
from collections.abc import Awaitable from collections.abc import Awaitable
import logging import logging
from typing import Any, TypedDict from typing import Any
from aiohttp import web from aiohttp import web
import voluptuous as vol import voluptuous as vol
from voluptuous.humanize import humanize_error from voluptuous.humanize import humanize_error
from ..addons import AnyAddon
from ..addons.addon import Addon from ..addons.addon import Addon
from ..addons.utils import rating_security from ..addons.utils import rating_security
from ..const import ( from ..const import (
@ -36,7 +36,6 @@ from ..const import (
ATTR_DNS, ATTR_DNS,
ATTR_DOCKER_API, ATTR_DOCKER_API,
ATTR_DOCUMENTATION, ATTR_DOCUMENTATION,
ATTR_FORCE,
ATTR_FULL_ACCESS, ATTR_FULL_ACCESS,
ATTR_GPIO, ATTR_GPIO,
ATTR_HASSIO_API, ATTR_HASSIO_API,
@ -63,6 +62,7 @@ from ..const import (
ATTR_MEMORY_LIMIT, ATTR_MEMORY_LIMIT,
ATTR_MEMORY_PERCENT, ATTR_MEMORY_PERCENT,
ATTR_MEMORY_USAGE, ATTR_MEMORY_USAGE,
ATTR_MESSAGE,
ATTR_NAME, ATTR_NAME,
ATTR_NETWORK, ATTR_NETWORK,
ATTR_NETWORK_DESCRIPTION, ATTR_NETWORK_DESCRIPTION,
@ -71,6 +71,7 @@ from ..const import (
ATTR_OPTIONS, ATTR_OPTIONS,
ATTR_PRIVILEGED, ATTR_PRIVILEGED,
ATTR_PROTECTED, ATTR_PROTECTED,
ATTR_PWNED,
ATTR_RATING, ATTR_RATING,
ATTR_REPOSITORY, ATTR_REPOSITORY,
ATTR_SCHEMA, ATTR_SCHEMA,
@ -80,14 +81,13 @@ from ..const import (
ATTR_STARTUP, ATTR_STARTUP,
ATTR_STATE, ATTR_STATE,
ATTR_STDIN, ATTR_STDIN,
ATTR_SYSTEM_MANAGED,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
ATTR_TRANSLATIONS, ATTR_TRANSLATIONS,
ATTR_UART, ATTR_UART,
ATTR_UDEV, ATTR_UDEV,
ATTR_UPDATE_AVAILABLE, ATTR_UPDATE_AVAILABLE,
ATTR_URL, ATTR_URL,
ATTR_USB, ATTR_USB,
ATTR_VALID,
ATTR_VERSION, ATTR_VERSION,
ATTR_VERSION_LATEST, ATTR_VERSION_LATEST,
ATTR_VIDEO, ATTR_VIDEO,
@ -95,7 +95,6 @@ from ..const import (
ATTR_WEBUI, ATTR_WEBUI,
REQUEST_FROM, REQUEST_FROM,
AddonBoot, AddonBoot,
AddonBootConfig,
) )
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..docker.stats import DockerStats from ..docker.stats import DockerStats
@ -103,13 +102,12 @@ from ..exceptions import (
APIAddonNotInstalled, APIAddonNotInstalled,
APIError, APIError,
APIForbidden, APIForbidden,
APINotFound,
PwnedError, PwnedError,
PwnedSecret, PwnedSecret,
) )
from ..validate import docker_ports from ..validate import docker_ports
from .const import ATTR_BOOT_CONFIG, ATTR_REMOVE_CONFIG, ATTR_SIGNED from .const import ATTR_SIGNED, CONTENT_TYPE_BINARY
from .utils import api_process, api_validate, json_loads from .utils import api_process, api_process_raw, api_validate, json_loads
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
@ -128,37 +126,16 @@ SCHEMA_OPTIONS = vol.Schema(
} }
) )
SCHEMA_SYS_OPTIONS = vol.Schema( # pylint: disable=no-value-for-parameter
{
vol.Optional(ATTR_SYSTEM_MANAGED): vol.Boolean(),
vol.Optional(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY): vol.Maybe(str),
}
)
SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()}) SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()})
SCHEMA_UNINSTALL = vol.Schema(
{vol.Optional(ATTR_REMOVE_CONFIG, default=False): vol.Boolean()}
)
SCHEMA_REBUILD = vol.Schema({vol.Optional(ATTR_FORCE, default=False): vol.Boolean()})
# pylint: enable=no-value-for-parameter
class OptionsValidateResponse(TypedDict):
"""Response object for options validate."""
message: str
valid: bool
pwned: bool | None
class APIAddons(CoreSysAttributes): class APIAddons(CoreSysAttributes):
"""Handle RESTful API for add-on functions.""" """Handle RESTful API for add-on functions."""
def get_addon_for_request(self, request: web.Request) -> Addon: def _extract_addon(self, request: web.Request) -> Addon:
"""Return addon, throw an exception if it doesn't exist.""" """Return addon, throw an exception it it doesn't exist."""
addon_slug: str = request.match_info["addon"] addon_slug: str = request.match_info.get("addon")
# Lookup itself # Lookup itself
if addon_slug == "self": if addon_slug == "self":
@ -169,14 +146,14 @@ class APIAddons(CoreSysAttributes):
addon = self.sys_addons.get(addon_slug) addon = self.sys_addons.get(addon_slug)
if not addon: if not addon:
raise APINotFound(f"Addon {addon_slug} does not exist") raise APIError(f"Addon {addon_slug} does not exist")
if not isinstance(addon, Addon) or not addon.is_installed: if not isinstance(addon, Addon) or not addon.is_installed:
raise APIAddonNotInstalled("Addon is not installed") raise APIAddonNotInstalled("Addon is not installed")
return addon return addon
@api_process @api_process
async def list_addons(self, request: web.Request) -> dict[str, Any]: async def list(self, request: web.Request) -> dict[str, Any]:
"""Return all add-ons or repositories.""" """Return all add-ons or repositories."""
data_addons = [ data_addons = [
{ {
@ -197,7 +174,6 @@ class APIAddons(CoreSysAttributes):
ATTR_URL: addon.url, ATTR_URL: addon.url,
ATTR_ICON: addon.with_icon, ATTR_ICON: addon.with_icon,
ATTR_LOGO: addon.with_logo, ATTR_LOGO: addon.with_logo,
ATTR_SYSTEM_MANAGED: addon.system_managed,
} }
for addon in self.sys_addons.installed for addon in self.sys_addons.installed
] ]
@ -211,7 +187,7 @@ class APIAddons(CoreSysAttributes):
async def info(self, request: web.Request) -> dict[str, Any]: async def info(self, request: web.Request) -> dict[str, Any]:
"""Return add-on information.""" """Return add-on information."""
addon: Addon = self.get_addon_for_request(request) addon: AnyAddon = self._extract_addon(request)
data = { data = {
ATTR_NAME: addon.name, ATTR_NAME: addon.name,
@ -219,14 +195,13 @@ class APIAddons(CoreSysAttributes):
ATTR_HOSTNAME: addon.hostname, ATTR_HOSTNAME: addon.hostname,
ATTR_DNS: addon.dns, ATTR_DNS: addon.dns,
ATTR_DESCRIPTON: addon.description, ATTR_DESCRIPTON: addon.description,
ATTR_LONG_DESCRIPTION: await addon.long_description(), ATTR_LONG_DESCRIPTION: addon.long_description,
ATTR_ADVANCED: addon.advanced, ATTR_ADVANCED: addon.advanced,
ATTR_STAGE: addon.stage, ATTR_STAGE: addon.stage,
ATTR_REPOSITORY: addon.repository, ATTR_REPOSITORY: addon.repository,
ATTR_VERSION_LATEST: addon.latest_version, ATTR_VERSION_LATEST: addon.latest_version,
ATTR_PROTECTED: addon.protected, ATTR_PROTECTED: addon.protected,
ATTR_RATING: rating_security(addon), ATTR_RATING: rating_security(addon),
ATTR_BOOT_CONFIG: addon.boot_config,
ATTR_BOOT: addon.boot, ATTR_BOOT: addon.boot,
ATTR_OPTIONS: addon.options, ATTR_OPTIONS: addon.options,
ATTR_SCHEMA: addon.schema_ui, ATTR_SCHEMA: addon.schema_ui,
@ -286,8 +261,6 @@ class APIAddons(CoreSysAttributes):
ATTR_WATCHDOG: addon.watchdog, ATTR_WATCHDOG: addon.watchdog,
ATTR_DEVICES: addon.static_devices ATTR_DEVICES: addon.static_devices
+ [device.path for device in addon.devices], + [device.path for device in addon.devices],
ATTR_SYSTEM_MANAGED: addon.system_managed,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY: addon.system_managed_config_entry,
} }
return data return data
@ -295,7 +268,7 @@ class APIAddons(CoreSysAttributes):
@api_process @api_process
async def options(self, request: web.Request) -> None: async def options(self, request: web.Request) -> None:
"""Store user options for add-on.""" """Store user options for add-on."""
addon = self.get_addon_for_request(request) addon = self._extract_addon(request)
# Update secrets for validation # Update secrets for validation
await self.sys_homeassistant.secrets.reload() await self.sys_homeassistant.secrets.reload()
@ -310,10 +283,6 @@ class APIAddons(CoreSysAttributes):
if ATTR_OPTIONS in body: if ATTR_OPTIONS in body:
addon.options = body[ATTR_OPTIONS] addon.options = body[ATTR_OPTIONS]
if ATTR_BOOT in body: if ATTR_BOOT in body:
if addon.boot_config == AddonBootConfig.MANUAL_ONLY:
raise APIError(
f"Addon {addon.slug} boot option is set to {addon.boot_config} so it cannot be changed"
)
addon.boot = body[ATTR_BOOT] addon.boot = body[ATTR_BOOT]
if ATTR_AUTO_UPDATE in body: if ATTR_AUTO_UPDATE in body:
addon.auto_update = body[ATTR_AUTO_UPDATE] addon.auto_update = body[ATTR_AUTO_UPDATE]
@ -329,27 +298,13 @@ class APIAddons(CoreSysAttributes):
if ATTR_WATCHDOG in body: if ATTR_WATCHDOG in body:
addon.watchdog = body[ATTR_WATCHDOG] addon.watchdog = body[ATTR_WATCHDOG]
await addon.save_persist() addon.save_persist()
@api_process @api_process
async def sys_options(self, request: web.Request) -> None: async def options_validate(self, request: web.Request) -> None:
"""Store system options for an add-on."""
addon = self.get_addon_for_request(request)
# Validate/Process Body
body = await api_validate(SCHEMA_SYS_OPTIONS, request)
if ATTR_SYSTEM_MANAGED in body:
addon.system_managed = body[ATTR_SYSTEM_MANAGED]
if ATTR_SYSTEM_MANAGED_CONFIG_ENTRY in body:
addon.system_managed_config_entry = body[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY]
await addon.save_persist()
@api_process
async def options_validate(self, request: web.Request) -> OptionsValidateResponse:
"""Validate user options for add-on.""" """Validate user options for add-on."""
addon = self.get_addon_for_request(request) addon = self._extract_addon(request)
data = OptionsValidateResponse(message="", valid=True, pwned=False) data = {ATTR_MESSAGE: "", ATTR_VALID: True, ATTR_PWNED: False}
options = await request.json(loads=json_loads) or addon.options options = await request.json(loads=json_loads) or addon.options
@ -358,8 +313,8 @@ class APIAddons(CoreSysAttributes):
try: try:
options_schema.validate(options) options_schema.validate(options)
except vol.Invalid as ex: except vol.Invalid as ex:
data["message"] = humanize_error(options, ex) data[ATTR_MESSAGE] = humanize_error(options, ex)
data["valid"] = False data[ATTR_VALID] = False
if not self.sys_security.pwned: if not self.sys_security.pwned:
return data return data
@ -370,27 +325,27 @@ class APIAddons(CoreSysAttributes):
await self.sys_security.verify_secret(secret) await self.sys_security.verify_secret(secret)
continue continue
except PwnedSecret: except PwnedSecret:
data["pwned"] = True data[ATTR_PWNED] = True
except PwnedError: except PwnedError:
data["pwned"] = None data[ATTR_PWNED] = None
break break
if self.sys_security.force and data["pwned"] in (None, True): if self.sys_security.force and data[ATTR_PWNED] in (None, True):
data["valid"] = False data[ATTR_VALID] = False
if data["pwned"] is None: if data[ATTR_PWNED] is None:
data["message"] = "Error happening on pwned secrets check!" data[ATTR_MESSAGE] = "Error happening on pwned secrets check!"
else: else:
data["message"] = "Add-on uses pwned secrets!" data[ATTR_MESSAGE] = "Add-on uses pwned secrets!"
return data return data
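The pwned-secret aggregation above distinguishes three outcomes per secret: clean (False), leaked (True), and check failed (None); only with security force enabled do the last two invalidate the options. A sketch of that final aggregation step (function name hypothetical):

def finalize(valid: bool, pwned: bool | None, force: bool) -> tuple[bool, str]:
    # pwned: False = clean, True = found in a leak, None = check failed.
    if force and pwned in (None, True):
        if pwned is None:
            return False, "Error happening on pwned secrets check!"
        return False, "Add-on uses pwned secrets!"
    return valid, ""

assert finalize(True, True, force=True) == (False, "Add-on uses pwned secrets!")
assert finalize(True, None, force=False) == (True, "")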
@api_process @api_process
async def options_config(self, request: web.Request) -> None: async def options_config(self, request: web.Request) -> None:
"""Validate user options for add-on.""" """Validate user options for add-on."""
slug: str = request.match_info["addon"] slug: str = request.match_info.get("addon")
if slug != "self": if slug != "self":
raise APIForbidden("This can be only read by the Add-on itself!") raise APIForbidden("This can be only read by the Add-on itself!")
addon = self.get_addon_for_request(request) addon = self._extract_addon(request)
# Lookup/reload secrets # Lookup/reload secrets
await self.sys_homeassistant.secrets.reload() await self.sys_homeassistant.secrets.reload()
@ -402,19 +357,19 @@ class APIAddons(CoreSysAttributes):
@api_process @api_process
async def security(self, request: web.Request) -> None: async def security(self, request: web.Request) -> None:
"""Store security options for add-on.""" """Store security options for add-on."""
addon = self.get_addon_for_request(request) addon = self._extract_addon(request)
body: dict[str, Any] = await api_validate(SCHEMA_SECURITY, request) body: dict[str, Any] = await api_validate(SCHEMA_SECURITY, request)
if ATTR_PROTECTED in body: if ATTR_PROTECTED in body:
_LOGGER.warning("Changing protected flag for %s!", addon.slug) _LOGGER.warning("Changing protected flag for %s!", addon.slug)
addon.protected = body[ATTR_PROTECTED] addon.protected = body[ATTR_PROTECTED]
await addon.save_persist() addon.save_persist()
@api_process @api_process
async def stats(self, request: web.Request) -> dict[str, Any]: async def stats(self, request: web.Request) -> dict[str, Any]:
"""Return resource information.""" """Return resource information."""
addon = self.get_addon_for_request(request) addon = self._extract_addon(request)
stats: DockerStats = await addon.stats() stats: DockerStats = await addon.stats()
@ -430,51 +385,48 @@ class APIAddons(CoreSysAttributes):
} }
@api_process @api_process
async def uninstall(self, request: web.Request) -> Awaitable[None]: def uninstall(self, request: web.Request) -> Awaitable[None]:
"""Uninstall add-on.""" """Uninstall add-on."""
addon = self.get_addon_for_request(request) addon = self._extract_addon(request)
body: dict[str, Any] = await api_validate(SCHEMA_UNINSTALL, request) return asyncio.shield(addon.uninstall())
return await asyncio.shield(
self.sys_addons.uninstall(
addon.slug, remove_config=body[ATTR_REMOVE_CONFIG]
)
)
@api_process @api_process
async def start(self, request: web.Request) -> None: async def start(self, request: web.Request) -> None:
"""Start add-on.""" """Start add-on."""
addon = self.get_addon_for_request(request) addon = self._extract_addon(request)
if start_task := await asyncio.shield(addon.start()): if start_task := await asyncio.shield(addon.start()):
await start_task await start_task
@api_process @api_process
def stop(self, request: web.Request) -> Awaitable[None]: def stop(self, request: web.Request) -> Awaitable[None]:
"""Stop add-on.""" """Stop add-on."""
addon = self.get_addon_for_request(request) addon = self._extract_addon(request)
return asyncio.shield(addon.stop()) return asyncio.shield(addon.stop())
@api_process @api_process
async def restart(self, request: web.Request) -> None: async def restart(self, request: web.Request) -> None:
"""Restart add-on.""" """Restart add-on."""
addon: Addon = self.get_addon_for_request(request) addon: Addon = self._extract_addon(request)
if start_task := await asyncio.shield(addon.restart()): if start_task := await asyncio.shield(addon.restart()):
await start_task await start_task
@api_process @api_process
async def rebuild(self, request: web.Request) -> None: async def rebuild(self, request: web.Request) -> None:
"""Rebuild local build add-on.""" """Rebuild local build add-on."""
addon = self.get_addon_for_request(request) addon = self._extract_addon(request)
body: dict[str, Any] = await api_validate(SCHEMA_REBUILD, request) if start_task := await asyncio.shield(addon.rebuild()):
if start_task := await asyncio.shield(
self.sys_addons.rebuild(addon.slug, force=body[ATTR_FORCE])
):
await start_task await start_task
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return logs from add-on."""
addon = self._extract_addon(request)
return addon.logs()
@api_process @api_process
async def stdin(self, request: web.Request) -> None: async def stdin(self, request: web.Request) -> None:
"""Write to stdin of add-on.""" """Write to stdin of add-on."""
addon = self.get_addon_for_request(request) addon = self._extract_addon(request)
if not addon.with_stdin: if not addon.with_stdin:
raise APIError(f"STDIN not supported the {addon.slug} add-on") raise APIError(f"STDIN not supported the {addon.slug} add-on")

View File

@@ -1,5 +1,4 @@
 """Init file for Supervisor Audio RESTful API."""
-
 import asyncio
 from collections.abc import Awaitable
 from dataclasses import asdict
@@ -36,7 +35,8 @@ from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
 from ..host.sound import StreamType
 from ..validate import version_tag
-from .utils import api_process, api_validate
+from .const import CONTENT_TYPE_BINARY
+from .utils import api_process, api_process_raw, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -111,6 +111,11 @@ class APIAudio(CoreSysAttributes):
            raise APIError(f"Version {version} is already in use")
        await asyncio.shield(self.sys_plugins.audio.update(version))

+   @api_process_raw(CONTENT_TYPE_BINARY)
+   def logs(self, request: web.Request) -> Awaitable[bytes]:
+       """Return Audio Docker logs."""
+       return self.sys_plugins.audio.logs()
+
    @api_process
    def restart(self, request: web.Request) -> Awaitable[None]:
        """Restart Audio plugin."""

@@ -124,7 +129,7 @@ class APIAudio(CoreSysAttributes):
    @api_process
    async def set_volume(self, request: web.Request) -> None:
        """Set audio volume on stream."""
-       source: StreamType = StreamType(request.match_info["source"])
+       source: StreamType = StreamType(request.match_info.get("source"))
        application: bool = request.path.endswith("application")
        body = await api_validate(SCHEMA_VOLUME, request)

@@ -137,7 +142,7 @@ class APIAudio(CoreSysAttributes):
    @api_process
    async def set_mute(self, request: web.Request) -> None:
        """Mute audio volume on stream."""
-       source: StreamType = StreamType(request.match_info["source"])
+       source: StreamType = StreamType(request.match_info.get("source"))
        application: bool = request.path.endswith("application")
        body = await api_validate(SCHEMA_MUTE, request)

@@ -150,7 +155,7 @@ class APIAudio(CoreSysAttributes):
    @api_process
    async def set_default(self, request: web.Request) -> None:
        """Set audio default stream."""
-       source: StreamType = StreamType(request.match_info["source"])
+       source: StreamType = StreamType(request.match_info.get("source"))
        body = await api_validate(SCHEMA_DEFAULT, request)

        await asyncio.shield(self.sys_host.sound.set_default(source, body[ATTR_NAME]))
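The only change in the three handlers above is how the URL placeholder is read: `request.match_info["source"]` fails fast with `KeyError` if the placeholder is missing, while `.get("source")` returns `None` and defers the failure to the enum conversion. A small sketch of the difference, using a loose stand-in for `StreamType`:

```python
# Sketch: why indexing match_info differs from .get() before enum conversion.
# This StreamType only loosely mirrors supervisor.host.sound.StreamType.
from enum import Enum


class StreamType(Enum):
    INPUT = "input"
    OUTPUT = "output"


match_info: dict[str, str] = {}  # stand-in for request.match_info

try:
    StreamType(match_info["source"])  # fails fast with KeyError
except KeyError as err:
    print("missing placeholder:", err)

try:
    StreamType(match_info.get("source"))  # defers: ValueError, None is no member
except ValueError as err:
    print("invalid stream:", err)
```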

View File

@@ -1,31 +1,18 @@
 """Init file for Supervisor auth/SSO RESTful API."""
-
 import asyncio
-from collections.abc import Awaitable
 import logging
-from typing import Any, cast

 from aiohttp import BasicAuth, web
 from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE, WWW_AUTHENTICATE
-from aiohttp.web import FileField
 from aiohttp.web_exceptions import HTTPUnauthorized
-from multidict import MultiDictProxy
 import voluptuous as vol

 from ..addons.addon import Addon
-from ..const import ATTR_NAME, ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
+from ..const import ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIForbidden
-from .const import (
-    ATTR_GROUP_IDS,
-    ATTR_IS_ACTIVE,
-    ATTR_IS_OWNER,
-    ATTR_LOCAL_ONLY,
-    ATTR_USERS,
-    CONTENT_TYPE_JSON,
-    CONTENT_TYPE_URL,
-)
-from .utils import api_process, api_validate, json_loads
+from .const import CONTENT_TYPE_JSON, CONTENT_TYPE_URL
+from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -44,7 +31,7 @@ REALM_HEADER: dict[str, str] = {
 class APIAuth(CoreSysAttributes):
    """Handle RESTful API for auth functions."""

-   def _process_basic(self, request: web.Request, addon: Addon) -> Awaitable[bool]:
+   def _process_basic(self, request: web.Request, addon: Addon) -> bool:
        """Process login request with basic auth.

        Return a coroutine.
@@ -53,11 +40,8 @@ class APIAuth(CoreSysAttributes):
        return self.sys_auth.check_login(addon, auth.login, auth.password)

    def _process_dict(
-       self,
-       request: web.Request,
-       addon: Addon,
-       data: dict[str, Any] | MultiDictProxy[str | bytes | FileField],
-   ) -> Awaitable[bool]:
+       self, request: web.Request, addon: Addon, data: dict[str, str]
+   ) -> bool:
        """Process login with dict data.

        Return a coroutine.
@@ -65,22 +49,14 @@ class APIAuth(CoreSysAttributes):
        username = data.get("username") or data.get("user")
        password = data.get("password")

-       # Test that we did receive strings and not something else, raise if so
-       try:
-           _ = username.encode and password.encode  # type: ignore
-       except AttributeError:
-           raise HTTPUnauthorized(headers=REALM_HEADER) from None
-
-       return self.sys_auth.check_login(
-           addon, cast(str, username), cast(str, password)
-       )
+       return self.sys_auth.check_login(addon, username, password)

    @api_process
    async def auth(self, request: web.Request) -> bool:
        """Process login request."""
        addon = request[REQUEST_FROM]
-       if not isinstance(addon, Addon) or not addon.access_auth_api:
+       if not addon.access_auth_api:
            raise APIForbidden("Can't use Home Assistant auth!")

        # BasicAuth
@@ -91,19 +67,14 @@ class APIAuth(CoreSysAttributes):
        # Json
        if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON:
-           data = await request.json(loads=json_loads)
-           if not await self._process_dict(request, addon, data):
-               raise HTTPUnauthorized()
-           return True
+           data = await request.json()
+           return await self._process_dict(request, addon, data)

        # URL encoded
        if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_URL:
            data = await request.post()
-           if not await self._process_dict(request, addon, data):
-               raise HTTPUnauthorized()
-           return True
+           return await self._process_dict(request, addon, data)

-       # Advertise Basic authentication by default
        raise HTTPUnauthorized(headers=REALM_HEADER)

    @api_process
@@ -117,22 +88,4 @@ class APIAuth(CoreSysAttributes):
    @api_process
    async def cache(self, request: web.Request) -> None:
        """Process cache reset request."""
-       await self.sys_auth.reset_data()
-
-   @api_process
-   async def list_users(self, request: web.Request) -> dict[str, list[dict[str, Any]]]:
-       """List users on the Home Assistant instance."""
-       return {
-           ATTR_USERS: [
-               {
-                   ATTR_USERNAME: user[ATTR_USERNAME],
-                   ATTR_NAME: user[ATTR_NAME],
-                   ATTR_IS_OWNER: user[ATTR_IS_OWNER],
-                   ATTR_IS_ACTIVE: user[ATTR_IS_ACTIVE],
-                   ATTR_LOCAL_ONLY: user[ATTR_LOCAL_ONLY],
-                   ATTR_GROUP_IDS: user[ATTR_GROUP_IDS],
-               }
-               for user in await self.sys_auth.list_users()
-               if user[ATTR_USERNAME]
-           ]
-       }
+       self.sys_auth.reset_data()
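The longer `_process_dict` variant above guards against non-string credentials (for example a JSON number as `password`) by probing for `.encode` before handing the values to `check_login`. A self-contained sketch of that guard, with `PermissionError` standing in for the `HTTPUnauthorized` response:

```python
# Sketch of the "must be str" probe used before check_login(): probing for
# .encode rejects JSON numbers/booleans/null without isinstance chains.
def ensure_credentials(data: dict) -> tuple[str, str]:
    username = data.get("username") or data.get("user")
    password = data.get("password")
    try:
        _ = username.encode and password.encode  # only str has .encode
    except AttributeError:
        raise PermissionError("credentials must be strings") from None
    return username, password


print(ensure_credentials({"username": "jane", "password": "s3cret"}))
try:
    ensure_credentials({"username": "jane", "password": 12345})
except PermissionError as err:
    print(err)
```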

View File

@@ -1,24 +1,15 @@
 """Backups RESTful API."""
-
-from __future__ import annotations
-
 import asyncio
-from collections.abc import Callable
-import errno
-from io import IOBase
 import logging
 from pathlib import Path
 import re
 from tempfile import TemporaryDirectory
-from typing import Any, cast
+from typing import Any

-from aiohttp import BodyPartReader, web
+from aiohttp import web
 from aiohttp.hdrs import CONTENT_DISPOSITION
 import voluptuous as vol
-from voluptuous.humanize import humanize_error

-from ..backups.backup import Backup
-from ..backups.const import LOCATION_CLOUD_BACKUP, LOCATION_TYPE
 from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT, days_until_stale
 from ..const import (
     ATTR_ADDONS,
@@ -27,117 +18,67 @@ from ..const import (
     ATTR_CONTENT,
     ATTR_DATE,
     ATTR_DAYS_UNTIL_STALE,
-    ATTR_EXTRA,
-    ATTR_FILENAME,
     ATTR_FOLDERS,
     ATTR_HOMEASSISTANT,
-    ATTR_HOMEASSISTANT_EXCLUDE_DATABASE,
-    ATTR_JOB_ID,
-    ATTR_LOCATION,
+    ATTR_LOCATON,
     ATTR_NAME,
     ATTR_PASSWORD,
     ATTR_PROTECTED,
     ATTR_REPOSITORIES,
     ATTR_SIZE,
-    ATTR_SIZE_BYTES,
     ATTR_SLUG,
     ATTR_SUPERVISOR_VERSION,
-    ATTR_TIMEOUT,
     ATTR_TYPE,
     ATTR_VERSION,
-    REQUEST_FROM,
-    BusEvent,
-    CoreState,
 )
 from ..coresys import CoreSysAttributes
-from ..exceptions import APIError, APIForbidden, APINotFound
-from ..jobs import JobSchedulerOptions, SupervisorJob
+from ..exceptions import APIError
 from ..mounts.const import MountUsage
-from ..resolution.const import UnhealthyReason
-from .const import (
-    ATTR_ADDITIONAL_LOCATIONS,
-    ATTR_BACKGROUND,
-    ATTR_LOCATION_ATTRIBUTES,
-    ATTR_LOCATIONS,
-    CONTENT_TYPE_TAR,
-)
+from .const import CONTENT_TYPE_TAR
 from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

-ALL_ADDONS_FLAG = "ALL"
-
-LOCATION_LOCAL = ".local"
-
 RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+")
-RE_BACKUP_FILENAME = re.compile(r"^[^\\\/]+\.tar$")

 # Backwards compatible
 # Remove: 2022.08
 _ALL_FOLDERS = ALL_FOLDERS + [FOLDER_HOMEASSISTANT]

-
-def _ensure_list(item: Any) -> list:
-    """Ensure value is a list."""
-    if not isinstance(item, list):
-        return [item]
-    return item
-
-
-def _convert_local_location(item: str | None) -> str | None:
-    """Convert local location value."""
-    if item in {LOCATION_LOCAL, ""}:
-        return None
-    return item
-

 # pylint: disable=no-value-for-parameter
-SCHEMA_FOLDERS = vol.All([vol.In(_ALL_FOLDERS)], vol.Unique())
-SCHEMA_LOCATION = vol.All(vol.Maybe(str), _convert_local_location)
-SCHEMA_LOCATION_LIST = vol.All(_ensure_list, [SCHEMA_LOCATION], vol.Unique())
-
-SCHEMA_RESTORE_FULL = vol.Schema(
+SCHEMA_RESTORE_PARTIAL = vol.Schema(
     {
         vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
-        vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
-        vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION,
+        vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
+        vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
+        vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
     }
 )

-SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend(
-    {
-        vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
-        vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
-        vol.Optional(ATTR_FOLDERS): SCHEMA_FOLDERS,
-    }
-)
+SCHEMA_RESTORE_FULL = vol.Schema({vol.Optional(ATTR_PASSWORD): vol.Maybe(str)})

 SCHEMA_BACKUP_FULL = vol.Schema(
     {
         vol.Optional(ATTR_NAME): str,
-        vol.Optional(ATTR_FILENAME): vol.Match(RE_BACKUP_FILENAME),
         vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
         vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()),
-        vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION_LIST,
-        vol.Optional(ATTR_HOMEASSISTANT_EXCLUDE_DATABASE): vol.Boolean(),
-        vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
-        vol.Optional(ATTR_EXTRA): dict,
+        vol.Optional(ATTR_LOCATON): vol.Maybe(str),
     }
 )

 SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend(
     {
-        vol.Optional(ATTR_ADDONS): vol.Or(
-            ALL_ADDONS_FLAG, vol.All([str], vol.Unique())
-        ),
-        vol.Optional(ATTR_FOLDERS): SCHEMA_FOLDERS,
+        vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
+        vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
         vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
     }
 )

-SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_DAYS_UNTIL_STALE): days_until_stale})
-SCHEMA_FREEZE = vol.Schema({vol.Optional(ATTR_TIMEOUT): vol.All(int, vol.Range(min=1))})
-SCHEMA_REMOVE = vol.Schema({vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION_LIST})
+SCHEMA_OPTIONS = vol.Schema(
+    {
+        vol.Optional(ATTR_DAYS_UNTIL_STALE): days_until_stale,
+    }
+)


 class APIBackups(CoreSysAttributes):
@@ -147,19 +88,9 @@ class APIBackups(CoreSysAttributes):
        """Return backup, throw an exception if it doesn't exist."""
        backup = self.sys_backups.get(request.match_info.get("slug"))
        if not backup:
-           raise APINotFound("Backup does not exist")
+           raise APIError("Backup does not exist")
        return backup

-   def _make_location_attributes(self, backup: Backup) -> dict[str, dict[str, Any]]:
-       """Make location attributes dictionary."""
-       return {
-           loc if loc else LOCATION_LOCAL: {
-               ATTR_PROTECTED: backup.all_locations[loc].protected,
-               ATTR_SIZE_BYTES: backup.all_locations[loc].size_bytes,
-           }
-           for loc in backup.locations
-       }
-
    def _list_backups(self):
        """Return list of backups."""
        return [
@@ -169,11 +100,8 @@ class APIBackups(CoreSysAttributes):
                ATTR_DATE: backup.date,
                ATTR_TYPE: backup.sys_type,
                ATTR_SIZE: backup.size,
-               ATTR_SIZE_BYTES: backup.size_bytes,
-               ATTR_LOCATION: backup.location,
-               ATTR_LOCATIONS: backup.locations,
+               ATTR_LOCATON: backup.location,
                ATTR_PROTECTED: backup.protected,
-               ATTR_LOCATION_ATTRIBUTES: self._make_location_attributes(backup),
                ATTR_COMPRESSED: backup.compressed,
                ATTR_CONTENT: {
                    ATTR_HOMEASSISTANT: backup.homeassistant_version is not None,
@@ -182,11 +110,10 @@ class APIBackups(CoreSysAttributes):
                },
            }
            for backup in self.sys_backups.list_backups
-           if backup.location != LOCATION_CLOUD_BACKUP
        ]

    @api_process
-   async def list_backups(self, request):
+   async def list(self, request):
        """Return backup list."""
        data_backups = self._list_backups()

@@ -212,10 +139,10 @@ class APIBackups(CoreSysAttributes):
        if ATTR_DAYS_UNTIL_STALE in body:
            self.sys_backups.days_until_stale = body[ATTR_DAYS_UNTIL_STALE]

-       await self.sys_backups.save_data()
+       self.sys_backups.save_data()

    @api_process
-   async def reload(self, _):
+   async def reload(self, request):
        """Reload backup list."""
        await asyncio.shield(self.sys_backups.reload())
        return True

@@ -242,338 +169,112 @@ class APIBackups(CoreSysAttributes):
            ATTR_NAME: backup.name,
            ATTR_DATE: backup.date,
            ATTR_SIZE: backup.size,
-           ATTR_SIZE_BYTES: backup.size_bytes,
            ATTR_COMPRESSED: backup.compressed,
            ATTR_PROTECTED: backup.protected,
-           ATTR_LOCATION_ATTRIBUTES: self._make_location_attributes(backup),
            ATTR_SUPERVISOR_VERSION: backup.supervisor_version,
            ATTR_HOMEASSISTANT: backup.homeassistant_version,
-           ATTR_LOCATION: backup.location,
-           ATTR_LOCATIONS: backup.locations,
+           ATTR_LOCATON: backup.location,
            ATTR_ADDONS: data_addons,
            ATTR_REPOSITORIES: backup.repositories,
            ATTR_FOLDERS: backup.folders,
-           ATTR_HOMEASSISTANT_EXCLUDE_DATABASE: backup.homeassistant_exclude_database,
-           ATTR_EXTRA: backup.extra,
        }

-   def _location_to_mount(self, location: str | None) -> LOCATION_TYPE:
-       """Convert a single location to a mount if possible."""
-       if not location or location == LOCATION_CLOUD_BACKUP:
-           return cast(LOCATION_TYPE, location)
-
-       mount = self.sys_mounts.get(location)
-       if mount.usage != MountUsage.BACKUP:
-           raise APIError(
-               f"Mount {mount.name} is not used for backups, cannot backup to there"
-           )
-       return mount
-
-   def _location_field_to_mount(self, body: dict[str, Any]) -> dict[str, Any]:
+   def _location_to_mount(self, body: dict[str, Any]) -> dict[str, Any]:
        """Change location field to mount if necessary."""
-       body[ATTR_LOCATION] = self._location_to_mount(body.get(ATTR_LOCATION))
+       if not body.get(ATTR_LOCATON):
+           return body
+
+       body[ATTR_LOCATON] = self.sys_mounts.get(body[ATTR_LOCATON])
+       if body[ATTR_LOCATON].usage != MountUsage.BACKUP:
+           raise APIError(
+               f"Mount {body[ATTR_LOCATON].name} is not used for backups, cannot backup to there"
+           )
+
        return body

-   def _validate_cloud_backup_location(
-       self, request: web.Request, location: list[str | None] | str | None
-   ) -> None:
-       """Cloud backup location is only available to Home Assistant."""
-       if not isinstance(location, list):
-           location = [location]
-       if (
-           LOCATION_CLOUD_BACKUP in location
-           and request.get(REQUEST_FROM) != self.sys_homeassistant
-       ):
-           raise APIForbidden(
-               f"Location {LOCATION_CLOUD_BACKUP} is only available for Home Assistant"
-           )
-
-   async def _background_backup_task(
-       self, backup_method: Callable, *args, **kwargs
-   ) -> tuple[asyncio.Task, str]:
-       """Start backup task in background and return task and job ID."""
-       event = asyncio.Event()
-       job, backup_task = cast(
-           tuple[SupervisorJob, asyncio.Task],
-           self.sys_jobs.schedule_job(
-               backup_method, JobSchedulerOptions(), *args, **kwargs
-           ),
-       )
-
-       async def release_on_freeze(new_state: CoreState):
-           if new_state == CoreState.FREEZE:
-               event.set()
-
-       # Wait for system to get into freeze state before returning
-       # If the backup fails validation it will raise before getting there
-       listener = self.sys_bus.register_event(
-           BusEvent.SUPERVISOR_STATE_CHANGE, release_on_freeze
-       )
-       try:
-           event_task = self.sys_create_task(event.wait())
-           _, pending = await asyncio.wait(
-               (backup_task, event_task),
-               return_when=asyncio.FIRST_COMPLETED,
-           )
-           # It seems backup returned early (error or something), make sure to cancel
-           # the event task to avoid "Task was destroyed but it is pending!" errors.
-           if event_task in pending:
-               event_task.cancel()
-           return (backup_task, job.uuid)
-       finally:
-           self.sys_bus.remove_listener(listener)
-
    @api_process
-   async def backup_full(self, request: web.Request):
+   async def backup_full(self, request):
        """Create full backup."""
        body = await api_validate(SCHEMA_BACKUP_FULL, request)
-       locations: list[LOCATION_TYPE] | None = None
-
-       if ATTR_LOCATION in body:
-           location_names: list[str | None] = body.pop(ATTR_LOCATION)
-           self._validate_cloud_backup_location(request, location_names)
-
-           locations = [
-               self._location_to_mount(location) for location in location_names
-           ]
-           body[ATTR_LOCATION] = locations.pop(0)
-           if locations:
-               body[ATTR_ADDITIONAL_LOCATIONS] = locations
-
-       background = body.pop(ATTR_BACKGROUND)
-       backup_task, job_id = await self._background_backup_task(
-           self.sys_backups.do_backup_full, **body
-       )
-
-       if background and not backup_task.done():
-           return {ATTR_JOB_ID: job_id}
-
-       backup: Backup = await backup_task
-       if backup:
-           return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug}
-       raise APIError(
-           f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details",
-           job_id=job_id,
-       )
+
+       backup = await asyncio.shield(
+           self.sys_backups.do_backup_full(**self._location_to_mount(body))
+       )
+
+       if backup:
+           return {ATTR_SLUG: backup.slug}
+       return False

    @api_process
-   async def backup_partial(self, request: web.Request):
+   async def backup_partial(self, request):
        """Create a partial backup."""
        body = await api_validate(SCHEMA_BACKUP_PARTIAL, request)
-       locations: list[LOCATION_TYPE] | None = None
-
-       if ATTR_LOCATION in body:
-           location_names: list[str | None] = body.pop(ATTR_LOCATION)
-           self._validate_cloud_backup_location(request, location_names)
-
-           locations = [
-               self._location_to_mount(location) for location in location_names
-           ]
-           body[ATTR_LOCATION] = locations.pop(0)
-           if locations:
-               body[ATTR_ADDITIONAL_LOCATIONS] = locations
-
-       if body.get(ATTR_ADDONS) == ALL_ADDONS_FLAG:
-           body[ATTR_ADDONS] = list(self.sys_addons.local)
-
-       background = body.pop(ATTR_BACKGROUND)
-       backup_task, job_id = await self._background_backup_task(
-           self.sys_backups.do_backup_partial, **body
-       )
-
-       if background and not backup_task.done():
-           return {ATTR_JOB_ID: job_id}
-
-       backup: Backup = await backup_task
-       if backup:
-           return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug}
-       raise APIError(
-           f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details",
-           job_id=job_id,
-       )
+       backup = await asyncio.shield(
+           self.sys_backups.do_backup_partial(**self._location_to_mount(body))
+       )
+
+       if backup:
+           return {ATTR_SLUG: backup.slug}
+       return False

    @api_process
-   async def restore_full(self, request: web.Request):
+   async def restore_full(self, request):
        """Full restore of a backup."""
        backup = self._extract_slug(request)
        body = await api_validate(SCHEMA_RESTORE_FULL, request)
-       self._validate_cloud_backup_location(
-           request, body.get(ATTR_LOCATION, backup.location)
-       )
-       background = body.pop(ATTR_BACKGROUND)
-       restore_task, job_id = await self._background_backup_task(
-           self.sys_backups.do_restore_full, backup, **body
-       )
-
-       if background and not restore_task.done() or await restore_task:
-           return {ATTR_JOB_ID: job_id}
-       raise APIError(
-           f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details",
-           job_id=job_id,
-       )
+
+       return await asyncio.shield(self.sys_backups.do_restore_full(backup, **body))

    @api_process
-   async def restore_partial(self, request: web.Request):
+   async def restore_partial(self, request):
        """Partial restore a backup."""
        backup = self._extract_slug(request)
        body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
-       self._validate_cloud_backup_location(
-           request, body.get(ATTR_LOCATION, backup.location)
-       )
-       background = body.pop(ATTR_BACKGROUND)
-       restore_task, job_id = await self._background_backup_task(
-           self.sys_backups.do_restore_partial, backup, **body
-       )
-
-       if background and not restore_task.done() or await restore_task:
-           return {ATTR_JOB_ID: job_id}
-       raise APIError(
-           f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details",
-           job_id=job_id,
-       )
-
-   @api_process
-   async def freeze(self, request: web.Request):
-       """Initiate manual freeze for external backup."""
-       body = await api_validate(SCHEMA_FREEZE, request)
-       await asyncio.shield(self.sys_backups.freeze_all(**body))
-
-   @api_process
-   async def thaw(self, request: web.Request):
-       """Begin thaw after manual freeze."""
-       await self.sys_backups.thaw_all()
+
+       return await asyncio.shield(self.sys_backups.do_restore_partial(backup, **body))

    @api_process
-   async def remove(self, request: web.Request):
+   async def remove(self, request):
        """Remove a backup."""
        backup = self._extract_slug(request)
-       body = await api_validate(SCHEMA_REMOVE, request)
-       locations: list[LOCATION_TYPE] | None = None
-
-       if ATTR_LOCATION in body:
-           self._validate_cloud_backup_location(request, body[ATTR_LOCATION])
-           locations = [self._location_to_mount(name) for name in body[ATTR_LOCATION]]
-       else:
-           self._validate_cloud_backup_location(request, backup.location)
-
-       await self.sys_backups.remove(backup, locations=locations)
-
-   @api_process
-   async def download(self, request: web.Request):
+       return self.sys_backups.remove(backup)
+
+   async def download(self, request):
        """Download a backup file."""
        backup = self._extract_slug(request)
-       # Query will give us '' for /backups, convert value to None
-       location = _convert_local_location(
-           request.query.get(ATTR_LOCATION, backup.location)
-       )
-       self._validate_cloud_backup_location(request, location)
-       if location not in backup.all_locations:
-           raise APIError(f"Backup {backup.slug} is not in location {location}")

        _LOGGER.info("Downloading backup %s", backup.slug)
-       filename = backup.all_locations[location].path
-       # If the file is missing, return 404 and trigger reload of location
-       if not await self.sys_run_in_executor(filename.is_file):
-           self.sys_create_task(self.sys_backups.reload(location))
-           return web.Response(status=404)
-
-       response = web.FileResponse(filename)
-       response.content_type = CONTENT_TYPE_TAR
-
-       download_filename = filename.name
-       if download_filename == f"{backup.slug}.tar":
-           download_filename = f"{RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
-       response.headers[CONTENT_DISPOSITION] = (
-           f"attachment; filename={download_filename}"
-       )
+       response = web.FileResponse(backup.tarfile)
+       response.content_type = CONTENT_TYPE_TAR
+       response.headers[
+           CONTENT_DISPOSITION
+       ] = f"attachment; filename={RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
        return response

    @api_process
-   async def upload(self, request: web.Request):
+   async def upload(self, request):
        """Upload a backup file."""
-       location: LOCATION_TYPE = None
-       locations: list[LOCATION_TYPE] | None = None
-
-       if ATTR_LOCATION in request.query:
-           location_names: list[str] = request.query.getall(ATTR_LOCATION, [])
-           self._validate_cloud_backup_location(
-               request, cast(list[str | None], location_names)
-           )
-           # Convert empty string to None if necessary
-           locations = [
-               self._location_to_mount(location)
-               if _convert_local_location(location)
-               else None
-               for location in location_names
-           ]
-           location = locations.pop(0)
-
-       filename: str | None = None
-       if ATTR_FILENAME in request.query:
-           filename = request.query.get(ATTR_FILENAME)
-           try:
-               vol.Match(RE_BACKUP_FILENAME)(filename)
-           except vol.Invalid as ex:
-               raise APIError(humanize_error(filename, ex)) from None
-
-       tmp_path = await self.sys_backups.get_upload_path_for_location(location)
-       temp_dir: TemporaryDirectory | None = None
-       backup_file_stream: IOBase | None = None
-
-       def open_backup_file() -> Path:
-           nonlocal temp_dir, backup_file_stream
-           temp_dir = TemporaryDirectory(dir=tmp_path.as_posix())
-           tar_file = Path(temp_dir.name, "upload.tar")
-           backup_file_stream = tar_file.open("wb")
-           return tar_file
-
-       def close_backup_file() -> None:
-           if backup_file_stream:
-               # Make sure it got closed, in case of exception. It is safe to
-               # close the file stream twice.
-               backup_file_stream.close()
-           if temp_dir:
-               temp_dir.cleanup()
-
-       try:
-           reader = await request.multipart()
-           contents = await reader.next()
-           if not isinstance(contents, BodyPartReader):
-               raise APIError("Improperly formatted upload, could not read backup")
-
-           tar_file = await self.sys_run_in_executor(open_backup_file)
-           while chunk := await contents.read_chunk(size=2**16):
-               await self.sys_run_in_executor(
-                   cast(IOBase, backup_file_stream).write, chunk
-               )
-           await self.sys_run_in_executor(cast(IOBase, backup_file_stream).close)
-
-           backup = await asyncio.shield(
-               self.sys_backups.import_backup(
-                   tar_file,
-                   filename,
-                   location=location,
-                   additional_locations=locations,
-               )
-           )
-       except OSError as err:
-           if err.errno == errno.EBADMSG and location in {
-               LOCATION_CLOUD_BACKUP,
-               None,
-           }:
-               self.sys_resolution.add_unhealthy_reason(
-                   UnhealthyReason.OSERROR_BAD_MESSAGE
-               )
-           _LOGGER.error("Can't write new backup file: %s", err)
-           return False
-
-       except asyncio.CancelledError:
-           return False
-
-       finally:
-           await self.sys_run_in_executor(close_backup_file)
+       with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp_dir:
+           tar_file = Path(temp_dir, "backup.tar")
+           reader = await request.multipart()
+           contents = await reader.next()
+           try:
+               with tar_file.open("wb") as backup:
+                   while True:
+                       chunk = await contents.read_chunk()
+                       if not chunk:
+                           break
+                       backup.write(chunk)
+
+           except OSError as err:
+               _LOGGER.error("Can't write new backup file: %s", err)
+               return False
+
+           except asyncio.CancelledError:
+               return False
+
+           backup = await asyncio.shield(self.sys_backups.import_backup(tar_file))

        if backup:
            return {ATTR_SLUG: backup.slug}
        return False
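The structural change above: on the `main` side, backup and restore work is scheduled as a Supervisor job, and with `background=true` the handler returns a `job_id` as soon as the system has visibly entered the freeze state, instead of holding the HTTP request open for the whole backup. A reduced sketch of that schedule-then-return shape; `do_backup` and the `asyncio.Event` are illustrative stand-ins for the job scheduler and the `CoreState.FREEZE` bus event:

```python
# Reduced sketch of the background-job pattern in backup_full()/restore_full():
# start the work, wait only until it has visibly begun, then hand back an id.
import asyncio
import uuid


async def do_backup(started: asyncio.Event) -> str:
    started.set()           # comparable to Supervisor entering the freeze state
    await asyncio.sleep(1)  # the actual backup work
    return "9ecf0028"       # slug of the finished backup


async def handler(background: bool) -> dict:
    started = asyncio.Event()
    job_id = uuid.uuid4().hex
    task = asyncio.create_task(do_backup(started))
    await started.wait()    # backup validated and underway
    if background and not task.done():
        return {"job_id": job_id}   # client polls the job instead of waiting
    return {"job_id": job_id, "slug": await task}


print(asyncio.run(handler(background=False)))
```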

View File

@@ -1,5 +1,4 @@
 """Init file for Supervisor HA cli RESTful API."""
-
 import asyncio
 import logging
 from typing import Any

View File

@@ -1,26 +1,18 @@
 """Const for API."""

-from enum import StrEnum
-
 CONTENT_TYPE_BINARY = "application/octet-stream"
 CONTENT_TYPE_JSON = "application/json"
 CONTENT_TYPE_PNG = "image/png"
 CONTENT_TYPE_TAR = "application/tar"
 CONTENT_TYPE_TEXT = "text/plain"
 CONTENT_TYPE_URL = "application/x-www-form-urlencoded"
-CONTENT_TYPE_X_LOG = "text/x-log"

 COOKIE_INGRESS = "ingress_session"

-ATTR_ADDITIONAL_LOCATIONS = "additional_locations"
 ATTR_AGENT_VERSION = "agent_version"
 ATTR_APPARMOR_VERSION = "apparmor_version"
 ATTR_ATTRIBUTES = "attributes"
 ATTR_AVAILABLE_UPDATES = "available_updates"
-ATTR_BACKGROUND = "background"
-ATTR_BOOT_CONFIG = "boot_config"
-ATTR_BOOT_SLOT = "boot_slot"
-ATTR_BOOT_SLOTS = "boot_slots"
 ATTR_BOOT_TIMESTAMP = "boot_timestamp"
 ATTR_BOOTS = "boots"
 ATTR_BROADCAST_LLMNR = "broadcast_llmnr"
@@ -31,6 +23,7 @@ ATTR_CONNECTION_BUS = "connection_bus"
 ATTR_DATA_DISK = "data_disk"
 ATTR_DEVICE = "device"
 ATTR_DEV_PATH = "dev_path"
+ATTR_DISK_LED = "disk_led"
 ATTR_DISKS = "disks"
 ATTR_DRIVES = "drives"
 ATTR_DT_SYNCHRONIZED = "dt_synchronized"
@@ -38,53 +31,26 @@ ATTR_DT_UTC = "dt_utc"
 ATTR_EJECTABLE = "ejectable"
 ATTR_FALLBACK = "fallback"
 ATTR_FILESYSTEMS = "filesystems"
-ATTR_FORCE = "force"
-ATTR_GROUP_IDS = "group_ids"
+ATTR_HEARTBEAT_LED = "heartbeat_led"
 ATTR_IDENTIFIERS = "identifiers"
-ATTR_IS_ACTIVE = "is_active"
-ATTR_IS_OWNER = "is_owner"
 ATTR_JOBS = "jobs"
 ATTR_LLMNR = "llmnr"
 ATTR_LLMNR_HOSTNAME = "llmnr_hostname"
-ATTR_LOCAL_ONLY = "local_only"
-ATTR_LOCATION_ATTRIBUTES = "location_attributes"
-ATTR_LOCATIONS = "locations"
 ATTR_MDNS = "mdns"
 ATTR_MODEL = "model"
 ATTR_MOUNTS = "mounts"
 ATTR_MOUNT_POINTS = "mount_points"
 ATTR_PANEL_PATH = "panel_path"
+ATTR_POWER_LED = "power_led"
 ATTR_REMOVABLE = "removable"
-ATTR_REMOVE_CONFIG = "remove_config"
 ATTR_REVISION = "revision"
-ATTR_SAFE_MODE = "safe_mode"
 ATTR_SEAT = "seat"
 ATTR_SIGNED = "signed"
 ATTR_STARTUP_TIME = "startup_time"
-ATTR_STATUS = "status"
 ATTR_SUBSYSTEM = "subsystem"
 ATTR_SYSFS = "sysfs"
-ATTR_SYSTEM_HEALTH_LED = "system_health_led"
 ATTR_TIME_DETECTED = "time_detected"
 ATTR_UPDATE_TYPE = "update_type"
-ATTR_USAGE = "usage"
 ATTR_USE_NTP = "use_ntp"
-ATTR_USERS = "users"
-ATTR_USER_PATH = "user_path"
+ATTR_USAGE = "usage"
 ATTR_VENDOR = "vendor"
-ATTR_VIRTUALIZATION = "virtualization"
-
-
-class BootSlot(StrEnum):
-    """Boot slots used by HAOS."""
-
-    A = "A"
-    B = "B"
-
-
-class DetectBlockingIO(StrEnum):
-    """Enable/Disable detection for blocking I/O in event loop."""
-
-    OFF = "off"
-    ON = "on"
-    ON_AT_STARTUP = "on-at-startup"
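`BootSlot` and `DetectBlockingIO` on the `main` side are `StrEnum`s (Python 3.11+), so members parse directly from API payloads yet still compare and format as plain strings. A tiny sketch:

```python
# StrEnum members behave as strings (requires Python 3.11+).
from enum import StrEnum


class BootSlot(StrEnum):
    A = "A"
    B = "B"


assert BootSlot("A") is BootSlot.A   # parse straight from an API payload
assert BootSlot.B == "B"             # string comparison still works
print(f"booting slot {BootSlot.A}")  # formats as plain "A"
```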

View File

@@ -1,9 +1,6 @@
 """Init file for Supervisor network RESTful API."""
-
 import logging
-from typing import Any
-
-from aiohttp import web
+
 import voluptuous as vol

 from ..addons.addon import Addon
@@ -18,8 +15,8 @@ from ..const import (
     AddonState,
 )
 from ..coresys import CoreSysAttributes
-from ..discovery import Message
-from ..exceptions import APIForbidden, APINotFound
+from ..discovery.validate import valid_discovery_service
+from ..exceptions import APIError, APIForbidden
 from .utils import api_process, api_validate, require_home_assistant

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -27,7 +24,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
 SCHEMA_DISCOVERY = vol.Schema(
     {
         vol.Required(ATTR_SERVICE): str,
-        vol.Required(ATTR_CONFIG): dict,
+        vol.Optional(ATTR_CONFIG): vol.Maybe(dict),
     }
 )

@@ -35,16 +32,16 @@ SCHEMA_DISCOVERY = vol.Schema(
 class APIDiscovery(CoreSysAttributes):
    """Handle RESTful API for discovery functions."""

-   def _extract_message(self, request: web.Request) -> Message:
+   def _extract_message(self, request):
        """Extract discovery message from URL."""
-       message = self.sys_discovery.get(request.match_info["uuid"])
+       message = self.sys_discovery.get(request.match_info.get("uuid"))
        if not message:
-           raise APINotFound("Discovery message not found")
+           raise APIError("Discovery message not found")
        return message

    @api_process
    @require_home_assistant
-   async def list_discovery(self, request: web.Request) -> dict[str, Any]:
+   async def list(self, request):
        """Show registered and available services."""
        # Get available discovery
        discovery = [
@@ -55,16 +52,12 @@ class APIDiscovery(CoreSysAttributes):
                ATTR_CONFIG: message.config,
            }
            for message in self.sys_discovery.list_messages
-           if (
-               discovered := self.sys_addons.get_local_only(
-                   message.addon,
-               )
-           )
-           and discovered.state == AddonState.STARTED
+           if (addon := self.sys_addons.get(message.addon, local_only=True))
+           and addon.state == AddonState.STARTED
        ]

        # Get available services/add-ons
-       services: dict[str, list[str]] = {}
+       services = {}
        for addon in self.sys_addons.all:
            for name in addon.discovery:
                services.setdefault(name, []).append(addon.slug)

@@ -72,12 +65,21 @@ class APIDiscovery(CoreSysAttributes):
        return {ATTR_DISCOVERY: discovery, ATTR_SERVICES: services}

    @api_process
-   async def set_discovery(self, request: web.Request) -> dict[str, str]:
+   async def set_discovery(self, request):
        """Write data into a discovery pipeline."""
        body = await api_validate(SCHEMA_DISCOVERY, request)
        addon: Addon = request[REQUEST_FROM]
        service = body[ATTR_SERVICE]

+       try:
+           valid_discovery_service(service)
+       except vol.Invalid:
+           _LOGGER.warning(
+               "Received discovery message for unknown service %s from addon %s. Please report this to the maintainer of the add-on",
+               service,
+               addon.name,
+           )
+
        # Access?
        if body[ATTR_SERVICE] not in addon.discovery:
            _LOGGER.error(
@@ -90,13 +92,13 @@ class APIDiscovery(CoreSysAttributes):
            )

        # Process discovery message
-       message = await self.sys_discovery.send(addon, **body)
+       message = self.sys_discovery.send(addon, **body)

        return {ATTR_UUID: message.uuid}

    @api_process
    @require_home_assistant
-   async def get_discovery(self, request: web.Request) -> dict[str, Any]:
+   async def get_discovery(self, request):
        """Read data into a discovery message."""
        message = self._extract_message(request)

@@ -108,7 +110,7 @@ class APIDiscovery(CoreSysAttributes):
        }

    @api_process
-   async def del_discovery(self, request: web.Request) -> None:
+   async def del_discovery(self, request):
        """Delete data into a discovery message."""
        message = self._extract_message(request)
        addon = request[REQUEST_FROM]
@@ -117,4 +119,5 @@ class APIDiscovery(CoreSysAttributes):
        if message.addon != addon.slug:
            raise APIForbidden("Can't remove discovery message")

-       await self.sys_discovery.remove(message)
+       self.sys_discovery.remove(message)
+       return True
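The services map above is a plain `dict.setdefault` aggregation: several add-ons can offer the same discovery service, so slugs accumulate under one key. A compact, runnable sketch with made-up add-on slugs:

```python
# Sketch of the services map built in the discovery list handler: one service
# name can be offered by several add-ons, so slugs append under a shared key.
addons = {  # stand-ins for sys_addons.all: slug -> discovery services
    "core_mosquitto": ["mqtt"],
    "a0d7b954_zwavejs2mqtt": ["mqtt", "zwave_js"],
}

services: dict[str, list[str]] = {}
for slug, discovery in addons.items():
    for name in discovery:
        services.setdefault(name, []).append(slug)

print(services)  # {'mqtt': ['core_mosquitto', 'a0d7b954_zwavejs2mqtt'], ...}
```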

View File

@@ -1,5 +1,4 @@
 """Init file for Supervisor DNS RESTful API."""
-
 import asyncio
 from collections.abc import Awaitable
 import logging
@@ -27,8 +26,8 @@ from ..const import (
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
 from ..validate import dns_server_list, version_tag
-from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS
-from .utils import api_process, api_validate
+from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS, CONTENT_TYPE_BINARY
+from .utils import api_process, api_process_raw, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -78,7 +77,7 @@ class APICoreDNS(CoreSysAttributes):
        if restart_required:
            self.sys_create_task(self.sys_plugins.dns.restart())

-       await self.sys_plugins.dns.save_data()
+       self.sys_plugins.dns.save_data()

    @api_process
    async def stats(self, request: web.Request) -> dict[str, Any]:

@@ -106,6 +105,11 @@ class APICoreDNS(CoreSysAttributes):
            raise APIError(f"Version {version} is already in use")
        await asyncio.shield(self.sys_plugins.dns.update(version))

+   @api_process_raw(CONTENT_TYPE_BINARY)
+   def logs(self, request: web.Request) -> Awaitable[bytes]:
+       """Return DNS Docker logs."""
+       return self.sys_plugins.dns.logs()
+
    @api_process
    def restart(self, request: web.Request) -> Awaitable[None]:
        """Restart CoreDNS plugin."""

View File

@@ -1,5 +1,4 @@
 """Init file for Supervisor Home Assistant RESTful API."""
-
 import logging
 from typing import Any
@@ -7,7 +6,6 @@ from aiohttp import web
 import voluptuous as vol

 from ..const import (
-    ATTR_ENABLE_IPV6,
     ATTR_HOSTNAME,
     ATTR_LOGGING,
     ATTR_PASSWORD,
@@ -17,7 +15,6 @@ from ..const import (
     ATTR_VERSION,
 )
 from ..coresys import CoreSysAttributes
-from ..exceptions import APINotFound
 from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -31,39 +28,10 @@ SCHEMA_DOCKER_REGISTRY = vol.Schema(
     }
 )

-# pylint: disable=no-value-for-parameter
-SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_ENABLE_IPV6): vol.Boolean()})

 class APIDocker(CoreSysAttributes):
    """Handle RESTful API for Docker configuration."""

-   @api_process
-   async def info(self, request: web.Request):
-       """Get docker info."""
-       data_registries = {}
-       for hostname, registry in self.sys_docker.config.registries.items():
-           data_registries[hostname] = {
-               ATTR_USERNAME: registry[ATTR_USERNAME],
-           }
-       return {
-           ATTR_VERSION: self.sys_docker.info.version,
-           ATTR_ENABLE_IPV6: self.sys_docker.config.enable_ipv6,
-           ATTR_STORAGE: self.sys_docker.info.storage,
-           ATTR_LOGGING: self.sys_docker.info.logging,
-           ATTR_REGISTRIES: data_registries,
-       }
-
-   @api_process
-   async def options(self, request: web.Request) -> None:
-       """Set docker options."""
-       body = await api_validate(SCHEMA_OPTIONS, request)
-
-       if ATTR_ENABLE_IPV6 in body:
-           self.sys_docker.config.enable_ipv6 = body[ATTR_ENABLE_IPV6]
-
-       await self.sys_docker.config.save_data()
-
    @api_process
    async def registries(self, request) -> dict[str, Any]:
        """Return the list of registries."""
@@ -83,14 +51,26 @@ class APIDocker(CoreSysAttributes):
        for hostname, registry in body.items():
            self.sys_docker.config.registries[hostname] = registry

-       await self.sys_docker.config.save_data()
+       self.sys_docker.config.save_data()

    @api_process
    async def remove_registry(self, request: web.Request):
        """Delete a docker registry."""
        hostname = request.match_info.get(ATTR_HOSTNAME)
-       if hostname not in self.sys_docker.config.registries:
-           raise APINotFound(f"Hostname {hostname} does not exist in registries")
-
        del self.sys_docker.config.registries[hostname]
-       await self.sys_docker.config.save_data()
+       self.sys_docker.config.save_data()
+
+   @api_process
+   async def info(self, request: web.Request):
+       """Get docker info."""
+       data_registries = {}
+       for hostname, registry in self.sys_docker.config.registries.items():
+           data_registries[hostname] = {
+               ATTR_USERNAME: registry[ATTR_USERNAME],
+           }
+       return {
+           ATTR_VERSION: self.sys_docker.info.version,
+           ATTR_STORAGE: self.sys_docker.info.storage,
+           ATTR_LOGGING: self.sys_docker.info.logging,
+           ATTR_REGISTRIES: data_registries,
+       }
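The `options` endpoint added on the `main` side validates its body with a one-key voluptuous schema, and `vol.Boolean()` also coerces common string spellings. A runnable sketch of exactly that validation (the attribute name is written out literally here instead of the `ATTR_ENABLE_IPV6` constant):

```python
# Sketch of SCHEMA_OPTIONS above: one optional boolean, with vol.Boolean()
# coercing string spellings like "true"/"on"/"1" into a real bool.
import voluptuous as vol

SCHEMA_OPTIONS = vol.Schema({vol.Optional("enable_ipv6"): vol.Boolean()})

print(SCHEMA_OPTIONS({"enable_ipv6": "true"}))  # {'enable_ipv6': True}
print(SCHEMA_OPTIONS({}))                       # {} -> setting left unchanged
try:
    SCHEMA_OPTIONS({"enable_ipv6": "maybe"})
except vol.Invalid as err:
    print("rejected:", err)
```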

View File

@@ -1,5 +1,4 @@
 """Init file for Supervisor hardware RESTful API."""
-
 import logging
 from typing import Any
@@ -17,7 +16,7 @@ from ..const import (
     ATTR_SYSTEM,
 )
 from ..coresys import CoreSysAttributes
-from ..dbus.udisks2 import UDisks2Manager
+from ..dbus.udisks2 import UDisks2
 from ..dbus.udisks2.block import UDisks2Block
 from ..dbus.udisks2.drive import UDisks2Drive
 from ..hardware.data import Device
@@ -68,15 +67,12 @@ def filesystem_struct(fs_block: UDisks2Block) -> dict[str, Any]:
        ATTR_NAME: fs_block.id_label,
        ATTR_SYSTEM: fs_block.hint_system,
        ATTR_MOUNT_POINTS: [
-           str(mount_point)
-           for mount_point in (
-               fs_block.filesystem.mount_points if fs_block.filesystem else []
-           )
+           str(mount_point) for mount_point in fs_block.filesystem.mount_points
        ],
    }


-def drive_struct(udisks2: UDisks2Manager, drive: UDisks2Drive) -> dict[str, Any]:
+def drive_struct(udisks2: UDisks2, drive: UDisks2Drive) -> dict[str, Any]:
    """Return a dict with information of a disk to be used in the API."""
    return {
        ATTR_VENDOR: drive.vendor,
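The comprehension change above is a `None` guard: a block device does not have to expose a filesystem interface, so `fs_block.filesystem` can be `None` and the mount-point list must fall back to empty. A toy reproduction of that guard:

```python
# Sketch of the None-guard in filesystem_struct(): a block device need not
# expose a filesystem, so fall back to an empty list of mount points.
from dataclasses import dataclass


@dataclass
class Filesystem:
    mount_points: list[str]


@dataclass
class Block:
    filesystem: Filesystem | None


for blk in (Block(Filesystem(["/mnt/data"])), Block(None)):
    mounts = [
        str(p) for p in (blk.filesystem.mount_points if blk.filesystem else [])
    ]
    print(mounts)  # ['/mnt/data'] then []
```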

View File

@ -1,5 +1,4 @@
"""Init file for Supervisor Home Assistant RESTful API.""" """Init file for Supervisor Home Assistant RESTful API."""
import asyncio import asyncio
from collections.abc import Awaitable from collections.abc import Awaitable
import logging import logging
@ -13,7 +12,6 @@ from ..const import (
ATTR_AUDIO_INPUT, ATTR_AUDIO_INPUT,
ATTR_AUDIO_OUTPUT, ATTR_AUDIO_OUTPUT,
ATTR_BACKUP, ATTR_BACKUP,
ATTR_BACKUPS_EXCLUDE_DATABASE,
ATTR_BLK_READ, ATTR_BLK_READ,
ATTR_BLK_WRITE, ATTR_BLK_WRITE,
ATTR_BOOT, ATTR_BOOT,
@ -35,10 +33,10 @@ from ..const import (
ATTR_WATCHDOG, ATTR_WATCHDOG,
) )
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..exceptions import APIDBMigrationInProgress, APIError from ..exceptions import APIError
from ..validate import docker_image, network_port, version_tag from ..validate import docker_image, network_port, version_tag
from .const import ATTR_FORCE, ATTR_SAFE_MODE from .const import CONTENT_TYPE_BINARY
from .utils import api_process, api_validate from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
@ -53,7 +51,6 @@ SCHEMA_OPTIONS = vol.Schema(
vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(str), vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(str),
vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(str), vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(str),
vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(str), vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(str),
vol.Optional(ATTR_BACKUPS_EXCLUDE_DATABASE): vol.Boolean(),
} }
) )
@ -64,34 +61,10 @@ SCHEMA_UPDATE = vol.Schema(
} }
) )
SCHEMA_RESTART = vol.Schema(
{
vol.Optional(ATTR_SAFE_MODE, default=False): vol.Boolean(),
vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
}
)
SCHEMA_STOP = vol.Schema(
{
vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
}
)
class APIHomeAssistant(CoreSysAttributes): class APIHomeAssistant(CoreSysAttributes):
"""Handle RESTful API for Home Assistant functions.""" """Handle RESTful API for Home Assistant functions."""
async def _check_offline_migration(self, force: bool = False) -> None:
"""Check and raise if there's an offline DB migration in progress."""
if (
not force
and (state := await self.sys_homeassistant.api.get_api_state())
and state.offline_db_migration
):
raise APIDBMigrationInProgress(
"Offline database migration in progress, try again after it has completed"
)
@api_process @api_process
async def info(self, request: web.Request) -> dict[str, Any]: async def info(self, request: web.Request) -> dict[str, Any]:
"""Return host information.""" """Return host information."""
@ -109,7 +82,6 @@ class APIHomeAssistant(CoreSysAttributes):
ATTR_WATCHDOG: self.sys_homeassistant.watchdog, ATTR_WATCHDOG: self.sys_homeassistant.watchdog,
ATTR_AUDIO_INPUT: self.sys_homeassistant.audio_input, ATTR_AUDIO_INPUT: self.sys_homeassistant.audio_input,
ATTR_AUDIO_OUTPUT: self.sys_homeassistant.audio_output, ATTR_AUDIO_OUTPUT: self.sys_homeassistant.audio_output,
ATTR_BACKUPS_EXCLUDE_DATABASE: self.sys_homeassistant.backups_exclude_database,
} }
@api_process @api_process
@ -118,10 +90,7 @@ class APIHomeAssistant(CoreSysAttributes):
body = await api_validate(SCHEMA_OPTIONS, request) body = await api_validate(SCHEMA_OPTIONS, request)
if ATTR_IMAGE in body: if ATTR_IMAGE in body:
self.sys_homeassistant.set_image(body[ATTR_IMAGE]) self.sys_homeassistant.image = body[ATTR_IMAGE]
self.sys_homeassistant.override_image = (
self.sys_homeassistant.image != self.sys_homeassistant.default_image
)
if ATTR_BOOT in body: if ATTR_BOOT in body:
self.sys_homeassistant.boot = body[ATTR_BOOT] self.sys_homeassistant.boot = body[ATTR_BOOT]
@ -144,12 +113,7 @@ class APIHomeAssistant(CoreSysAttributes):
if ATTR_AUDIO_OUTPUT in body: if ATTR_AUDIO_OUTPUT in body:
self.sys_homeassistant.audio_output = body[ATTR_AUDIO_OUTPUT] self.sys_homeassistant.audio_output = body[ATTR_AUDIO_OUTPUT]
if ATTR_BACKUPS_EXCLUDE_DATABASE in body: self.sys_homeassistant.save_data()
self.sys_homeassistant.backups_exclude_database = body[
ATTR_BACKUPS_EXCLUDE_DATABASE
]
await self.sys_homeassistant.save_data()
@api_process @api_process
async def stats(self, request: web.Request) -> dict[Any, str]: async def stats(self, request: web.Request) -> dict[Any, str]:
@ -173,7 +137,6 @@ class APIHomeAssistant(CoreSysAttributes):
async def update(self, request: web.Request) -> None: async def update(self, request: web.Request) -> None:
"""Update Home Assistant.""" """Update Home Assistant."""
body = await api_validate(SCHEMA_UPDATE, request) body = await api_validate(SCHEMA_UPDATE, request)
await self._check_offline_migration()
await asyncio.shield( await asyncio.shield(
self.sys_homeassistant.core.update( self.sys_homeassistant.core.update(
@ -183,12 +146,9 @@ class APIHomeAssistant(CoreSysAttributes):
) )
@api_process @api_process
async def stop(self, request: web.Request) -> Awaitable[None]: def stop(self, request: web.Request) -> Awaitable[None]:
"""Stop Home Assistant.""" """Stop Home Assistant."""
body = await api_validate(SCHEMA_STOP, request) return asyncio.shield(self.sys_homeassistant.core.stop())
await self._check_offline_migration(force=body[ATTR_FORCE])
return await asyncio.shield(self.sys_homeassistant.core.stop())
@api_process @api_process
def start(self, request: web.Request) -> Awaitable[None]: def start(self, request: web.Request) -> Awaitable[None]:
@ -196,24 +156,19 @@ class APIHomeAssistant(CoreSysAttributes):
return asyncio.shield(self.sys_homeassistant.core.start()) return asyncio.shield(self.sys_homeassistant.core.start())
@api_process @api_process
async def restart(self, request: web.Request) -> None: def restart(self, request: web.Request) -> Awaitable[None]:
"""Restart Home Assistant.""" """Restart Home Assistant."""
body = await api_validate(SCHEMA_RESTART, request) return asyncio.shield(self.sys_homeassistant.core.restart())
await self._check_offline_migration(force=body[ATTR_FORCE])
await asyncio.shield(
self.sys_homeassistant.core.restart(safe_mode=body[ATTR_SAFE_MODE])
)
@api_process @api_process
async def rebuild(self, request: web.Request) -> None: def rebuild(self, request: web.Request) -> Awaitable[None]:
"""Rebuild Home Assistant.""" """Rebuild Home Assistant."""
body = await api_validate(SCHEMA_RESTART, request) return asyncio.shield(self.sys_homeassistant.core.rebuild())
await self._check_offline_migration(force=body[ATTR_FORCE])
await asyncio.shield( @api_process_raw(CONTENT_TYPE_BINARY)
self.sys_homeassistant.core.rebuild(safe_mode=body[ATTR_SAFE_MODE]) def logs(self, request: web.Request) -> Awaitable[bytes]:
) """Return Home Assistant Docker logs."""
return self.sys_homeassistant.core.logs()
@api_process @api_process
async def check(self, request: web.Request) -> None: async def check(self, request: web.Request) -> None:
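The new `_check_offline_migration` guard referenced throughout this file follows one pattern: query Core's reported API state and refuse the operation unless `force` is set. A minimal runnable sketch of that pattern, with a hypothetical `get_api_state` callable and a stand-in dataclass in place of the Supervisor's own types:

```python
# Sketch only: APIState and get_api_state are stand-ins, not Supervisor APIs.
import asyncio
from dataclasses import dataclass


class APIDBMigrationInProgress(Exception):
    """Raised when a request must wait for an offline database migration."""


@dataclass
class APIState:
    offline_db_migration: bool


async def check_offline_migration(get_api_state, force: bool = False) -> None:
    """Raise unless forced while Core runs an offline database migration."""
    if not force and (state := await get_api_state()) and state.offline_db_migration:
        raise APIDBMigrationInProgress(
            "Offline database migration in progress, try again after it has completed"
        )


async def demo() -> None:
    async def get_api_state() -> APIState:
        return APIState(offline_db_migration=True)

    try:
        await check_offline_migration(get_api_state)  # not forced: blocked
    except APIDBMigrationInProgress as err:
        print(f"blocked: {err}")
    await check_offline_migration(get_api_state, force=True)  # forced: passes


asyncio.run(demo())
```

The same walrus-plus-short-circuit shape also appears in the host API's `_check_ha_offline_migration` below.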
View File
@ -1,11 +1,9 @@
"""Init file for Supervisor host RESTful API.""" """Init file for Supervisor host RESTful API."""
import asyncio import asyncio
from contextlib import suppress from contextlib import suppress
import logging import logging
from typing import Any
from aiohttp import ClientConnectionResetError, ClientPayloadError, web from aiohttp import web
from aiohttp.hdrs import ACCEPT, RANGE from aiohttp.hdrs import ACCEPT, RANGE
import voluptuous as vol import voluptuous as vol
from voluptuous.error import CoerceInvalid from voluptuous.error import CoerceInvalid
@ -29,16 +27,8 @@ from ..const import (
ATTR_TIMEZONE, ATTR_TIMEZONE,
) )
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..exceptions import APIDBMigrationInProgress, APIError, HostLogError from ..exceptions import APIError, HostLogError
from ..host.const import ( from ..host.const import PARAM_BOOT_ID, PARAM_FOLLOW, PARAM_SYSLOG_IDENTIFIER
PARAM_BOOT_ID,
PARAM_FOLLOW,
PARAM_SYSLOG_IDENTIFIER,
LogFormat,
LogFormatter,
)
from ..host.logs import SYSTEMD_JOURNAL_GATEWAYD_LINES_MAX
from ..utils.systemd_journal import journal_logs_reader
from .const import ( from .const import (
ATTR_AGENT_VERSION, ATTR_AGENT_VERSION,
ATTR_APPARMOR_VERSION, ATTR_APPARMOR_VERSION,
@ -48,48 +38,26 @@ from .const import (
ATTR_BROADCAST_MDNS, ATTR_BROADCAST_MDNS,
ATTR_DT_SYNCHRONIZED, ATTR_DT_SYNCHRONIZED,
ATTR_DT_UTC, ATTR_DT_UTC,
ATTR_FORCE,
ATTR_IDENTIFIERS, ATTR_IDENTIFIERS,
ATTR_LLMNR_HOSTNAME, ATTR_LLMNR_HOSTNAME,
ATTR_STARTUP_TIME, ATTR_STARTUP_TIME,
ATTR_USE_NTP, ATTR_USE_NTP,
ATTR_VIRTUALIZATION,
CONTENT_TYPE_TEXT, CONTENT_TYPE_TEXT,
CONTENT_TYPE_X_LOG,
) )
from .utils import api_process, api_process_raw, api_validate from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
IDENTIFIER = "identifier" IDENTIFIER = "identifier"
BOOTID = "bootid" BOOTID = "bootid"
DEFAULT_LINES = 100 DEFAULT_RANGE = 100
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_HOSTNAME): str}) SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_HOSTNAME): str})
# pylint: disable=no-value-for-parameter
SCHEMA_SHUTDOWN = vol.Schema(
{
vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
}
)
# pylint: enable=no-value-for-parameter
class APIHost(CoreSysAttributes): class APIHost(CoreSysAttributes):
"""Handle RESTful API for host functions.""" """Handle RESTful API for host functions."""
async def _check_ha_offline_migration(self, force: bool) -> None:
"""Check if HA has an offline migration in progress and raise if not forced."""
if (
not force
and (state := await self.sys_homeassistant.api.get_api_state())
and state.offline_db_migration
):
raise APIDBMigrationInProgress(
"Home Assistant offline database migration in progress, please wait until complete before shutting down host"
)
@api_process @api_process
async def info(self, request): async def info(self, request):
"""Return host information.""" """Return host information."""
@ -97,13 +65,12 @@ class APIHost(CoreSysAttributes):
ATTR_AGENT_VERSION: self.sys_dbus.agent.version, ATTR_AGENT_VERSION: self.sys_dbus.agent.version,
ATTR_APPARMOR_VERSION: self.sys_host.apparmor.version, ATTR_APPARMOR_VERSION: self.sys_host.apparmor.version,
ATTR_CHASSIS: self.sys_host.info.chassis, ATTR_CHASSIS: self.sys_host.info.chassis,
ATTR_VIRTUALIZATION: self.sys_host.info.virtualization,
ATTR_CPE: self.sys_host.info.cpe, ATTR_CPE: self.sys_host.info.cpe,
ATTR_DEPLOYMENT: self.sys_host.info.deployment, ATTR_DEPLOYMENT: self.sys_host.info.deployment,
ATTR_DISK_FREE: await self.sys_host.info.free_space(), ATTR_DISK_FREE: self.sys_host.info.free_space,
ATTR_DISK_TOTAL: await self.sys_host.info.total_space(), ATTR_DISK_TOTAL: self.sys_host.info.total_space,
ATTR_DISK_USED: await self.sys_host.info.used_space(), ATTR_DISK_USED: self.sys_host.info.used_space,
ATTR_DISK_LIFE_TIME: await self.sys_host.info.disk_life_time(), ATTR_DISK_LIFE_TIME: self.sys_host.info.disk_life_time,
ATTR_FEATURES: self.sys_host.features, ATTR_FEATURES: self.sys_host.features,
ATTR_HOSTNAME: self.sys_host.info.hostname, ATTR_HOSTNAME: self.sys_host.info.hostname,
ATTR_LLMNR_HOSTNAME: self.sys_host.info.llmnr_hostname, ATTR_LLMNR_HOSTNAME: self.sys_host.info.llmnr_hostname,
@ -131,20 +98,14 @@ class APIHost(CoreSysAttributes):
) )
@api_process @api_process
async def reboot(self, request): def reboot(self, request):
"""Reboot host.""" """Reboot host."""
body = await api_validate(SCHEMA_SHUTDOWN, request) return asyncio.shield(self.sys_host.control.reboot())
await self._check_ha_offline_migration(force=body[ATTR_FORCE])
return await asyncio.shield(self.sys_host.control.reboot())
@api_process @api_process
async def shutdown(self, request): def shutdown(self, request):
"""Poweroff host.""" """Poweroff host."""
body = await api_validate(SCHEMA_SHUTDOWN, request) return asyncio.shield(self.sys_host.control.shutdown())
await self._check_ha_offline_migration(force=body[ATTR_FORCE])
return await asyncio.shield(self.sys_host.control.shutdown())
@api_process @api_process
def reload(self, request): def reload(self, request):
@ -192,100 +153,50 @@ class APIHost(CoreSysAttributes):
raise APIError() from err raise APIError() from err
return possible_offset return possible_offset
async def advanced_logs_handler( @api_process
async def advanced_logs(
self, request: web.Request, identifier: str | None = None, follow: bool = False self, request: web.Request, identifier: str | None = None, follow: bool = False
) -> web.StreamResponse: ) -> web.StreamResponse:
"""Return systemd-journald logs.""" """Return systemd-journald logs."""
log_formatter = LogFormatter.PLAIN params = {}
params: dict[str, Any] = {}
if identifier: if identifier:
params[PARAM_SYSLOG_IDENTIFIER] = identifier params[PARAM_SYSLOG_IDENTIFIER] = identifier
elif IDENTIFIER in request.match_info: elif IDENTIFIER in request.match_info:
params[PARAM_SYSLOG_IDENTIFIER] = request.match_info[IDENTIFIER] params[PARAM_SYSLOG_IDENTIFIER] = request.match_info.get(IDENTIFIER)
else: else:
params[PARAM_SYSLOG_IDENTIFIER] = self.sys_host.logs.default_identifiers params[PARAM_SYSLOG_IDENTIFIER] = self.sys_host.logs.default_identifiers
# host logs should always be verbose, no matter what Accept header is used
log_formatter = LogFormatter.VERBOSE
if BOOTID in request.match_info: if BOOTID in request.match_info:
params[PARAM_BOOT_ID] = await self._get_boot_id(request.match_info[BOOTID]) params[PARAM_BOOT_ID] = await self._get_boot_id(
request.match_info.get(BOOTID)
)
if follow: if follow:
params[PARAM_FOLLOW] = "" params[PARAM_FOLLOW] = ""
if ACCEPT in request.headers and request.headers[ACCEPT] not in [ if ACCEPT in request.headers and request.headers[ACCEPT] not in [
CONTENT_TYPE_TEXT, CONTENT_TYPE_TEXT,
CONTENT_TYPE_X_LOG,
"*/*", "*/*",
]: ]:
raise APIError( raise APIError(
"Invalid content type requested. Only text/plain and text/x-log " "Invalid content type requested. Only text/plain supported for now."
"supported for now."
) )
if "verbose" in request.query or request.headers[ACCEPT] == CONTENT_TYPE_X_LOG: if RANGE in request.headers:
log_formatter = LogFormatter.VERBOSE range_header = request.headers.get(RANGE)
if "lines" in request.query:
lines = request.query.get("lines", DEFAULT_LINES)
try:
lines = int(lines)
except ValueError:
# If the user passed a non-integer value, just use the default instead of raising an error.
lines = DEFAULT_LINES
finally:
# We can't use the entries= Range header syntax to refer to the last 1 line,
# and passing 1 to the calculation below would return the 1st line of the logs
# instead. Since this is really an edge case that doesn't matter much, we'll just
# return 2 lines at minimum.
lines = max(2, lines)
# entries=cursor[[:num_skip]:num_entries]
range_header = f"entries=:-{lines - 1}:{SYSTEMD_JOURNAL_GATEWAYD_LINES_MAX if follow else lines}"
elif RANGE in request.headers:
range_header = request.headers[RANGE]
else: else:
range_header = f"entries=:-{DEFAULT_LINES - 1}:{SYSTEMD_JOURNAL_GATEWAYD_LINES_MAX if follow else DEFAULT_LINES}" range_header = f"entries=:-{DEFAULT_RANGE}:"
async with self.sys_host.logs.journald_logs( async with self.sys_host.logs.journald_logs(
params=params, range_header=range_header, accept=LogFormat.JOURNAL params=params, range_header=range_header
) as resp: ) as resp:
try: try:
response = web.StreamResponse() response = web.StreamResponse()
response.content_type = CONTENT_TYPE_TEXT response.content_type = CONTENT_TYPE_TEXT
headers_returned = False await response.prepare(request)
async for cursor, line in journal_logs_reader(resp, log_formatter): async for data in resp.content:
try: await response.write(data)
if not headers_returned: except ConnectionResetError as ex:
if cursor:
response.headers["X-First-Cursor"] = cursor
response.headers["X-Accel-Buffering"] = "no"
await response.prepare(request)
headers_returned = True
await response.write(line.encode("utf-8") + b"\n")
except ClientConnectionResetError as err:
# When client closes the connection while reading busy logs, we
# sometimes get this exception. It should be safe to ignore it.
_LOGGER.debug(
"ClientConnectionResetError raised when returning journal logs: %s",
err,
)
break
except ConnectionError as err:
_LOGGER.warning(
"%s raised when returning journal logs: %s",
type(err).__name__,
err,
)
break
except (ConnectionResetError, ClientPayloadError) as ex:
# ClientPayloadError is most likely caused by the client closing the connection
raise APIError( raise APIError(
"Connection reset when trying to fetch data from systemd-journald." "Connection reset when trying to fetch data from systemd-journald."
) from ex ) from ex
return response return response
@api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
async def advanced_logs(
self, request: web.Request, identifier: str | None = None, follow: bool = False
) -> web.StreamResponse:
"""Return systemd-journald logs. Wrapped as standard API handler."""
return await self.advanced_logs_handler(request, identifier, follow)
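The `lines` handling above exists because systemd-journal-gatewayd's `Range: entries=cursor[[:num_skip]:num_entries]` syntax cannot address the last single line, hence the clamp to a minimum of 2. A runnable sketch of just the header math; the value of `SYSTEMD_JOURNAL_GATEWAYD_LINES_MAX` is assumed here, the real constant lives in `supervisor.host.logs`:

```python
SYSTEMD_JOURNAL_GATEWAYD_LINES_MAX = 10_000  # assumed cap for the sketch
DEFAULT_LINES = 100


def build_range_header(lines, follow: bool) -> str:
    """Build the entries= Range header for the last N journal lines."""
    try:
        n = int(lines) if lines is not None else DEFAULT_LINES
    except ValueError:
        n = DEFAULT_LINES  # non-integer query values fall back to the default
    n = max(2, n)  # the gateway cannot express "last 1 line" with this syntax
    num_entries = SYSTEMD_JOURNAL_GATEWAYD_LINES_MAX if follow else n
    return f"entries=:-{n - 1}:{num_entries}"


assert build_range_header(None, follow=False) == "entries=:-99:100"
assert build_range_header("abc", follow=False) == "entries=:-99:100"
assert build_range_header("1", follow=True) == "entries=:-1:10000"
```

When following, the entry count is raised to the gateway's line cap so the stream is not cut off after the initial batch.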
View File
@ -1,5 +1,4 @@
"""Supervisor Add-on ingress service.""" """Supervisor Add-on ingress service."""
import asyncio import asyncio
from ipaddress import ip_address from ipaddress import ip_address
import logging import logging
@ -49,29 +48,6 @@ SCHEMA_INGRESS_CREATE_SESSION_DATA = vol.Schema(
) )
# from https://github.com/aio-libs/aiohttp/blob/8ae650bee4add9f131d49b96a0a150311ea58cd1/aiohttp/helpers.py#L1059C1-L1079C1
def must_be_empty_body(method: str, code: int) -> bool:
"""Check if a request must return an empty body."""
return (
status_code_must_be_empty_body(code)
or method_must_be_empty_body(method)
or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT)
)
def method_must_be_empty_body(method: str) -> bool:
"""Check if a method must return an empty body."""
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2
return method.upper() == hdrs.METH_HEAD
def status_code_must_be_empty_body(code: int) -> bool:
"""Check if a status code must return an empty body."""
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
return code in {204, 304} or 100 <= code < 200
class APIIngress(CoreSysAttributes): class APIIngress(CoreSysAttributes):
"""Ingress view to handle add-on webui routing.""" """Ingress view to handle add-on webui routing."""
@ -83,7 +59,7 @@ class APIIngress(CoreSysAttributes):
def _extract_addon(self, request: web.Request) -> Addon: def _extract_addon(self, request: web.Request) -> Addon:
"""Return addon, throw an exception it it doesn't exist.""" """Return addon, throw an exception it it doesn't exist."""
token = request.match_info["token"] token = request.match_info.get("token")
# Find correct add-on # Find correct add-on
addon = self.sys_ingress.get(token) addon = self.sys_ingress.get(token)
@ -132,7 +108,7 @@ class APIIngress(CoreSysAttributes):
@api_process @api_process
@require_home_assistant @require_home_assistant
async def validate_session(self, request: web.Request) -> None: async def validate_session(self, request: web.Request) -> dict[str, Any]:
"""Validate session and extending how long it's valid for.""" """Validate session and extending how long it's valid for."""
data = await api_validate(VALIDATE_SESSION_DATA, request) data = await api_validate(VALIDATE_SESSION_DATA, request)
@ -147,14 +123,14 @@ class APIIngress(CoreSysAttributes):
"""Route data to Supervisor ingress service.""" """Route data to Supervisor ingress service."""
# Check Ingress Session # Check Ingress Session
session = request.cookies.get(COOKIE_INGRESS, "") session = request.cookies.get(COOKIE_INGRESS)
if not self.sys_ingress.validate_session(session): if not self.sys_ingress.validate_session(session):
_LOGGER.warning("No valid ingress session %s", session) _LOGGER.warning("No valid ingress session %s", session)
raise HTTPUnauthorized() raise HTTPUnauthorized()
# Process requests # Process requests
addon = self._extract_addon(request) addon = self._extract_addon(request)
path = request.match_info.get("path", "") path = request.match_info.get("path")
session_data = self.sys_ingress.get_session_data(session) session_data = self.sys_ingress.get_session_data(session)
try: try:
# Websocket # Websocket
@ -183,7 +159,7 @@ class APIIngress(CoreSysAttributes):
for proto in request.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",") for proto in request.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
] ]
else: else:
req_protocols = [] req_protocols = ()
ws_server = web.WebSocketResponse( ws_server = web.WebSocketResponse(
protocols=req_protocols, autoclose=False, autoping=False protocols=req_protocols, autoclose=False, autoping=False
@ -249,18 +225,10 @@ class APIIngress(CoreSysAttributes):
skip_auto_headers={hdrs.CONTENT_TYPE}, skip_auto_headers={hdrs.CONTENT_TYPE},
) as result: ) as result:
headers = _response_header(result) headers = _response_header(result)
# Avoid parsing content_type in simple cases for better performance
if maybe_content_type := result.headers.get(hdrs.CONTENT_TYPE):
content_type = (maybe_content_type.partition(";"))[0].strip()
else:
content_type = result.content_type
# Simple request # Simple request
if ( if (
# empty body responses should not be streamed, hdrs.CONTENT_LENGTH in result.headers
# otherwise aiohttp < 3.9.0 may generate
# an invalid "0\r\n\r\n" chunk instead of an empty response.
must_be_empty_body(request.method, result.status)
or hdrs.CONTENT_LENGTH in result.headers
and int(result.headers.get(hdrs.CONTENT_LENGTH, 0)) < 4_194_000 and int(result.headers.get(hdrs.CONTENT_LENGTH, 0)) < 4_194_000
): ):
# Return Response # Return Response
@ -268,18 +236,17 @@ class APIIngress(CoreSysAttributes):
return web.Response( return web.Response(
headers=headers, headers=headers,
status=result.status, status=result.status,
content_type=content_type, content_type=result.content_type,
body=body, body=body,
) )
# Stream response # Stream response
response = web.StreamResponse(status=result.status, headers=headers) response = web.StreamResponse(status=result.status, headers=headers)
response.content_type = content_type response.content_type = result.content_type
try: try:
response.headers["X-Accel-Buffering"] = "no"
await response.prepare(request) await response.prepare(request)
async for data, _ in result.content.iter_chunks(): async for data in result.content.iter_chunked(4096):
await response.write(data) await response.write(data)
except ( except (
@ -309,16 +276,14 @@ class APIIngress(CoreSysAttributes):
def _init_header( def _init_header(
request: web.Request, addon: Addon, session_data: IngressSessionData | None request: web.Request, addon: Addon, session_data: IngressSessionData | None
) -> CIMultiDict[str]: ) -> CIMultiDict | dict[str, str]:
"""Create initial header.""" """Create initial header."""
headers = CIMultiDict[str]() headers = {}
if session_data is not None: if session_data is not None:
headers[HEADER_REMOTE_USER_ID] = session_data.user.id headers[HEADER_REMOTE_USER_ID] = session_data.user.id
if session_data.user.username is not None: headers[HEADER_REMOTE_USER_NAME] = session_data.user.username
headers[HEADER_REMOTE_USER_NAME] = session_data.user.username headers[HEADER_REMOTE_USER_DISPLAY_NAME] = session_data.user.display_name
if session_data.user.display_name is not None:
headers[HEADER_REMOTE_USER_DISPLAY_NAME] = session_data.user.display_name
# filter flags # filter flags
for name, value in request.headers.items(): for name, value in request.headers.items():
@ -337,20 +302,19 @@ def _init_header(
istr(HEADER_REMOTE_USER_DISPLAY_NAME), istr(HEADER_REMOTE_USER_DISPLAY_NAME),
): ):
continue continue
headers.add(name, value) headers[name] = value
# Update X-Forwarded-For # Update X-Forwarded-For
if request.transport: forward_for = request.headers.get(hdrs.X_FORWARDED_FOR)
forward_for = request.headers.get(hdrs.X_FORWARDED_FOR) connected_ip = ip_address(request.transport.get_extra_info("peername")[0])
connected_ip = ip_address(request.transport.get_extra_info("peername")[0]) headers[hdrs.X_FORWARDED_FOR] = f"{forward_for}, {connected_ip!s}"
headers[hdrs.X_FORWARDED_FOR] = f"{forward_for}, {connected_ip!s}"
return headers return headers
def _response_header(response: aiohttp.ClientResponse) -> CIMultiDict[str]: def _response_header(response: aiohttp.ClientResponse) -> dict[str, str]:
"""Create response header.""" """Create response header."""
headers = CIMultiDict[str]() headers = {}
for name, value in response.headers.items(): for name, value in response.headers.items():
if name in ( if name in (
@ -360,7 +324,7 @@ def _response_header(response: aiohttp.ClientResponse) -> CIMultiDict[str]:
hdrs.CONTENT_ENCODING, hdrs.CONTENT_ENCODING,
): ):
continue continue
headers.add(name, value) headers[name] = value
return headers return headers
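The empty-body helpers added at the top of this file guard the streaming path: for 1xx, 204, 304, and HEAD (or 2xx CONNECT) responses, writing a body would let aiohttp < 3.9.0 emit an invalid `0\r\n\r\n` chunk instead of an empty response. The same logic, made self-contained by inlining the two method constants:

```python
# Copied from the diff above, with aiohttp's hdrs constants inlined so the
# snippet runs on its own.
METH_HEAD = "HEAD"
METH_CONNECT = "CONNECT"


def status_code_must_be_empty_body(code: int) -> bool:
    """1xx, 204 and 304 responses never carry a body (RFC 9112, section 6.3)."""
    return code in {204, 304} or 100 <= code < 200


def method_must_be_empty_body(method: str) -> bool:
    """Responses to HEAD requests never carry a body."""
    return method.upper() == METH_HEAD


def must_be_empty_body(method: str, code: int) -> bool:
    """True when the response must not be streamed as a chunked body."""
    return (
        status_code_must_be_empty_body(code)
        or method_must_be_empty_body(method)
        or (200 <= code < 300 and method.upper() == METH_CONNECT)
    )


assert must_be_empty_body("GET", 204)
assert must_be_empty_body("HEAD", 200)
assert must_be_empty_body("CONNECT", 200)
assert not must_be_empty_body("GET", 200)
```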
View File
@ -1,5 +1,4 @@
"""Init file for Supervisor Jobs RESTful API.""" """Init file for Supervisor Jobs RESTful API."""
import logging import logging
from typing import Any from typing import Any
@ -7,7 +6,6 @@ from aiohttp import web
import voluptuous as vol import voluptuous as vol
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APINotFound, JobNotFound
from ..jobs import SupervisorJob from ..jobs import SupervisorJob
from ..jobs.const import ATTR_IGNORE_CONDITIONS, JobCondition from ..jobs.const import ATTR_IGNORE_CONDITIONS, JobCondition
from .const import ATTR_JOBS from .const import ATTR_JOBS
@ -23,24 +21,10 @@ SCHEMA_OPTIONS = vol.Schema(
class APIJobs(CoreSysAttributes): class APIJobs(CoreSysAttributes):
"""Handle RESTful API for OS functions.""" """Handle RESTful API for OS functions."""
def _extract_job(self, request: web.Request) -> SupervisorJob: def _list_jobs(self) -> list[dict[str, Any]]:
"""Extract job from request or raise.""" """Return current job tree."""
try:
return self.sys_jobs.get_job(request.match_info["uuid"])
except JobNotFound:
raise APINotFound("Job does not exist") from None
def _list_jobs(self, start: SupervisorJob | None = None) -> list[dict[str, Any]]:
"""Return current job tree.
Jobs are added to the cache as they are created, so by default they are ordered oldest to newest.
This is correct ordering for child jobs as it makes logical sense to present those in
the order they occurred within the parent. For the list as a whole, sort from newest
to oldest, as it's likely any client is most interested in the newer ones.
"""
# Initially sort oldest to newest so all child lists end up in correct order
jobs_by_parent: dict[str | None, list[SupervisorJob]] = {} jobs_by_parent: dict[str | None, list[SupervisorJob]] = {}
for job in sorted(self.sys_jobs.jobs): for job in self.sys_jobs.jobs:
if job.internal: if job.internal:
continue continue
@ -49,16 +33,10 @@ class APIJobs(CoreSysAttributes):
else: else:
jobs_by_parent[job.parent_id].append(job) jobs_by_parent[job.parent_id].append(job)
# After parent-child organization, sort the root jobs only from newest to oldest
job_list: list[dict[str, Any]] = [] job_list: list[dict[str, Any]] = []
queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = ( queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = [
[(job_list, start)] (job_list, job) for job in jobs_by_parent.get(None, [])
if start ]
else [
(job_list, job)
for job in sorted(jobs_by_parent.get(None, []), reverse=True)
]
)
while queue: while queue:
(current_list, current_job) = queue.pop(0) (current_list, current_job) = queue.pop(0)
@ -71,10 +49,7 @@ class APIJobs(CoreSysAttributes):
if current_job.uuid in jobs_by_parent: if current_job.uuid in jobs_by_parent:
queue.extend( queue.extend(
[ [(child_jobs, job) for job in jobs_by_parent.get(current_job.uuid)]
(child_jobs, job)
for job in jobs_by_parent.get(current_job.uuid, [])
]
) )
return job_list return job_list
@ -95,27 +70,11 @@ class APIJobs(CoreSysAttributes):
if ATTR_IGNORE_CONDITIONS in body: if ATTR_IGNORE_CONDITIONS in body:
self.sys_jobs.ignore_conditions = body[ATTR_IGNORE_CONDITIONS] self.sys_jobs.ignore_conditions = body[ATTR_IGNORE_CONDITIONS]
await self.sys_jobs.save_data() self.sys_jobs.save_data()
await self.sys_resolution.evaluate.evaluate_system() await self.sys_resolution.evaluate.evaluate_system()
@api_process @api_process
async def reset(self, request: web.Request) -> None: async def reset(self, request: web.Request) -> None:
"""Reset options for JobManager.""" """Reset options for JobManager."""
await self.sys_jobs.reset_data() self.sys_jobs.reset_data()
@api_process
async def job_info(self, request: web.Request) -> dict[str, Any]:
"""Get details of a job by ID."""
job = self._extract_job(request)
return self._list_jobs(job)[0]
@api_process
async def remove_job(self, request: web.Request) -> None:
"""Remove a completed job."""
job = self._extract_job(request)
if not job.done:
raise APIError(f"Job {job.uuid} is not done!")
self.sys_jobs.remove_job(job)
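The ordering contract in the `_list_jobs` docstring (children oldest to newest, roots newest to oldest) is easy to check in isolation. A sketch with `SupervisorJob` reduced to the three fields the traversal needs; the `created` field is a hypothetical stand-in for whatever makes real jobs sortable:

```python
from dataclasses import dataclass, field
from typing import Any


@dataclass(order=True)
class Job:
    created: int  # stand-in for the sortable ordering of real jobs
    uuid: str = field(compare=False)
    parent_id: str | None = field(compare=False, default=None)


def list_jobs(jobs: list[Job]) -> list[dict[str, Any]]:
    # Oldest to newest first, so every child list ends up in creation order.
    by_parent: dict[str | None, list[Job]] = {}
    for job in sorted(jobs):
        by_parent.setdefault(job.parent_id, []).append(job)

    result: list[dict[str, Any]] = []
    # Root jobs newest to oldest, matching the API's presentation order.
    queue = [(result, job) for job in sorted(by_parent.get(None, []), reverse=True)]
    while queue:
        target, job = queue.pop(0)
        children: list[dict[str, Any]] = []
        target.append({"uuid": job.uuid, "child_jobs": children})
        queue.extend((children, child) for child in by_parent.get(job.uuid, []))
    return result


jobs = [Job(1, "a"), Job(2, "b"), Job(3, "a1", parent_id="a")]
tree = list_jobs(jobs)
assert [j["uuid"] for j in tree] == ["b", "a"]  # roots reversed
assert tree[1]["child_jobs"][0]["uuid"] == "a1"  # children in creation order
```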
View File
@ -1,17 +1,13 @@
"""Handle security part of this API.""" """Handle security part of this API."""
from collections.abc import Callable
import logging import logging
import re import re
from typing import Final from typing import Final
from urllib.parse import unquote from urllib.parse import unquote
from aiohttp.web import Request, Response, middleware from aiohttp.web import Request, RequestHandler, Response, middleware
from aiohttp.web_exceptions import HTTPBadRequest, HTTPForbidden, HTTPUnauthorized from aiohttp.web_exceptions import HTTPBadRequest, HTTPForbidden, HTTPUnauthorized
from awesomeversion import AwesomeVersion from awesomeversion import AwesomeVersion
from supervisor.homeassistant.const import LANDINGPAGE
from ...addons.const import RE_SLUG from ...addons.const import RE_SLUG
from ...const import ( from ...const import (
REQUEST_FROM, REQUEST_FROM,
@ -20,11 +16,10 @@ from ...const import (
ROLE_DEFAULT, ROLE_DEFAULT,
ROLE_HOMEASSISTANT, ROLE_HOMEASSISTANT,
ROLE_MANAGER, ROLE_MANAGER,
VALID_API_STATES, CoreState,
) )
from ...coresys import CoreSys, CoreSysAttributes from ...coresys import CoreSys, CoreSysAttributes
from ...utils import version_is_new_enough from ..utils import api_return_error, excract_supervisor_token
from ..utils import api_return_error, extract_supervisor_token
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
_CORE_VERSION: Final = AwesomeVersion("2023.3.4") _CORE_VERSION: Final = AwesomeVersion("2023.3.4")
@ -81,13 +76,6 @@ ADDONS_API_BYPASS: Final = re.compile(
r")$" r")$"
) )
# Home Assistant only
CORE_ONLY_PATHS: Final = re.compile(
r"^(?:"
r"/addons/" + RE_SLUG + "/sys_options"
r")$"
)
# Policy role add-on API access # Policy role add-on API access
ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = { ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
ROLE_DEFAULT: re.compile( ROLE_DEFAULT: re.compile(
@ -114,8 +102,6 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
r"|/addons(?:/" + RE_SLUG + r"/(?!security).+|/reload)?" r"|/addons(?:/" + RE_SLUG + r"/(?!security).+|/reload)?"
r"|/audio/.+" r"|/audio/.+"
r"|/auth/cache" r"|/auth/cache"
r"|/available_updates"
r"|/backups.*"
r"|/cli/.+" r"|/cli/.+"
r"|/core/.+" r"|/core/.+"
r"|/dns/.+" r"|/dns/.+"
@ -125,17 +111,16 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
r"|/hassos/.+" r"|/hassos/.+"
r"|/homeassistant/.+" r"|/homeassistant/.+"
r"|/host/.+" r"|/host/.+"
r"|/mounts.*"
r"|/multicast/.+" r"|/multicast/.+"
r"|/network/.+" r"|/network/.+"
r"|/observer/.+" r"|/observer/.+"
r"|/os/(?!datadisk/wipe).+" r"|/os/.+"
r"|/refresh_updates"
r"|/resolution/.+" r"|/resolution/.+"
r"|/security/.+" r"|/backups.*"
r"|/snapshots.*" r"|/snapshots.*"
r"|/store.*" r"|/store.*"
r"|/supervisor/.+" r"|/supervisor/.+"
r"|/security/.+"
r")$" r")$"
), ),
ROLE_ADMIN: re.compile( ROLE_ADMIN: re.compile(
@ -180,7 +165,9 @@ class SecurityMiddleware(CoreSysAttributes):
return unquoted return unquoted
@middleware @middleware
async def block_bad_requests(self, request: Request, handler: Callable) -> Response: async def block_bad_requests(
self, request: Request, handler: RequestHandler
) -> Response:
"""Process request and tblock commonly known exploit attempts.""" """Process request and tblock commonly known exploit attempts."""
if FILTERS.search(self._recursive_unquote(request.path)): if FILTERS.search(self._recursive_unquote(request.path)):
_LOGGER.warning( _LOGGER.warning(
@ -198,9 +185,15 @@ class SecurityMiddleware(CoreSysAttributes):
return await handler(request) return await handler(request)
@middleware @middleware
async def system_validation(self, request: Request, handler: Callable) -> Response: async def system_validation(
self, request: Request, handler: RequestHandler
) -> Response:
"""Check if core is ready to response.""" """Check if core is ready to response."""
if self.sys_core.state not in VALID_API_STATES: if self.sys_core.state not in (
CoreState.STARTUP,
CoreState.RUNNING,
CoreState.FREEZE,
):
return api_return_error( return api_return_error(
message=f"System is not ready with state: {self.sys_core.state}" message=f"System is not ready with state: {self.sys_core.state}"
) )
@ -208,10 +201,12 @@ class SecurityMiddleware(CoreSysAttributes):
return await handler(request) return await handler(request)
@middleware @middleware
async def token_validation(self, request: Request, handler: Callable) -> Response: async def token_validation(
self, request: Request, handler: RequestHandler
) -> Response:
"""Check security access of this layer.""" """Check security access of this layer."""
request_from: CoreSysAttributes | None = None request_from = None
supervisor_token = extract_supervisor_token(request) supervisor_token = excract_supervisor_token(request)
# Blacklist # Blacklist
if BLACKLIST.match(request.path): if BLACKLIST.match(request.path):
@ -233,9 +228,6 @@ class SecurityMiddleware(CoreSysAttributes):
if supervisor_token == self.sys_homeassistant.supervisor_token: if supervisor_token == self.sys_homeassistant.supervisor_token:
_LOGGER.debug("%s access from Home Assistant", request.path) _LOGGER.debug("%s access from Home Assistant", request.path)
request_from = self.sys_homeassistant request_from = self.sys_homeassistant
elif CORE_ONLY_PATHS.match(request.path):
_LOGGER.warning("Attempted access to %s from client besides Home Assistant")
raise HTTPForbidden()
# Host # Host
if supervisor_token == self.sys_plugins.cli.supervisor_token: if supervisor_token == self.sys_plugins.cli.supervisor_token:
@ -279,12 +271,11 @@ class SecurityMiddleware(CoreSysAttributes):
raise HTTPForbidden() raise HTTPForbidden()
@middleware @middleware
async def core_proxy(self, request: Request, handler: Callable) -> Response: async def core_proxy(self, request: Request, handler: RequestHandler) -> Response:
"""Validate user from Core API proxy.""" """Validate user from Core API proxy."""
if ( if (
request[REQUEST_FROM] != self.sys_homeassistant request[REQUEST_FROM] != self.sys_homeassistant
or self.sys_homeassistant.version == LANDINGPAGE or self.sys_homeassistant.version >= _CORE_VERSION
or version_is_new_enough(self.sys_homeassistant.version, _CORE_VERSION)
): ):
return await handler(request) return await handler(request)
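`CORE_ONLY_PATHS` is checked before the per-role tables, so even an admin token is rejected for those endpoints unless the request came from Home Assistant itself. A trimmed sketch of that matching flow; `RE_SLUG` and the role patterns are heavily simplified stand-ins for the real tables:

```python
import re

RE_SLUG = r"[-_\w]+"  # simplified add-on slug pattern

CORE_ONLY_PATHS = re.compile(r"^(?:/addons/" + RE_SLUG + r"/sys_options)$")
ROLE_ACCESS = {
    "default": re.compile(r"^(?:/addons/" + RE_SLUG + r"/info|/info)$"),
    "manager": re.compile(
        r"^(?:/addons(?:/" + RE_SLUG + r"/(?!security).+|/reload)?|/info)$"
    ),
}


def allowed(role: str, path: str, from_core: bool) -> bool:
    if CORE_ONLY_PATHS.match(path):
        return from_core  # only Home Assistant itself may call these
    pattern = ROLE_ACCESS.get(role)
    return bool(pattern and pattern.match(path))


assert allowed("default", "/info", from_core=False)
assert not allowed("default", "/addons/my_addon/sys_options", from_core=False)
assert allowed("manager", "/addons/my_addon/sys_options", from_core=True)
```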
View File
@ -1,17 +1,17 @@
"""Inits file for supervisor mounts REST API.""" """Inits file for supervisor mounts REST API."""
from typing import Any, cast from typing import Any
from aiohttp import web from aiohttp import web
import voluptuous as vol import voluptuous as vol
from ..const import ATTR_NAME, ATTR_STATE from ..const import ATTR_NAME, ATTR_STATE
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APINotFound from ..exceptions import APIError
from ..mounts.const import ATTR_DEFAULT_BACKUP_MOUNT, MountUsage from ..mounts.const import ATTR_DEFAULT_BACKUP_MOUNT, MountUsage
from ..mounts.mount import Mount from ..mounts.mount import Mount
from ..mounts.validate import SCHEMA_MOUNT_CONFIG, MountData from ..mounts.validate import SCHEMA_MOUNT_CONFIG
from .const import ATTR_MOUNTS, ATTR_USER_PATH from .const import ATTR_MOUNTS
from .utils import api_process, api_validate from .utils import api_process, api_validate
SCHEMA_OPTIONS = vol.Schema( SCHEMA_OPTIONS = vol.Schema(
@ -24,13 +24,6 @@ SCHEMA_OPTIONS = vol.Schema(
class APIMounts(CoreSysAttributes): class APIMounts(CoreSysAttributes):
"""Handle REST API for mounting options.""" """Handle REST API for mounting options."""
def _extract_mount(self, request: web.Request) -> Mount:
"""Extract mount from request or raise."""
name = request.match_info["mount"]
if name not in self.sys_mounts:
raise APINotFound(f"No mount exists with name {name}")
return self.sys_mounts.get(name)
@api_process @api_process
async def info(self, request: web.Request) -> dict[str, Any]: async def info(self, request: web.Request) -> dict[str, Any]:
"""Return MountManager info.""" """Return MountManager info."""
@ -39,13 +32,7 @@ class APIMounts(CoreSysAttributes):
if self.sys_mounts.default_backup_mount if self.sys_mounts.default_backup_mount
else None, else None,
ATTR_MOUNTS: [ ATTR_MOUNTS: [
mount.to_dict() mount.to_dict() | {ATTR_STATE: mount.state}
| {
ATTR_STATE: mount.state,
ATTR_USER_PATH: mount.container_where.as_posix()
if mount.container_where
else None,
}
for mount in self.sys_mounts.mounts for mount in self.sys_mounts.mounts
], ],
} }
@ -66,15 +53,15 @@ class APIMounts(CoreSysAttributes):
else: else:
self.sys_mounts.default_backup_mount = mount self.sys_mounts.default_backup_mount = mount
await self.sys_mounts.save_data() self.sys_mounts.save_data()
@api_process @api_process
async def create_mount(self, request: web.Request) -> None: async def create_mount(self, request: web.Request) -> None:
"""Create a new mount in supervisor.""" """Create a new mount in supervisor."""
body = cast(MountData, await api_validate(SCHEMA_MOUNT_CONFIG, request)) body = await api_validate(SCHEMA_MOUNT_CONFIG, request)
if body["name"] in self.sys_mounts: if body[ATTR_NAME] in self.sys_mounts:
raise APIError(f"A mount already exists with name {body['name']}") raise APIError(f"A mount already exists with name {body[ATTR_NAME]}")
mount = Mount.from_dict(self.coresys, body) mount = Mount.from_dict(self.coresys, body)
await self.sys_mounts.create_mount(mount) await self.sys_mounts.create_mount(mount)
@ -87,20 +74,19 @@ class APIMounts(CoreSysAttributes):
if not self.sys_mounts.default_backup_mount: if not self.sys_mounts.default_backup_mount:
self.sys_mounts.default_backup_mount = mount self.sys_mounts.default_backup_mount = mount
await self.sys_mounts.save_data() self.sys_mounts.save_data()
@api_process @api_process
async def update_mount(self, request: web.Request) -> None: async def update_mount(self, request: web.Request) -> None:
"""Update an existing mount in supervisor.""" """Update an existing mount in supervisor."""
current = self._extract_mount(request) name = request.match_info.get("mount")
name_schema = vol.Schema( name_schema = vol.Schema(
{vol.Optional(ATTR_NAME, default=current.name): current.name}, {vol.Optional(ATTR_NAME, default=name): name}, extra=vol.ALLOW_EXTRA
extra=vol.ALLOW_EXTRA,
)
body = cast(
MountData,
await api_validate(vol.All(name_schema, SCHEMA_MOUNT_CONFIG), request),
) )
body = await api_validate(vol.All(name_schema, SCHEMA_MOUNT_CONFIG), request)
if name not in self.sys_mounts:
raise APIError(f"No mount exists with name {name}")
mount = Mount.from_dict(self.coresys, body) mount = Mount.from_dict(self.coresys, body)
await self.sys_mounts.create_mount(mount) await self.sys_mounts.create_mount(mount)
@ -113,26 +99,26 @@ class APIMounts(CoreSysAttributes):
elif self.sys_mounts.default_backup_mount == mount: elif self.sys_mounts.default_backup_mount == mount:
self.sys_mounts.default_backup_mount = None self.sys_mounts.default_backup_mount = None
await self.sys_mounts.save_data() self.sys_mounts.save_data()
@api_process @api_process
async def delete_mount(self, request: web.Request) -> None: async def delete_mount(self, request: web.Request) -> None:
"""Delete an existing mount in supervisor.""" """Delete an existing mount in supervisor."""
current = self._extract_mount(request) name = request.match_info.get("mount")
mount = await self.sys_mounts.remove_mount(current.name) mount = await self.sys_mounts.remove_mount(name)
# If it was a backup mount, reload backups # If it was a backup mount, reload backups
if mount.usage == MountUsage.BACKUP: if mount.usage == MountUsage.BACKUP:
self.sys_create_task(self.sys_backups.reload()) self.sys_create_task(self.sys_backups.reload())
await self.sys_mounts.save_data() self.sys_mounts.save_data()
@api_process @api_process
async def reload_mount(self, request: web.Request) -> None: async def reload_mount(self, request: web.Request) -> None:
"""Reload an existing mount in supervisor.""" """Reload an existing mount in supervisor."""
mount = self._extract_mount(request) name = request.match_info.get("mount")
await self.sys_mounts.reload_mount(mount.name) await self.sys_mounts.reload_mount(name)
# If it's a backup mount, reload backups # If it's a backup mount, reload backups
if mount.usage == MountUsage.BACKUP: if self.sys_mounts.get(name).usage == MountUsage.BACKUP:
self.sys_create_task(self.sys_backups.reload()) self.sys_create_task(self.sys_backups.reload())
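The `update_mount` handler pins the mount name with a small voluptuous trick: the schema both defaults `name` to the existing mount's name and, because a plain value in a schema means literal equality, rejects any attempt to rename. A runnable check of that behavior, with a hypothetical mount name:

```python
import voluptuous as vol

current_name = "backup_share"  # stands in for current.name

name_schema = vol.Schema(
    {vol.Optional("name", default=current_name): current_name},
    extra=vol.ALLOW_EXTRA,
)

assert name_schema({})["name"] == "backup_share"  # name filled in by default
assert name_schema({"name": "backup_share", "usage": "backup"})["usage"] == "backup"

try:
    name_schema({"name": "other"})  # renames are rejected
except vol.Invalid as err:
    print(f"rejected: {err}")
```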
View File
@ -1,5 +1,4 @@
"""Init file for Supervisor Multicast RESTful API.""" """Init file for Supervisor Multicast RESTful API."""
import asyncio import asyncio
from collections.abc import Awaitable from collections.abc import Awaitable
import logging import logging
@ -24,7 +23,8 @@ from ..const import (
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..exceptions import APIError from ..exceptions import APIError
from ..validate import version_tag from ..validate import version_tag
from .utils import api_process, api_validate from .const import CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
@ -69,6 +69,11 @@ class APIMulticast(CoreSysAttributes):
raise APIError(f"Version {version} is already in use") raise APIError(f"Version {version} is already in use")
await asyncio.shield(self.sys_plugins.multicast.update(version)) await asyncio.shield(self.sys_plugins.multicast.update(version))
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return Multicast Docker logs."""
return self.sys_plugins.multicast.logs()
@api_process @api_process
def restart(self, request: web.Request) -> Awaitable[None]: def restart(self, request: web.Request) -> Awaitable[None]:
"""Restart Multicast plugin.""" """Restart Multicast plugin."""
View File
@ -1,8 +1,8 @@
"""REST API for network.""" """REST API for network."""
import asyncio import asyncio
from collections.abc import Awaitable from collections.abc import Awaitable
from ipaddress import IPv4Address, IPv4Interface, IPv6Address, IPv6Interface from dataclasses import replace
from ipaddress import ip_address, ip_interface
from typing import Any from typing import Any
from aiohttp import web from aiohttp import web
@ -10,7 +10,6 @@ import voluptuous as vol
from ..const import ( from ..const import (
ATTR_ACCESSPOINTS, ATTR_ACCESSPOINTS,
ATTR_ADDR_GEN_MODE,
ATTR_ADDRESS, ATTR_ADDRESS,
ATTR_AUTH, ATTR_AUTH,
ATTR_CONNECTED, ATTR_CONNECTED,
@ -23,7 +22,6 @@ from ..const import (
ATTR_ID, ATTR_ID,
ATTR_INTERFACE, ATTR_INTERFACE,
ATTR_INTERFACES, ATTR_INTERFACES,
ATTR_IP6_PRIVACY,
ATTR_IPV4, ATTR_IPV4,
ATTR_IPV6, ATTR_IPV6,
ATTR_MAC, ATTR_MAC,
@ -40,43 +38,28 @@ from ..const import (
ATTR_TYPE, ATTR_TYPE,
ATTR_VLAN, ATTR_VLAN,
ATTR_WIFI, ATTR_WIFI,
DOCKER_IPV4_NETWORK_MASK,
DOCKER_NETWORK, DOCKER_NETWORK,
DOCKER_NETWORK_MASK,
) )
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APINotFound, HostNetworkNotFound from ..exceptions import APIError, HostNetworkNotFound
from ..host.configuration import ( from ..host.configuration import (
AccessPoint, AccessPoint,
Interface, Interface,
InterfaceAddrGenMode,
InterfaceIp6Privacy,
InterfaceMethod, InterfaceMethod,
Ip6Setting,
IpConfig, IpConfig,
IpSetting,
VlanConfig, VlanConfig,
WifiConfig, WifiConfig,
) )
from ..host.const import AuthMethod, InterfaceType, WifiMode from ..host.const import AuthMethod, InterfaceType, WifiMode
from .utils import api_process, api_validate from .utils import api_process, api_validate
_SCHEMA_IPV4_CONFIG = vol.Schema( _SCHEMA_IP_CONFIG = vol.Schema(
{ {
vol.Optional(ATTR_ADDRESS): [vol.Coerce(IPv4Interface)], vol.Optional(ATTR_ADDRESS): [vol.Coerce(ip_interface)],
vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod), vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod),
vol.Optional(ATTR_GATEWAY): vol.Coerce(IPv4Address), vol.Optional(ATTR_GATEWAY): vol.Coerce(ip_address),
vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(IPv4Address)], vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(ip_address)],
}
)
_SCHEMA_IPV6_CONFIG = vol.Schema(
{
vol.Optional(ATTR_ADDRESS): [vol.Coerce(IPv6Interface)],
vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod),
vol.Optional(ATTR_ADDR_GEN_MODE): vol.Coerce(InterfaceAddrGenMode),
vol.Optional(ATTR_IP6_PRIVACY): vol.Coerce(InterfaceIp6Privacy),
vol.Optional(ATTR_GATEWAY): vol.Coerce(IPv6Address),
vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(IPv6Address)],
} }
) )
@ -93,31 +76,18 @@ _SCHEMA_WIFI_CONFIG = vol.Schema(
# pylint: disable=no-value-for-parameter # pylint: disable=no-value-for-parameter
SCHEMA_UPDATE = vol.Schema( SCHEMA_UPDATE = vol.Schema(
{ {
vol.Optional(ATTR_IPV4): _SCHEMA_IPV4_CONFIG, vol.Optional(ATTR_IPV4): _SCHEMA_IP_CONFIG,
vol.Optional(ATTR_IPV6): _SCHEMA_IPV6_CONFIG, vol.Optional(ATTR_IPV6): _SCHEMA_IP_CONFIG,
vol.Optional(ATTR_WIFI): _SCHEMA_WIFI_CONFIG, vol.Optional(ATTR_WIFI): _SCHEMA_WIFI_CONFIG,
vol.Optional(ATTR_ENABLED): vol.Boolean(), vol.Optional(ATTR_ENABLED): vol.Boolean(),
} }
) )
def ip4config_struct(config: IpConfig, setting: IpSetting) -> dict[str, Any]: def ipconfig_struct(config: IpConfig) -> dict[str, Any]:
"""Return a dict with information about IPv4 configuration.""" """Return a dict with information about ip configuration."""
return { return {
ATTR_METHOD: setting.method, ATTR_METHOD: config.method,
ATTR_ADDRESS: [address.with_prefixlen for address in config.address],
ATTR_NAMESERVERS: [str(address) for address in config.nameservers],
ATTR_GATEWAY: str(config.gateway) if config.gateway else None,
ATTR_READY: config.ready,
}
def ip6config_struct(config: IpConfig, setting: Ip6Setting) -> dict[str, Any]:
"""Return a dict with information about IPv6 configuration."""
return {
ATTR_METHOD: setting.method,
ATTR_ADDR_GEN_MODE: setting.addr_gen_mode,
ATTR_IP6_PRIVACY: setting.ip6_privacy,
ATTR_ADDRESS: [address.with_prefixlen for address in config.address], ATTR_ADDRESS: [address.with_prefixlen for address in config.address],
ATTR_NAMESERVERS: [str(address) for address in config.nameservers], ATTR_NAMESERVERS: [str(address) for address in config.nameservers],
ATTR_GATEWAY: str(config.gateway) if config.gateway else None, ATTR_GATEWAY: str(config.gateway) if config.gateway else None,
@ -152,12 +122,8 @@ def interface_struct(interface: Interface) -> dict[str, Any]:
ATTR_CONNECTED: interface.connected, ATTR_CONNECTED: interface.connected,
ATTR_PRIMARY: interface.primary, ATTR_PRIMARY: interface.primary,
ATTR_MAC: interface.mac, ATTR_MAC: interface.mac,
ATTR_IPV4: ip4config_struct(interface.ipv4, interface.ipv4setting) ATTR_IPV4: ipconfig_struct(interface.ipv4) if interface.ipv4 else None,
if interface.ipv4 and interface.ipv4setting ATTR_IPV6: ipconfig_struct(interface.ipv6) if interface.ipv6 else None,
else None,
ATTR_IPV6: ip6config_struct(interface.ipv6, interface.ipv6setting)
if interface.ipv6 and interface.ipv6setting
else None,
ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None, ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None,
ATTR_VLAN: vlan_struct(interface.vlan) if interface.vlan else None, ATTR_VLAN: vlan_struct(interface.vlan) if interface.vlan else None,
} }
@ -191,7 +157,7 @@ class APINetwork(CoreSysAttributes):
except HostNetworkNotFound: except HostNetworkNotFound:
pass pass
raise APINotFound(f"Interface {name} does not exist") from None raise APIError(f"Interface {name} does not exist") from None
@api_process @api_process
async def info(self, request: web.Request) -> dict[str, Any]: async def info(self, request: web.Request) -> dict[str, Any]:
@ -203,7 +169,7 @@ class APINetwork(CoreSysAttributes):
], ],
ATTR_DOCKER: { ATTR_DOCKER: {
ATTR_INTERFACE: DOCKER_NETWORK, ATTR_INTERFACE: DOCKER_NETWORK,
ATTR_ADDRESS: str(DOCKER_IPV4_NETWORK_MASK), ATTR_ADDRESS: str(DOCKER_NETWORK_MASK),
ATTR_GATEWAY: str(self.sys_docker.network.gateway), ATTR_GATEWAY: str(self.sys_docker.network.gateway),
ATTR_DNS: str(self.sys_docker.network.dns), ATTR_DNS: str(self.sys_docker.network.dns),
}, },
@ -214,14 +180,14 @@ class APINetwork(CoreSysAttributes):
@api_process @api_process
async def interface_info(self, request: web.Request) -> dict[str, Any]: async def interface_info(self, request: web.Request) -> dict[str, Any]:
"""Return network information for a interface.""" """Return network information for a interface."""
interface = self._get_interface(request.match_info[ATTR_INTERFACE]) interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))
return interface_struct(interface) return interface_struct(interface)
@api_process @api_process
async def interface_update(self, request: web.Request) -> None: async def interface_update(self, request: web.Request) -> None:
"""Update the configuration of an interface.""" """Update the configuration of an interface."""
interface = self._get_interface(request.match_info[ATTR_INTERFACE]) interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))
# Validate data # Validate data
body = await api_validate(SCHEMA_UPDATE, request) body = await api_validate(SCHEMA_UPDATE, request)
@ -231,32 +197,24 @@ class APINetwork(CoreSysAttributes):
# Apply config # Apply config
for key, config in body.items(): for key, config in body.items():
if key == ATTR_IPV4: if key == ATTR_IPV4:
interface.ipv4setting = IpSetting( interface.ipv4 = replace(
method=config.get(ATTR_METHOD, InterfaceMethod.STATIC), interface.ipv4
address=config.get(ATTR_ADDRESS, []), or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
gateway=config.get(ATTR_GATEWAY), **config,
nameservers=config.get(ATTR_NAMESERVERS, []),
) )
elif key == ATTR_IPV6: elif key == ATTR_IPV6:
interface.ipv6setting = Ip6Setting( interface.ipv6 = replace(
method=config.get(ATTR_METHOD, InterfaceMethod.STATIC), interface.ipv6
addr_gen_mode=config.get( or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
ATTR_ADDR_GEN_MODE, InterfaceAddrGenMode.DEFAULT **config,
),
ip6_privacy=config.get(
ATTR_IP6_PRIVACY, InterfaceIp6Privacy.DEFAULT
),
address=config.get(ATTR_ADDRESS, []),
gateway=config.get(ATTR_GATEWAY),
nameservers=config.get(ATTR_NAMESERVERS, []),
) )
elif key == ATTR_WIFI: elif key == ATTR_WIFI:
interface.wifi = WifiConfig( interface.wifi = replace(
mode=config.get(ATTR_MODE, WifiMode.INFRASTRUCTURE), interface.wifi
ssid=config.get(ATTR_SSID, ""), or WifiConfig(
auth=config.get(ATTR_AUTH, AuthMethod.OPEN), WifiMode.INFRASTRUCTURE, "", AuthMethod.OPEN, None, None
psk=config.get(ATTR_PSK, None), ),
signal=None, **config,
) )
elif key == ATTR_ENABLED: elif key == ATTR_ENABLED:
interface.enabled = config interface.enabled = config
@ -273,7 +231,7 @@ class APINetwork(CoreSysAttributes):
@api_process @api_process
async def scan_accesspoints(self, request: web.Request) -> dict[str, Any]: async def scan_accesspoints(self, request: web.Request) -> dict[str, Any]:
"""Scan and return a list of available networks.""" """Scan and return a list of available networks."""
interface = self._get_interface(request.match_info[ATTR_INTERFACE]) interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))
# Only wlan is supported # Only wlan is supported
if interface.type != InterfaceType.WIRELESS: if interface.type != InterfaceType.WIRELESS:
@ -286,10 +244,8 @@ class APINetwork(CoreSysAttributes):
@api_process @api_process
async def create_vlan(self, request: web.Request) -> None: async def create_vlan(self, request: web.Request) -> None:
"""Create a new vlan.""" """Create a new vlan."""
interface = self._get_interface(request.match_info[ATTR_INTERFACE]) interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))
vlan = int(request.match_info.get(ATTR_VLAN, -1)) vlan = int(request.match_info.get(ATTR_VLAN))
if vlan < 0:
raise APIError(f"Invalid vlan specified: {vlan}")
# Only ethernet is supported # Only ethernet is supported
if interface.type != InterfaceType.ETHERNET: if interface.type != InterfaceType.ETHERNET:
@ -300,28 +256,24 @@ class APINetwork(CoreSysAttributes):
vlan_config = VlanConfig(vlan, interface.name) vlan_config = VlanConfig(vlan, interface.name)
ipv4_setting = None ipv4_config = None
if ATTR_IPV4 in body: if ATTR_IPV4 in body:
ipv4_setting = IpSetting( ipv4_config = IpConfig(
method=body[ATTR_IPV4].get(ATTR_METHOD, InterfaceMethod.AUTO), body[ATTR_IPV4].get(ATTR_METHOD, InterfaceMethod.AUTO),
address=body[ATTR_IPV4].get(ATTR_ADDRESS, []), body[ATTR_IPV4].get(ATTR_ADDRESS, []),
gateway=body[ATTR_IPV4].get(ATTR_GATEWAY, None), body[ATTR_IPV4].get(ATTR_GATEWAY, None),
nameservers=body[ATTR_IPV4].get(ATTR_NAMESERVERS, []), body[ATTR_IPV4].get(ATTR_NAMESERVERS, []),
None,
) )
ipv6_setting = None ipv6_config = None
if ATTR_IPV6 in body: if ATTR_IPV6 in body:
ipv6_setting = Ip6Setting( ipv6_config = IpConfig(
method=body[ATTR_IPV6].get(ATTR_METHOD, InterfaceMethod.AUTO), body[ATTR_IPV6].get(ATTR_METHOD, InterfaceMethod.AUTO),
addr_gen_mode=body[ATTR_IPV6].get( body[ATTR_IPV6].get(ATTR_ADDRESS, []),
ATTR_ADDR_GEN_MODE, InterfaceAddrGenMode.DEFAULT body[ATTR_IPV6].get(ATTR_GATEWAY, None),
), body[ATTR_IPV6].get(ATTR_NAMESERVERS, []),
ip6_privacy=body[ATTR_IPV6].get( None,
ATTR_IP6_PRIVACY, InterfaceIp6Privacy.DEFAULT
),
address=body[ATTR_IPV6].get(ATTR_ADDRESS, []),
gateway=body[ATTR_IPV6].get(ATTR_GATEWAY, None),
nameservers=body[ATTR_IPV6].get(ATTR_NAMESERVERS, []),
) )
vlan_interface = Interface( vlan_interface = Interface(
@ -332,10 +284,8 @@ class APINetwork(CoreSysAttributes):
True, True,
False, False,
InterfaceType.VLAN, InterfaceType.VLAN,
None, ipv4_config,
ipv4_setting, ipv6_config,
None,
ipv6_setting,
None, None,
vlan_config, vlan_config,
) )
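The split into `_SCHEMA_IPV4_CONFIG` and `_SCHEMA_IPV6_CONFIG` is the user-visible change here: each family now validates against its own address types instead of the generic `ip_interface`/`ip_address` coercers, so a v6 literal in a v4 body fails early. A small check of the IPv4 half; the `method` values are simplified to plain strings where the real schema coerces to `InterfaceMethod`:

```python
from ipaddress import IPv4Address, IPv4Interface

import voluptuous as vol

_SCHEMA_IPV4_CONFIG = vol.Schema(
    {
        vol.Optional("address"): [vol.Coerce(IPv4Interface)],
        vol.Optional("method"): vol.In(["static", "auto", "disabled"]),
        vol.Optional("gateway"): vol.Coerce(IPv4Address),
        vol.Optional("nameservers"): [vol.Coerce(IPv4Address)],
    }
)

body = _SCHEMA_IPV4_CONFIG(
    {"address": ["192.168.1.5/24"], "gateway": "192.168.1.1", "method": "static"}
)
assert body["address"][0].with_prefixlen == "192.168.1.5/24"

try:
    _SCHEMA_IPV4_CONFIG({"gateway": "fe80::1"})  # IPv6 literal now rejected
except vol.Invalid as err:
    print(f"rejected: {err}")
```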
View File
@ -1,5 +1,4 @@
"""Init file for Supervisor Observer RESTful API.""" """Init file for Supervisor Observer RESTful API."""
import asyncio import asyncio
import logging import logging
from typing import Any from typing import Any
View File
@ -1,59 +1,47 @@
"""Init file for Supervisor HassOS RESTful API.""" """Init file for Supervisor HassOS RESTful API."""
import asyncio import asyncio
from collections.abc import Awaitable from collections.abc import Awaitable
import logging import logging
import re
from typing import Any from typing import Any
from aiohttp import web from aiohttp import web
import voluptuous as vol import voluptuous as vol
from ..const import ( from ..const import (
ATTR_ACTIVITY_LED,
ATTR_BOARD, ATTR_BOARD,
ATTR_BOOT, ATTR_BOOT,
ATTR_DEVICES, ATTR_DEVICES,
ATTR_DISK_LED,
ATTR_HEARTBEAT_LED,
ATTR_ID, ATTR_ID,
ATTR_NAME, ATTR_NAME,
ATTR_POWER_LED,
ATTR_SERIAL, ATTR_SERIAL,
ATTR_SIZE, ATTR_SIZE,
ATTR_STATE,
ATTR_SWAP_SIZE,
ATTR_SWAPPINESS,
ATTR_UPDATE_AVAILABLE, ATTR_UPDATE_AVAILABLE,
ATTR_VERSION, ATTR_VERSION,
ATTR_VERSION_LATEST, ATTR_VERSION_LATEST,
) )
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..exceptions import APINotFound, BoardInvalidError from ..exceptions import BoardInvalidError
from ..resolution.const import ContextType, IssueType, SuggestionType from ..resolution.const import ContextType, IssueType, SuggestionType
from ..validate import version_tag from ..validate import version_tag
from .const import ( from .const import (
ATTR_BOOT_SLOT,
ATTR_BOOT_SLOTS,
ATTR_DATA_DISK, ATTR_DATA_DISK,
ATTR_DEV_PATH, ATTR_DEV_PATH,
ATTR_DEVICE, ATTR_DEVICE,
ATTR_DISK_LED,
ATTR_DISKS, ATTR_DISKS,
ATTR_HEARTBEAT_LED,
ATTR_MODEL, ATTR_MODEL,
ATTR_STATUS, ATTR_POWER_LED,
ATTR_SYSTEM_HEALTH_LED,
ATTR_VENDOR, ATTR_VENDOR,
BootSlot,
) )
from .utils import api_process, api_validate from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
# pylint: disable=no-value-for-parameter
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag}) SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})
SCHEMA_SET_BOOT_SLOT = vol.Schema({vol.Required(ATTR_BOOT_SLOT): vol.Coerce(BootSlot)})
SCHEMA_DISK = vol.Schema({vol.Required(ATTR_DEVICE): str}) SCHEMA_DISK = vol.Schema({vol.Required(ATTR_DEVICE): str})
# pylint: disable=no-value-for-parameter
SCHEMA_YELLOW_OPTIONS = vol.Schema( SCHEMA_YELLOW_OPTIONS = vol.Schema(
{ {
vol.Optional(ATTR_DISK_LED): vol.Boolean(), vol.Optional(ATTR_DISK_LED): vol.Boolean(),
@ -61,23 +49,6 @@ SCHEMA_YELLOW_OPTIONS = vol.Schema(
vol.Optional(ATTR_POWER_LED): vol.Boolean(), vol.Optional(ATTR_POWER_LED): vol.Boolean(),
} }
) )
SCHEMA_GREEN_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_ACTIVITY_LED): vol.Boolean(),
vol.Optional(ATTR_POWER_LED): vol.Boolean(),
vol.Optional(ATTR_SYSTEM_HEALTH_LED): vol.Boolean(),
}
)
RE_SWAP_SIZE = re.compile(r"^\d+([KMG](i?B)?|B)?$", re.IGNORECASE)
SCHEMA_SWAP_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_SWAP_SIZE): vol.Match(RE_SWAP_SIZE),
vol.Optional(ATTR_SWAPPINESS): vol.All(int, vol.Range(min=0, max=200)),
}
)
# pylint: enable=no-value-for-parameter
class APIOS(CoreSysAttributes): class APIOS(CoreSysAttributes):
@ -93,15 +64,6 @@ class APIOS(CoreSysAttributes):
ATTR_BOARD: self.sys_os.board, ATTR_BOARD: self.sys_os.board,
ATTR_BOOT: self.sys_dbus.rauc.boot_slot, ATTR_BOOT: self.sys_dbus.rauc.boot_slot,
ATTR_DATA_DISK: self.sys_os.datadisk.disk_used_id, ATTR_DATA_DISK: self.sys_os.datadisk.disk_used_id,
ATTR_BOOT_SLOTS: {
slot.bootname: {
ATTR_STATE: slot.state,
ATTR_STATUS: slot.boot_status,
ATTR_VERSION: slot.bundle_version,
}
for slot in self.sys_os.slots
if slot.bootname
},
} }
@api_process @api_process
@ -124,17 +86,6 @@ class APIOS(CoreSysAttributes):
await asyncio.shield(self.sys_os.datadisk.migrate_disk(body[ATTR_DEVICE])) await asyncio.shield(self.sys_os.datadisk.migrate_disk(body[ATTR_DEVICE]))
@api_process
def wipe_data(self, request: web.Request) -> Awaitable[None]:
"""Trigger data disk wipe on Host."""
return asyncio.shield(self.sys_os.datadisk.wipe_disk())
@api_process
async def set_boot_slot(self, request: web.Request) -> None:
"""Change the active boot slot and reboot into it."""
body = await api_validate(SCHEMA_SET_BOOT_SLOT, request)
await asyncio.shield(self.sys_os.set_boot_slot(body[ATTR_BOOT_SLOT]))
@api_process @api_process
async def list_data(self, request: web.Request) -> dict[str, Any]: async def list_data(self, request: web.Request) -> dict[str, Any]:
"""Return possible data targets.""" """Return possible data targets."""
@ -154,35 +105,6 @@ class APIOS(CoreSysAttributes):
], ],
} }
@api_process
async def boards_green_info(self, request: web.Request) -> dict[str, Any]:
"""Get green board settings."""
return {
ATTR_ACTIVITY_LED: self.sys_dbus.agent.board.green.activity_led,
ATTR_POWER_LED: self.sys_dbus.agent.board.green.power_led,
ATTR_SYSTEM_HEALTH_LED: self.sys_dbus.agent.board.green.user_led,
}
@api_process
async def boards_green_options(self, request: web.Request) -> None:
"""Update green board settings."""
body = await api_validate(SCHEMA_GREEN_OPTIONS, request)
if ATTR_ACTIVITY_LED in body:
await self.sys_dbus.agent.board.green.set_activity_led(
body[ATTR_ACTIVITY_LED]
)
if ATTR_POWER_LED in body:
await self.sys_dbus.agent.board.green.set_power_led(body[ATTR_POWER_LED])
if ATTR_SYSTEM_HEALTH_LED in body:
await self.sys_dbus.agent.board.green.set_user_led(
body[ATTR_SYSTEM_HEALTH_LED]
)
await self.sys_dbus.agent.board.green.save_data()
@api_process @api_process
async def boards_yellow_info(self, request: web.Request) -> dict[str, Any]: async def boards_yellow_info(self, request: web.Request) -> dict[str, Any]:
"""Get yellow board settings.""" """Get yellow board settings."""
@ -198,17 +120,14 @@ class APIOS(CoreSysAttributes):
body = await api_validate(SCHEMA_YELLOW_OPTIONS, request) body = await api_validate(SCHEMA_YELLOW_OPTIONS, request)
if ATTR_DISK_LED in body: if ATTR_DISK_LED in body:
await self.sys_dbus.agent.board.yellow.set_disk_led(body[ATTR_DISK_LED]) self.sys_dbus.agent.board.yellow.disk_led = body[ATTR_DISK_LED]
if ATTR_HEARTBEAT_LED in body: if ATTR_HEARTBEAT_LED in body:
await self.sys_dbus.agent.board.yellow.set_heartbeat_led( self.sys_dbus.agent.board.yellow.heartbeat_led = body[ATTR_HEARTBEAT_LED]
body[ATTR_HEARTBEAT_LED]
)
if ATTR_POWER_LED in body: if ATTR_POWER_LED in body:
await self.sys_dbus.agent.board.yellow.set_power_led(body[ATTR_POWER_LED]) self.sys_dbus.agent.board.yellow.power_led = body[ATTR_POWER_LED]
await self.sys_dbus.agent.board.yellow.save_data()
self.sys_resolution.create_issue( self.sys_resolution.create_issue(
IssueType.REBOOT_REQUIRED, IssueType.REBOOT_REQUIRED,
ContextType.SYSTEM, ContextType.SYSTEM,
@ -224,53 +143,3 @@ class APIOS(CoreSysAttributes):
) )
return {} return {}
@api_process
async def config_swap_info(self, request: web.Request) -> dict[str, Any]:
"""Get swap settings."""
if (
not self.coresys.os.available
or not self.coresys.os.version
or self.coresys.os.version < "15.0"
):
raise APINotFound(
"Home Assistant OS 15.0 or newer required for swap settings"
)
return {
ATTR_SWAP_SIZE: self.sys_dbus.agent.swap.swap_size,
ATTR_SWAPPINESS: self.sys_dbus.agent.swap.swappiness,
}
@api_process
async def config_swap_options(self, request: web.Request) -> None:
"""Update swap settings."""
if (
not self.coresys.os.available
or not self.coresys.os.version
or self.coresys.os.version < "15.0"
):
raise APINotFound(
"Home Assistant OS 15.0 or newer required for swap settings"
)
body = await api_validate(SCHEMA_SWAP_OPTIONS, request)
reboot_required = False
if ATTR_SWAP_SIZE in body:
old_size = self.sys_dbus.agent.swap.swap_size
await self.sys_dbus.agent.swap.set_swap_size(body[ATTR_SWAP_SIZE])
reboot_required = reboot_required or old_size != body[ATTR_SWAP_SIZE]
if ATTR_SWAPPINESS in body:
old_swappiness = self.sys_dbus.agent.swap.swappiness
await self.sys_dbus.agent.swap.set_swappiness(body[ATTR_SWAPPINESS])
reboot_required = reboot_required or old_swappiness != body[ATTR_SWAPPINESS]
if reboot_required:
self.sys_resolution.create_issue(
IssueType.REBOOT_REQUIRED,
ContextType.SYSTEM,
suggestions=[SuggestionType.EXECUTE_REBOOT],
)
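`RE_SWAP_SIZE` accepts a plain byte count or a K/M/G value with an optional `B` or `iB` suffix, case-insensitively, and `ATTR_SWAPPINESS` is range-checked from 0 to 200. A few quick checks against the regex exactly as written above:

```python
import re

RE_SWAP_SIZE = re.compile(r"^\d+([KMG](i?B)?|B)?$", re.IGNORECASE)

for value in ("33554432", "32MiB", "1G", "512kb"):
    assert RE_SWAP_SIZE.match(value), value

for value in ("32 MiB", "1.5G", "-1", "1T"):
    assert not RE_SWAP_SIZE.match(value), value
```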
View File
@ -1 +1 @@
!function(){function d(d){var e=document.createElement("script");e.src=d,document.body.appendChild(e)}if(/Edge?\/(12[4-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Firefox\/(12[5-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Chrom(ium|e)\/(109|1[1-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|(Maci|X1{2}).+ Version\/(17\.([5-9]|\d{2,})|(1[89]|[2-9]\d|\d{3,})\.\d+)([,.]\d+|)( \(\w+\)|)( Mobile\/\w+|) Safari\/|Chrome.+OPR\/(1{2}\d|1[2-9]\d|[2-9]\d{2}|\d{4,})\.\d+\.\d+|(CPU[ +]OS|iPhone[ +]OS|CPU[ +]iPhone|CPU IPhone OS|CPU iPad OS)[ +]+(15[._]([6-9]|\d{2,})|(1[6-9]|[2-9]\d|\d{3,})[._]\d+)([._]\d+|)|Android:?[ /-](12[4-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})(\.\d+|)(\.\d+|)|Mobile Safari.+OPR\/([89]\d|\d{3,})\.\d+\.\d+|Android.+Firefox\/(12[5-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Android.+Chrom(ium|e)\/(12[4-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|SamsungBrowser\/(2[5-9]|[3-9]\d|\d{3,})\.\d+|Home As{2}istant\/[\d.]+ \(.+; macOS (1[2-9]|[2-9]\d|\d{3,})\.\d+(\.\d+)?\)/.test(navigator.userAgent))try{new Function("import('/api/hassio/app/frontend_latest/entrypoint.35399ae87c70acf8.js')")()}catch(e){d("/api/hassio/app/frontend_es5/entrypoint.476bfed22da63267.js")}else d("/api/hassio/app/frontend_es5/entrypoint.476bfed22da63267.js")}() !function(){function n(n){var t=document.createElement("script");t.src=n,document.body.appendChild(t)}if(/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent))n("/api/hassio/app/frontend_es5/entrypoint-5yRSddAJzJ4.js");else try{new Function("import('/api/hassio/app/frontend_latest/entrypoint-qzB1D0O4L9U.js')")()}catch(t){n("/api/hassio/app/frontend_es5/entrypoint-5yRSddAJzJ4.js")}}()

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,2 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([[1047],{32594:function(e,t,r){r.d(t,{U:function(){return n}});var n=function(e){return e.stopPropagation()}},75054:function(e,t,r){r.r(t),r.d(t,{HaTimeDuration:function(){return f}});var n,a=r(88962),i=r(33368),o=r(71650),d=r(82390),u=r(69205),l=r(70906),s=r(91808),c=r(68144),v=r(79932),f=(r(47289),(0,s.Z)([(0,v.Mo)("ha-selector-duration")],(function(e,t){var r=function(t){(0,u.Z)(n,t);var r=(0,l.Z)(n);function n(){var t;(0,o.Z)(this,n);for(var a=arguments.length,i=new Array(a),u=0;u<a;u++)i[u]=arguments[u];return t=r.call.apply(r,[this].concat(i)),e((0,d.Z)(t)),t}return(0,i.Z)(n)}(t);return{F:r,d:[{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"selector",value:void 0},{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"value",value:void 0},{kind:"field",decorators:[(0,v.Cb)()],key:"label",value:void 0},{kind:"field",decorators:[(0,v.Cb)()],key:"helper",value:void 0},{kind:"field",decorators:[(0,v.Cb)({type:Boolean})],key:"disabled",value:function(){return!1}},{kind:"field",decorators:[(0,v.Cb)({type:Boolean})],key:"required",value:function(){return!0}},{kind:"method",key:"render",value:function(){var e;return(0,c.dy)(n||(n=(0,a.Z)([' <ha-duration-input .label="','" .helper="','" .data="','" .disabled="','" .required="','" ?enableDay="','"></ha-duration-input> '])),this.label,this.helper,this.value,this.disabled,this.required,null===(e=this.selector.duration)||void 0===e?void 0:e.enable_day)}}]}}),c.oi))}}]);
//# sourceMappingURL=1047-g7fFLS9eP4I.js.map
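
This added chunk is the compiled ha-selector-duration component from the 20230703.0 frontend build; the source map added alongside it points at src/components/ha-selector/ha-selector-duration.ts. Reconstructed from the identifiers the bundle preserves, it is roughly the following Lit element. Treat this as an approximate reconstruction, not the upstream source.

// Approximate source of the minified chunk above, recovered from its
// identifier names; the property defaults follow the compiled output.
import { html, LitElement } from "lit";
import { customElement, property } from "lit/decorators.js";

@customElement("ha-selector-duration")
export class HaTimeDuration extends LitElement {
  @property({ attribute: false }) public hass!: unknown;

  @property({ attribute: false }) public selector!: {
    duration?: { enable_day?: boolean };
  };

  @property({ attribute: false }) public value?: unknown;

  @property() public label?: string;

  @property() public helper?: string;

  @property({ type: Boolean }) public disabled = false;

  @property({ type: Boolean }) public required = true;

  protected render() {
    // Thin adapter: all real work happens in <ha-duration-input>.
    return html`
      <ha-duration-input
        .label=${this.label}
        .helper=${this.helper}
        .data=${this.value}
        .disabled=${this.disabled}
        .required=${this.required}
        ?enableDay=${this.selector.duration?.enable_day}
      ></ha-duration-input>
    `;
  }
}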

View File

@@ -0,0 +1 @@
{"version":3,"file":"1047-g7fFLS9eP4I.js","mappings":"yKAAO,IAAMA,EAAkB,SAACC,GAAE,OAAKA,EAAGD,iBAAiB,C,qLCQ9CE,G,UAAcC,EAAAA,EAAAA,GAAA,EAD1BC,EAAAA,EAAAA,IAAc,0BAAuB,SAAAC,EAAAC,GAAA,IACzBJ,EAAc,SAAAK,IAAAC,EAAAA,EAAAA,GAAAN,EAAAK,GAAA,IAAAE,GAAAC,EAAAA,EAAAA,GAAAR,GAAA,SAAAA,IAAA,IAAAS,GAAAC,EAAAA,EAAAA,GAAA,KAAAV,GAAA,QAAAW,EAAAC,UAAAC,OAAAC,EAAA,IAAAC,MAAAJ,GAAAK,EAAA,EAAAA,EAAAL,EAAAK,IAAAF,EAAAE,GAAAJ,UAAAI,GAAA,OAAAP,EAAAF,EAAAU,KAAAC,MAAAX,EAAA,OAAAY,OAAAL,IAAAX,GAAAiB,EAAAA,EAAAA,GAAAX,IAAAA,CAAA,QAAAY,EAAAA,EAAAA,GAAArB,EAAA,EAAAI,GAAA,OAAAkB,EAAdtB,EAAcuB,EAAA,EAAAC,KAAA,QAAAC,WAAA,EACxBC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEnDC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WAAmB,IAAAG,EACjB,OAAOC,EAAAA,EAAAA,IAAIC,IAAAA,GAAAC,EAAAA,EAAAA,GAAA,wIAEEC,KAAKC,MACJD,KAAKE,OACPF,KAAKP,MACDO,KAAKG,SACLH,KAAKI,SACkB,QADVR,EACZI,KAAKK,SAASC,gBAAQ,IAAAV,OAAA,EAAtBA,EAAwBW,WAG3C,IAAC,GA1BiCC,EAAAA,I","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/common/dom/stop_propagation.ts","https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/components/ha-selector/ha-selector-duration.ts"],"names":["stopPropagation","ev","HaTimeDuration","_decorate","customElement","_initialize","_LitElement","_LitElement2","_inherits","_super","_createSuper","_this","_classCallCheck","_len","arguments","length","args","Array","_key","call","apply","concat","_assertThisInitialized","_createClass","F","d","kind","decorators","property","attribute","key","value","type","Boolean","_this$selector$durati","html","_templateObject","_taggedTemplateLiteral","this","label","helper","disabled","required","selector","duration","enable_day","LitElement"],"sourceRoot":""}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1 +0,0 @@
{"version":3,"file":"1081.91949d686e61cc12.js","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20250401.0/src/components/ha-button-toggle-group.ts","https://raw.githubusercontent.com/home-assistant/frontend/20250401.0/src/components/ha-selector/ha-selector-button-toggle.ts"],"names":["_decorate","customElement","_initialize","_LitElement","F","constructor","args","d","kind","decorators","property","attribute","key","value","type","Boolean","queryAll","html","_t","_","this","buttons","map","button","iconPath","_t2","label","active","_handleClick","_t3","styleMap","width","fullWidth","length","dense","_this$_buttons","_buttons","forEach","async","updateComplete","shadowRoot","querySelector","style","margin","ev","currentTarget","fireEvent","static","css","_t4","LitElement","HaButtonToggleSelector","_this$selector$button","_this$selector$button2","_this$selector$button3","options","selector","button_toggle","option","translationKey","translation_key","localizeValue","localizedLabel","sort","a","b","caseInsensitiveStringCompare","hass","locale","language","toggleButtons","item","_valueChanged","_ev$detail","_this$value","stopPropagation","detail","target","disabled","undefined"],"mappings":"qXAWgCA,EAAAA,EAAAA,GAAA,EAD/BC,EAAAA,EAAAA,IAAc,4BAAyB,SAAAC,EAAAC,GAkIvC,OAAAC,EAlID,cACgCD,EAAoBE,WAAAA,IAAAC,GAAA,SAAAA,GAAAJ,EAAA,QAApBK,EAAA,EAAAC,KAAA,QAAAC,WAAA,EAC7BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,UAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEC,UAAW,aAAcG,KAAMC,WAAUH,IAAA,YAAAC,KAAAA,GAAA,OAClC,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEvBC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,QAAAC,KAAAA,GAAA,OAAgB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEhDO,EAAAA,EAAAA,IAAS,eAAaJ,IAAA,WAAAC,WAAA,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEvB,WACE,OAAOI,EAAAA,EAAAA,IAAIC,IAAAA,EAAAC,CAAA,uBAELC,KAAKC,QAAQC,KAAKC,GAClBA,EAAOC,UACHP,EAAAA,EAAAA,IAAIQ,IAAAA,EAAAN,CAAA,2GACOI,EAAOG,MACRH,EAAOC,SACND,EAAOV,MACNO,KAAKO,SAAWJ,EAAOV,MACxBO,KAAKQ,eAEhBX,EAAAA,EAAAA,IAAIY,IAAAA,EAAAV,CAAA,iHACMW,EAAAA,EAAAA,GAAS,CACfC,MAAOX,KAAKY,UACL,IAAMZ,KAAKC,QAAQY,OAAtB,IACA,YAGGb,KAAKc,MACLX,EAAOV,MACNO,KAAKO,SAAWJ,EAAOV,MACxBO,KAAKQ,aACXL,EAAOG,SAKxB,GAAC,CAAAlB,KAAA,SAAAI,IAAA,UAAAC,MAED,WAAoB,IAAAsB,EAEL,QAAbA,EAAAf,KAAKgB,gBAAQ,IAAAD,GAAbA,EAAeE,SAAQC,gBACff,EAAOgB,eAEXhB,EAAOiB,WAAYC,cAAc,UACjCC,MAAMC,OAAS,GAAG,GAExB,GAAC,CAAAnC,KAAA,SAAAI,IAAA,eAAAC,MAED,SAAqB+B,GACnBxB,KAAKO,OAASiB,EAAGC,cAAchC,OAC/BiC,EAAAA,EAAAA,GAAU1B,KAAM,gBAAiB,CAAEP,MAAOO,KAAKO,QACjD,GAAC,CAAAnB,KAAA,QAAAuC,QAAA,EAAAnC,IAAA,SAAAC,KAAAA,GAAA,OAEemC,EAAAA,EAAAA,IAAGC,IAAAA,EAAA9B,CAAA,u0CAzDoB+B,EAAAA,I,MCD5BC,GAAsBnD,EAAAA,EAAAA,GAAA,EADlCC,EAAAA,EAAAA,IAAc,+BAA4B,SAAAC,EAAAC,GA4F1C,OAAAC,EA5FD,cACmCD,EAAoBE,WAAAA,IAAAC,GAAA,SAAAA,GAAAJ,EAAA,QAApBK,EAAA,EAAAC,KAAA,QAAAC,WAAA,EAChCC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,gBAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAG9BC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEnDC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WAAmB,IAAAuC,EAAAC,EAAAC,EACjB,MAAMC,GACuB,QAA3BH,EAAAhC,KAAKoC,SAASC,qBAAa,IAAAL,GAAS,QAATA
,EAA3BA,EAA6BG,eAAO,IAAAH,OAAA,EAApCA,EAAsC9B,KAAKoC,GACvB,iBAAXA,EACFA,EACA,CAAE7C,MAAO6C,EAAQhC,MAAOgC,OAC1B,GAEDC,EAA4C,QAA9BN,EAAGjC,KAAKoC,SAASC,qBAAa,IAAAJ,OAAA,EAA3BA,EAA6BO,gBAEhDxC,KAAKyC,eAAiBF,GACxBJ,EAAQlB,SAASqB,IACf,MAAMI,EAAiB1C,KAAKyC,cAC1B,GAAGF,aAA0BD,EAAO7C,SAElCiD,IACFJ,EAAOhC,MAAQoC,EACjB,IAI2B,QAA/BR,EAAIlC,KAAKoC,SAASC,qBAAa,IAAAH,GAA3BA,EAA6BS,MAC/BR,EAAQQ,MAAK,CAACC,EAAGC,KACfC,EAAAA,EAAAA,IACEF,EAAEtC,MACFuC,EAAEvC,MACFN,KAAK+C,KAAKC,OAAOC,YAKvB,MAAMC,EAAgCf,EAAQjC,KAAKiD,IAAkB,CACnE7C,MAAO6C,EAAK7C,MACZb,MAAO0D,EAAK1D,UAGd,OAAOI,EAAAA,EAAAA,IAAIC,IAAAA,EAAAC,CAAA,iHACPC,KAAKM,MAEM4C,EACDlD,KAAKP,MACEO,KAAKoD,cAG5B,GAAC,CAAAhE,KAAA,SAAAI,IAAA,gBAAAC,MAED,SAAsB+B,GAAI,IAAA6B,EAAAC,EACxB9B,EAAG+B,kBAEH,MAAM9D,GAAiB,QAAT4D,EAAA7B,EAAGgC,cAAM,IAAAH,OAAA,EAATA,EAAW5D,QAAS+B,EAAGiC,OAAOhE,MACxCO,KAAK0D,eAAsBC,IAAVlE,GAAuBA,KAAqB,QAAhB6D,EAAMtD,KAAKP,aAAK,IAAA6D,EAAAA,EAAI,MAGrE5B,EAAAA,EAAAA,GAAU1B,KAAM,gBAAiB,CAC/BP,MAAOA,GAEX,GAAC,CAAAL,KAAA,QAAAuC,QAAA,EAAAnC,IAAA,SAAAC,KAAAA,GAAA,OAEemC,EAAAA,EAAAA,IAAGvB,IAAAA,EAAAN,CAAA,wLA5EuB+B,EAAAA,G"}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,2 +0,0 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([["12"],{5739:function(e,a,t){t.a(e,(async function(e,i){try{t.r(a),t.d(a,{HaNavigationSelector:()=>c});var d=t(73577),r=(t(71695),t(47021),t(57243)),n=t(50778),l=t(36522),o=t(63297),s=e([o]);o=(s.then?(await s)():s)[0];let u,h=e=>e,c=(0,d.Z)([(0,n.Mo)("ha-selector-navigation")],(function(e,a){return{F:class extends a{constructor(...a){super(...a),e(this)}},d:[{kind:"field",decorators:[(0,n.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,n.Cb)({attribute:!1})],key:"selector",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"value",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"label",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"helper",value:void 0},{kind:"field",decorators:[(0,n.Cb)({type:Boolean,reflect:!0})],key:"disabled",value(){return!1}},{kind:"field",decorators:[(0,n.Cb)({type:Boolean})],key:"required",value(){return!0}},{kind:"method",key:"render",value:function(){return(0,r.dy)(u||(u=h` <ha-navigation-picker .hass="${0}" .label="${0}" .value="${0}" .required="${0}" .disabled="${0}" .helper="${0}" @value-changed="${0}"></ha-navigation-picker> `),this.hass,this.label,this.value,this.required,this.disabled,this.helper,this._valueChanged)}},{kind:"method",key:"_valueChanged",value:function(e){(0,l.B)(this,"value-changed",{value:e.detail.value})}}]}}),r.oi);i()}catch(u){i(u)}}))}}]);
//# sourceMappingURL=12.ffa1bdc0a98802fa.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"12.ffa1bdc0a98802fa.js","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20250401.0/src/components/ha-selector/ha-selector-navigation.ts"],"names":["HaNavigationSelector","_decorate","customElement","_initialize","_LitElement","F","constructor","args","d","kind","decorators","property","attribute","key","value","type","Boolean","reflect","html","_t","_","this","hass","label","required","disabled","helper","_valueChanged","ev","fireEvent","detail","LitElement"],"mappings":"mVAQaA,GAAoBC,EAAAA,EAAAA,GAAA,EADhCC,EAAAA,EAAAA,IAAc,4BAAyB,SAAAC,EAAAC,GAiCvC,OAAAC,EAjCD,cACiCD,EAAoBE,WAAAA,IAAAC,GAAA,SAAAA,GAAAJ,EAAA,QAApBK,EAAA,EAAAC,KAAA,QAAAC,WAAA,EAC9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,QAASC,SAAS,KAAOJ,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAElEC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WACE,OAAOI,EAAAA,EAAAA,IAAIC,IAAAA,EAAAC,CAAA,mKAECC,KAAKC,KACJD,KAAKE,MACLF,KAAKP,MACFO,KAAKG,SACLH,KAAKI,SACPJ,KAAKK,OACEL,KAAKM,cAG5B,GAAC,CAAAlB,KAAA,SAAAI,IAAA,gBAAAC,MAED,SAAsBc,IACpBC,EAAAA,EAAAA,GAAUR,KAAM,gBAAiB,CAAEP,MAAOc,EAAGE,OAAOhB,OACtD,IAAC,GA/BuCiB,EAAAA,I"}

View File

@@ -1,2 +0,0 @@
(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([["1236"],{4121:function(){Intl.PluralRules&&"function"==typeof Intl.PluralRules.__addLocaleData&&Intl.PluralRules.__addLocaleData({data:{categories:{cardinal:["one","other"],ordinal:["one","two","few","other"]},fn:function(e,n){var t=String(e).split("."),a=!t[1],l=Number(t[0])==e,o=l&&t[0].slice(-1),r=l&&t[0].slice(-2);return n?1==o&&11!=r?"one":2==o&&12!=r?"two":3==o&&13!=r?"few":"other":1==e&&a?"one":"other"}},locale:"en"})}}]);
//# sourceMappingURL=1236.64ca65d0ea4d76d4.js.map
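
This removed chunk is not application code: it registers English plural-rule data with the Intl.PluralRules polyfill, and its source map points at @formatjs/intl-pluralrules/locale-data/en.js. Unpacked for readability, the select function it installs looks like the sketch below; the names are mine.

// Readable version of the locale function in the chunk above: given a number
// and whether ordinal rules apply, return its English plural category.
function enPluralCategory(n: number, ordinal: boolean): string {
  const parts = String(n).split(".");
  const noFraction = !parts[1];
  const isInteger = Number(parts[0]) === n;
  const last = isInteger ? parts[0].slice(-1) : "";
  const lastTwo = isInteger ? parts[0].slice(-2) : "";

  if (ordinal) {
    // 1st, 21st, 31st... but not 11th
    if (last === "1" && lastTwo !== "11") return "one";
    // 2nd, 22nd... but not 12th
    if (last === "2" && lastTwo !== "12") return "two";
    // 3rd, 23rd... but not 13th
    if (last === "3" && lastTwo !== "13") return "few";
    return "other";
  }
  // Cardinal: exactly 1 with no fraction digits is "one".
  return n === 1 && noFraction ? "one" : "other";
}

Once registered, new Intl.PluralRules("en", { type: "ordinal" }).select(22) returns "two", which is how the frontend picks between translation variants on engines that lack native plural rules.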

View File

@@ -1 +0,0 @@
{"version":3,"file":"1236.64ca65d0ea4d76d4.js","sources":["/unknown/node_modules/@formatjs/intl-pluralrules/locale-data/en.js"],"names":["Intl","PluralRules","__addLocaleData","n","ord","s","String","split","v0","t0","Number","n10","slice","n100"],"mappings":"wHAEIA,KAAKC,aAA2D,mBAArCD,KAAKC,YAAYC,iBAC9CF,KAAKC,YAAYC,gBAAgB,CAAC,KAAO,CAAC,WAAa,CAAC,SAAW,CAAC,MAAM,SAAS,QAAU,CAAC,MAAM,MAAM,MAAM,UAAU,GAAK,SAASC,EAAGC,GAC3I,IAAIC,EAAIC,OAAOH,GAAGI,MAAM,KAAMC,GAAMH,EAAE,GAAII,EAAKC,OAAOL,EAAE,KAAOF,EAAGQ,EAAMF,GAAMJ,EAAE,GAAGO,OAAO,GAAIC,EAAOJ,GAAMJ,EAAE,GAAGO,OAAO,GACvH,OAAIR,EAAmB,GAAPO,GAAoB,IAARE,EAAa,MAC9B,GAAPF,GAAoB,IAARE,EAAa,MAClB,GAAPF,GAAoB,IAARE,EAAa,MACzB,QACQ,GAALV,GAAUK,EAAK,MAAQ,OAChC,GAAG,OAAS,M"}

Some files were not shown because too many files have changed in this diff