mirror of https://github.com/home-assistant/supervisor.git
synced 2025-08-02 13:57:42 +00:00

Compare commits

No commits in common. "main" and "2023.06.1" have entirely different histories.
.devcontainer/devcontainer.json

@@ -1,51 +1,40 @@
{
"name": "Supervisor dev",
"image": "ghcr.io/home-assistant/devcontainer:2-supervisor",
"image": "ghcr.io/home-assistant/devcontainer:supervisor",
"containerEnv": {
"WORKSPACE_DIRECTORY": "${containerWorkspaceFolder}"
},
"remoteEnv": {
"PATH": "${containerEnv:VIRTUAL_ENV}/bin:${containerEnv:PATH}"
},
"appPort": ["9123:8123", "7357:4357"],
"postCreateCommand": "bash devcontainer_setup",
"postStartCommand": "bash devcontainer_bootstrap",
"postCreateCommand": "bash devcontainer_bootstrap",
"runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
"customizations": {
"vscode": {
"extensions": [
"charliermarsh.ruff",
"ms-python.pylint",
"ms-python.vscode-pylance",
"visualstudioexptteam.vscodeintellicode",
"redhat.vscode-yaml",
"esbenp.prettier-vscode",
"GitHub.vscode-pull-request-github"
],
"settings": {
"python.defaultInterpreterPath": "/home/vscode/.local/ha-venv/bin/python",
"python.pythonPath": "/home/vscode/.local/ha-venv/bin/python",
"python.terminal.activateEnvInCurrentTerminal": true,
"python.testing.pytestArgs": ["--no-cov"],
"pylint.importStrategy": "fromEnvironment",
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true,
"terminal.integrated.profiles.linux": {
"zsh": {
"path": "/usr/bin/zsh"
}
},
"terminal.integrated.defaultProfile.linux": "zsh",
"[python]": {
"editor.defaultFormatter": "charliermarsh.ruff"
}
"extensions": [
"ms-python.python",
"ms-python.vscode-pylance",
"visualstudioexptteam.vscodeintellicode",
"esbenp.prettier-vscode"
],
"mounts": ["type=volume,target=/var/lib/docker"],
"settings": {
"terminal.integrated.profiles.linux": {
"zsh": {
"path": "/usr/bin/zsh"
}
}
},
"mounts": [
"type=volume,target=/var/lib/docker",
"type=volume,target=/mnt/supervisor"
]
},
"terminal.integrated.defaultProfile.linux": "zsh",
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true,
"python.pythonPath": "/usr/local/bin/python3",
"python.linting.pylintEnabled": true,
"python.linting.enabled": true,
"python.formatting.provider": "black",
"python.formatting.blackArgs": ["--target-version", "py310"],
"python.formatting.blackPath": "/usr/local/bin/black",
"python.linting.banditPath": "/usr/local/bin/bandit",
"python.linting.flake8Path": "/usr/local/bin/flake8",
"python.linting.mypyPath": "/usr/local/bin/mypy",
"python.linting.pylintPath": "/usr/local/bin/pylint",
"python.linting.pydocstylePath": "/usr/local/bin/pydocstyle"
}
}
69 .github/ISSUE_TEMPLATE.md vendored Normal file

@@ -0,0 +1,69 @@
---
name: Report a bug with the Supervisor on a supported System
about: Report an issue related to the Home Assistant Supervisor.
labels: bug
---

<!-- READ THIS FIRST:
- If you need additional help with this template please refer to https://www.home-assistant.io/help/reporting_issues/
- This is for bugs only. Feature and enhancement requests should go in our community forum: https://community.home-assistant.io/c/feature-requests
- Provide as many details as possible. Paste logs, configuration sample and code into the backticks. Do not delete any text from this template!
- If you have a problem with an add-on, make an issue in its repository.
-->

<!--
Important: You can only file a bug report for a supported system! If you run an unsupported installation, this report will be closed without comment.
-->

### Describe the issue

<!-- Provide as many details as possible. -->

### Steps to reproduce

<!-- What do you do to encounter the issue? -->

1. ...
2. ...
3. ...

### Environment details

<!-- You can find these details in the system tab of the supervisor panel, or by using the `ha` CLI. -->

- **Operating System**: xxx
- **Supervisor version**: xxx
- **Home Assistant version**: xxx

### Supervisor logs

<details>
<summary>Supervisor logs</summary>
<!--
- Frontend -> Supervisor -> System
- Or use this command: ha supervisor logs
- Logs are more than just errors; even if you don't think it's important, it is.
-->

```
Paste supervisor logs here

```

</details>

### System Information

<details>
<summary>System Information</summary>
<!--
- Use this command: ha info
-->

```
Paste system info here

```

</details>
20 .github/ISSUE_TEMPLATE/bug_report.yml vendored

@@ -1,5 +1,6 @@
name: Report an issue with Home Assistant Supervisor
name: Bug Report Form
description: Report an issue related to the Home Assistant Supervisor.
labels: bug
body:
- type: markdown
attributes:

@@ -8,7 +9,7 @@ body:

If you have a feature or enhancement request, please use the [feature request][fr] section of our [Community Forum][fr].

[fr]: https://github.com/orgs/home-assistant/discussions
[fr]: https://community.home-assistant.io/c/feature-requests
- type: textarea
validations:
required: true

@@ -25,7 +26,7 @@ body:
attributes:
label: What type of installation are you running?
description: >
If you don't know, it can be found in [Settings -> System -> Repairs -> (three dot menu) -> System Information](https://my.home-assistant.io/redirect/system_health/).
If you don't know, it can be found in [Settings -> System -> Repairs -> System Information](https://my.home-assistant.io/redirect/system_health/).
It is listed as the `Installation Type` value.
options:
- Home Assistant OS

@@ -71,21 +72,20 @@ body:
validations:
required: true
attributes:
label: System information
label: System Health information
description: >
The System information can be found in [Settings -> System -> Repairs -> (three dot menu) -> System Information](https://my.home-assistant.io/redirect/system_health/).
System Health information can be found in the top right menu in [Settings -> System -> Repairs](https://my.home-assistant.io/redirect/repairs/).
Click the copy button at the bottom of the pop-up and paste it here.


[](https://my.home-assistant.io/redirect/system_health/)
- type: textarea
attributes:
label: Supervisor diagnostics
placeholder: "drag-and-drop the diagnostics data file here (do not copy-and-paste the content)"
description: >-
Supervisor diagnostics can be found in [Settings -> Devices & services](https://my.home-assistant.io/redirect/integrations/).
Find the card that says `Home Assistant Supervisor`, open it, and select the three dot menu of the Supervisor integration entry
and select 'Download diagnostics'.

Supervisor diagnostics can be found in [Settings -> Integrations](https://my.home-assistant.io/redirect/integrations/).
Find the card that says `Home Assistant Supervisor`, open its menu and select 'Download diagnostics'.

**Please drag-and-drop the downloaded file into the textbox below. Do not copy and paste its contents.**
- type: textarea
attributes:
2 .github/ISSUE_TEMPLATE/config.yml vendored

@@ -13,7 +13,7 @@ contact_links:
about: Our documentation has its own issue tracker. Please report issues with the website there.

- name: Request a feature for the Supervisor
url: https://github.com/orgs/home-assistant/discussions
url: https://community.home-assistant.io/c/feature-requests
about: Request a new feature for the Supervisor.

- name: I have a question or need support
53 .github/ISSUE_TEMPLATE/task.yml vendored

@@ -1,53 +0,0 @@
name: Task
description: For staff only - Create a task
type: Task
body:
  - type: markdown
    attributes:
      value: |
        ## ⚠️ RESTRICTED ACCESS

        **This form is restricted to Open Home Foundation staff and authorized contributors only.**

        If you are a community member wanting to contribute, please:
        - For bug reports: Use the [bug report form](https://github.com/home-assistant/supervisor/issues/new?template=bug_report.yml)
        - For feature requests: Submit to [Feature Requests](https://github.com/orgs/home-assistant/discussions)

        ---

        ### For authorized contributors

        Use this form to create tasks for development work, improvements, or other actionable items that need to be tracked.
  - type: textarea
    id: description
    attributes:
      label: Description
      description: |
        Provide a clear and detailed description of the task that needs to be accomplished.

        Be specific about what needs to be done, why it's important, and any constraints or requirements.
      placeholder: |
        Describe the task, including:
        - What needs to be done
        - Why this task is needed
        - Expected outcome
        - Any constraints or requirements
    validations:
      required: true
  - type: textarea
    id: additional_context
    attributes:
      label: Additional context
      description: |
        Any additional information, links, research, or context that would be helpful.

        Include links to related issues, research, prototypes, roadmap opportunities etc.
      placeholder: |
        - Roadmap opportunity: [link]
        - Epic: [link]
        - Feature request: [link]
        - Technical design documents: [link]
        - Prototype/mockup: [link]
        - Dependencies: [links]
    validations:
      required: false
9 .github/PULL_REQUEST_TEMPLATE.md vendored

@@ -38,7 +38,6 @@
- This PR is related to issue:
- Link to documentation pull request:
- Link to cli pull request:
- Link to client library pull request:

## Checklist

@@ -53,14 +52,12 @@
- [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
- [ ] There is no commented out code in this PR.
- [ ] I have followed the [development checklist][dev-checklist]
- [ ] The code has been formatted using Ruff (`ruff format supervisor tests`)
- [ ] The code has been formatted using Black (`black --fast supervisor tests`)
- [ ] Tests have been added to verify that the new code works.

If API endpoints or add-on configuration are added/changed:
If API endpoints of add-on configuration are added/changed:

- [ ] Documentation added/updated for [developers.home-assistant.io][docs-repository]
- [ ] [CLI][cli-repository] updated (if necessary)
- [ ] [Client library][client-library-repository] updated (if necessary)

<!--
Thank you for contributing <3

@@ -70,5 +67,3 @@ If API endpoints or add-on configuration are added/changed:

[dev-checklist]: https://developers.home-assistant.io/docs/en/development_checklist.html
[docs-repository]: https://github.com/home-assistant/developers.home-assistant
[cli-repository]: https://github.com/home-assistant/cli
[client-library-repository]: https://github.com/home-assistant-libs/python-supervisor-client/
288 .github/copilot-instructions.md vendored

@@ -1,288 +0,0 @@
# GitHub Copilot & Claude Code Instructions

This repository contains the Home Assistant Supervisor, a Python 3 based container orchestration and management system for Home Assistant.

## Supervisor Capabilities & Features

### Architecture Overview

Home Assistant Supervisor is a Python-based container orchestration system that communicates with the Docker daemon to manage containerized components. It is tightly integrated with the underlying Operating System and core Operating System components through D-Bus.

**Managed Components:**
- **Home Assistant Core**: The main home automation application running in its own container (also provides the web interface)
- **Add-ons**: Third-party applications and services (each add-on runs in its own container)
- **Plugins**: Built-in system services like DNS, Audio, CLI, Multicast, and Observer
- **Host System Integration**: OS-level operations and hardware access via D-Bus
- **Container Networking**: Internal Docker network management and external connectivity
- **Storage & Backup**: Data persistence and backup management across all containers

**Key Dependencies:**
- **Docker Engine**: Required for all container operations
- **D-Bus**: System-level communication with the host OS
- **systemd**: Service management for host system operations
- **NetworkManager**: Network configuration and management

### Add-on System

**Add-on Architecture**: Add-ons are containerized applications available through add-on stores. Each store contains multiple add-ons, and each add-on includes metadata that tells Supervisor the version, startup configuration (permissions), and available user configurable options. Add-on metadata typically references a container image that Supervisor fetches during installation. If not, the Supervisor builds the container image from a Dockerfile.

**Built-in Stores**: Supervisor comes with several pre-configured stores:
- **Core Add-ons**: Official add-ons maintained by the Home Assistant team
- **Community Add-ons**: Popular third-party add-ons repository
- **ESPHome**: Add-ons for ESPHome ecosystem integration
- **Music Assistant**: Audio and music-related add-ons
- **Local Development**: Local folder for testing custom add-ons during development

**Store Management**: Stores are Git-based repositories that are periodically updated. When updates are available, users receive notifications.

**Add-on Lifecycle**:
- **Installation**: Supervisor fetches or builds container images based on add-on metadata
- **Configuration**: Schema-validated options with integrated UI management
- **Runtime**: Full container lifecycle management, health monitoring
- **Updates**: Automatic or manual version management

### Update System

**Core Components**: Supervisor, Home Assistant Core, HAOS, and built-in plugins receive version information from a central JSON file fetched from `https://version.home-assistant.io/{channel}.json`. The `Updater` class handles fetching this data, validating signatures, and updating internal version tracking.

**Update Channels**: Three channels (`stable`/`beta`/`dev`) determine which version JSON file is fetched, allowing users to opt into different release streams.
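For orientation, a minimal sketch of that fetch flow — a standalone helper, not the real `Updater` class (which also does signature validation); the JSON key names used below are assumptions for illustration:

```python
import asyncio

import aiohttp

VERSION_URL = "https://version.home-assistant.io/{channel}.json"


async def fetch_versions(channel: str = "stable") -> dict:
    """Fetch the central version JSON for one update channel."""
    async with aiohttp.ClientSession() as session:
        async with session.get(VERSION_URL.format(channel=channel)) as resp:
            resp.raise_for_status()
            return await resp.json()


async def main() -> None:
    data = await fetch_versions("stable")
    # One entry per managed component; "supervisor" and "homeassistant"
    # are assumed key names in this sketch.
    print("Supervisor:", data.get("supervisor"))
    print("Home Assistant:", data.get("homeassistant"))


asyncio.run(main())
```

Switching the `channel` argument to `beta` or `dev` is all that distinguishes the release streams on the client side.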
**Add-on Updates**: Add-on version information comes from store repository updates, not the central JSON file. When repositories are refreshed via the store system, add-ons compare their local versions against repository versions to determine update availability.

### Backup & Recovery System

**Backup Capabilities**:
- **Full Backups**: Complete system state capture including all add-ons, configuration, and data
- **Partial Backups**: Selective backup of specific components (Home Assistant, add-ons, folders)
- **Encrypted Backups**: Optional backup encryption with user-provided passwords
- **Multiple Storage Locations**: Local storage and remote backup destinations

**Recovery Features**:
- **One-click Restore**: Simple restoration from backup files
- **Selective Restore**: Choose specific components to restore
- **Automatic Recovery**: Self-healing for common system issues

---

## Supervisor Development

### Python Requirements

- **Compatibility**: Python 3.13+
- **Language Features**: Use modern Python features:
  - Type hints with `typing` module
  - f-strings (preferred over `%` or `.format()`)
  - Dataclasses and enum classes
  - Async/await patterns
  - Pattern matching where appropriate

### Code Quality Standards

- **Formatting**: Ruff
- **Linting**: PyLint and Ruff
- **Type Checking**: MyPy
- **Testing**: pytest with asyncio support
- **Language**: American English for all code, comments, and documentation

### Code Organization

**Core Structure**:
```
supervisor/
├── __init__.py       # Package initialization
├── const.py          # Constants and enums
├── coresys.py        # Core system management
├── bootstrap.py      # System initialization
├── exceptions.py     # Custom exception classes
├── api/              # REST API endpoints
├── addons/           # Add-on management
├── backups/          # Backup system
├── docker/           # Docker integration
├── host/             # Host system interface
├── homeassistant/    # Home Assistant Core management
├── dbus/             # D-Bus system integration
├── hardware/         # Hardware detection and management
├── plugins/          # Plugin system
├── resolution/       # Issue detection and resolution
├── security/         # Security management
├── services/         # Service discovery and management
├── store/            # Add-on store management
└── utils/            # Utility functions
```

**Shared Constants**: Use constants from `supervisor/const.py` instead of hardcoding values. Define new constants following existing patterns and group related constants together.

### Supervisor Architecture Patterns

**CoreSysAttributes Inheritance Pattern**: Nearly all major classes in Supervisor inherit from `CoreSysAttributes`, providing access to the centralized system state via `self.coresys` and convenient `sys_*` properties.

```python
# Standard Supervisor class pattern
class MyManager(CoreSysAttributes):
    """Manage my functionality."""

    def __init__(self, coresys: CoreSys):
        """Initialize manager."""
        self.coresys: CoreSys = coresys
        self._component: MyComponent = MyComponent(coresys)

    @property
    def component(self) -> MyComponent:
        """Return component handler."""
        return self._component

    # Access system components via inherited properties
    async def do_something(self):
        await self.sys_docker.containers.get("my_container")
        self.sys_bus.fire_event(BusEvent.MY_EVENT, {"data": "value"})
```

**Key Inherited Properties from CoreSysAttributes**:
- `self.sys_docker` - Docker API access
- `self.sys_run_in_executor()` - Execute blocking operations
- `self.sys_create_task()` - Create async tasks
- `self.sys_bus` - Event bus for system events
- `self.sys_config` - System configuration
- `self.sys_homeassistant` - Home Assistant Core management
- `self.sys_addons` - Add-on management
- `self.sys_host` - Host system access
- `self.sys_dbus` - D-Bus system interface

**Load Pattern**: Many components implement a `load()` method which effectively initializes the component from external sources (containers, files, D-Bus services).

### API Development

**REST API Structure**:
- **Base Path**: `/api/` for all endpoints
- **Authentication**: Bearer token authentication
- **Consistent Response Format**: `{"result": "ok", "data": {...}}` or `{"result": "error", "message": "..."}`
- **Validation**: Use voluptuous schemas with `api_validate()`

**Use `@api_process` Decorator**: This decorator handles all standard error handling and response formatting automatically. The decorator catches `APIError`, `HassioError`, and other exceptions, returning appropriate HTTP responses.

```python
from ..api.utils import api_process, api_validate


@api_process
async def backup_full(self, request: web.Request) -> dict[str, Any]:
    """Create full backup."""
    body = await api_validate(SCHEMA_BACKUP_FULL, request)
    job = await self.sys_backups.do_backup_full(**body)
    return {ATTR_JOB_ID: job.uuid}
```

### Docker Integration

- **Container Management**: Use Supervisor's Docker manager instead of direct Docker API
- **Networking**: Supervisor manages internal Docker networks with predefined IP ranges
- **Security**: AppArmor profiles, capability restrictions, and user namespace isolation
- **Health Checks**: Implement health monitoring for all managed containers

### D-Bus Integration

- **Use dbus-fast**: Async D-Bus library for system integration
- **Service Management**: systemd, NetworkManager, hostname management
- **Error Handling**: Wrap D-Bus exceptions in Supervisor-specific exceptions (see the sketch below)
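A minimal sketch of that wrapping pattern with dbus-fast, assuming a hypothetical `DBusServiceError`; the real Supervisor exception names and its D-Bus plumbing differ:

```python
from dbus_fast import BusType, DBusError
from dbus_fast.aio import MessageBus


class DBusServiceError(Exception):
    """Hypothetical Supervisor-style wrapper exception."""


async def get_hostname() -> str:
    """Read the Hostname property from systemd-hostnamed."""
    try:
        bus = await MessageBus(bus_type=BusType.SYSTEM).connect()
        introspection = await bus.introspect(
            "org.freedesktop.hostname1", "/org/freedesktop/hostname1"
        )
        proxy = bus.get_proxy_object(
            "org.freedesktop.hostname1", "/org/freedesktop/hostname1", introspection
        )
        hostname1 = proxy.get_interface("org.freedesktop.hostname1")
        return await hostname1.get_hostname()
    except DBusError as err:
        # Chain the low-level D-Bus error into the Supervisor-level exception
        raise DBusServiceError(f"Cannot read hostname: {err}") from err
```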
### Async Programming

- **All I/O operations must be async**: File operations, network calls, subprocess execution
- **Use asyncio patterns**: Prefer `asyncio.gather()` over sequential awaits
- **Executor jobs**: Use `self.sys_run_in_executor()` for blocking operations
- **Two-phase initialization**: `__init__` for sync setup, `post_init()` for async initialization (a combined sketch follows this list)
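A small standalone sketch of those patterns with plain `asyncio` and a hypothetical `Manager` class; inside Supervisor the executor hop would go through `self.sys_run_in_executor()` instead of the raw loop:

```python
import asyncio
from pathlib import Path


class Manager:
    """Hypothetical component using two-phase initialization."""

    def __init__(self, config_path: Path) -> None:
        # Phase 1: synchronous setup only, no I/O here
        self.config_path = config_path
        self.config: str | None = None

    async def post_init(self) -> "Manager":
        # Phase 2: async initialization; push blocking file I/O
        # to an executor instead of blocking the event loop
        loop = asyncio.get_running_loop()
        self.config = await loop.run_in_executor(None, self.config_path.read_text)
        return self

    async def check_endpoints(self) -> list[int]:
        # Prefer gather() over sequential awaits for independent work
        async def probe(port: int) -> int:
            await asyncio.sleep(0.1)  # stand-in for a network call
            return port

        return await asyncio.gather(probe(80), probe(443))


async def main() -> None:
    manager = await Manager(Path("/etc/hostname")).post_init()
    print(manager.config, await manager.check_endpoints())


asyncio.run(main())
```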
### Testing

- **Location**: `tests/` directory with module mirroring
- **Fixtures**: Extensive use of pytest fixtures for CoreSys setup
- **Mocking**: Mock external dependencies (Docker, D-Bus, network calls) — see the sketch after this list
- **Coverage**: Minimum 90% test coverage, 100% for security-sensitive code
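A minimal illustration of that fixture-plus-mocking style (assumes `pytest-asyncio`; the fixture body is a hypothetical stand-in for the real CoreSys fixture):

```python
from types import SimpleNamespace
from unittest.mock import AsyncMock

import pytest


@pytest.fixture
def coresys() -> SimpleNamespace:
    """Hypothetical stand-in for the real CoreSys test fixture."""
    containers = SimpleNamespace(get=AsyncMock(return_value="ok"))
    return SimpleNamespace(docker=SimpleNamespace(containers=containers))


@pytest.mark.asyncio
async def test_container_lookup(coresys: SimpleNamespace) -> None:
    """External dependencies (Docker here) are mocked, never real."""
    assert await coresys.docker.containers.get("my_container") == "ok"
    coresys.docker.containers.get.assert_awaited_once_with("my_container")
```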
### Error Handling

- **Custom Exceptions**: Defined in `exceptions.py` with clear inheritance hierarchy
- **Error Propagation**: Use `from` clause for exception chaining (illustrated below)
- **API Errors**: Use `APIError` with appropriate HTTP status codes
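A compact sketch of that chaining convention; `HassioError` is named above as the caught base, while `BackupError` and the function are hypothetical stand-ins:

```python
class HassioError(Exception):
    """Root of the Supervisor exception hierarchy (simplified)."""


class BackupError(HassioError):
    """Hypothetical backup-specific error."""


def read_backup(path: str) -> bytes:
    """Read a backup file, translating OS errors into domain errors."""
    try:
        with open(path, "rb") as handle:
            return handle.read()
    except OSError as err:
        # `from err` preserves the original traceback as __cause__
        raise BackupError(f"Cannot read backup {path}") from err
```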
### Security Considerations

- **Container Security**: AppArmor profiles mandatory for add-ons, minimal capabilities
- **Authentication**: Token-based API authentication with role-based access
- **Data Protection**: Backup encryption, secure secret management, comprehensive input validation

### Development Commands

```bash
# Run tests, adjust paths as necessary
pytest -qsx tests/

# Linting and formatting
ruff check supervisor/
ruff format supervisor/

# Type checking
mypy --ignore-missing-imports supervisor/

# Pre-commit hooks
pre-commit run --all-files
```

Always run the pre-commit hooks at the end of code editing.

### Common Patterns to Follow

**✅ Use These Patterns**:
- Inherit from `CoreSysAttributes` for system access
- Use `@api_process` decorator for API endpoints
- Use `self.sys_run_in_executor()` for blocking operations
- Access Docker via `self.sys_docker` not direct Docker API
- Use constants from `const.py` instead of hardcoding
- Store types in (per-module) `const.py` (e.g. supervisor/store/const.py)

**❌ Avoid These Patterns**:
- Direct Docker API usage - use Supervisor's Docker manager
- Blocking operations in async context (use asyncio alternatives)
- Hardcoded values - use constants from `const.py`
- Manual error handling in API endpoints - let `@api_process` handle it

This guide provides the foundation for contributing to Home Assistant Supervisor. Follow these patterns and guidelines to ensure code quality, security, and maintainability.
105 .github/workflows/builder.yml vendored

@@ -33,7 +33,7 @@ on:
- setup.py

env:
DEFAULT_PYTHON: "3.13"
DEFAULT_PYTHON: "3.11"
BUILD_NAME: supervisor
BUILD_TYPE: supervisor

@@ -53,7 +53,7 @@ jobs:
requirements: ${{ steps.requirements.outputs.changed }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v3.5.2
with:
fetch-depth: 0

@@ -70,29 +70,25 @@ jobs:
- name: Get changed files
id: changed_files
if: steps.version.outputs.publish == 'false'
uses: masesgroup/retrieve-changed-files@v3.0.0
uses: jitterbit/get-changed-files@v1

- name: Check if requirements files changed
id: requirements
run: |
if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.yaml) ]]; then
echo "changed=true" >> "$GITHUB_OUTPUT"
if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.json) ]]; then
echo "::set-output name=changed::true"
fi

build:
name: Build ${{ matrix.arch }} supervisor
needs: init
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write
packages: write
strategy:
matrix:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v3.5.2
with:
fetch-depth: 0

@@ -106,13 +102,13 @@ jobs:

- name: Build wheels
if: needs.init.outputs.requirements == 'true'
uses: home-assistant/wheels@2025.03.0
uses: home-assistant/wheels@2023.04.0
with:
abi: cp313
abi: cp311
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
apk: "libffi-dev;openssl-dev;yaml-dev"
apk: "libffi-dev;openssl-dev"
skip-binary: aiohttp
env-file: true
requirements: "requirements.txt"

@@ -123,33 +119,16 @@ jobs:
with:
type: ${{ env.BUILD_TYPE }}

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
- name: Login to DockerHub
if: needs.init.outputs.publish == 'true'
uses: actions/setup-python@v5.6.0
uses: docker/login-action@v2.1.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

- name: Install Cosign
if: needs.init.outputs.publish == 'true'
uses: sigstore/cosign-installer@v3.9.2
with:
cosign-release: "v2.4.3"

- name: Install dirhash and calc hash
if: needs.init.outputs.publish == 'true'
run: |
pip3 install setuptools dirhash
dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")"
echo "${dir_hash}" > rootfs/supervisor.sha256

- name: Sign supervisor SHA256
if: needs.init.outputs.publish == 'true'
run: |
cosign sign-blob --yes rootfs/supervisor.sha256 --bundle rootfs/supervisor.sha256.sig
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}

- name: Login to GitHub Container Registry
if: needs.init.outputs.publish == 'true'
uses: docker/login-action@v3.4.0
uses: docker/login-action@v2.1.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}

@@ -160,17 +139,55 @@ jobs:
run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV

- name: Build supervisor
uses: home-assistant/builder@2025.03.0
uses: home-assistant/builder@2023.03.0
with:
args: |
$BUILD_ARGS \
--${{ matrix.arch }} \
--target /data \
--cosign \
--generic ${{ needs.init.outputs.version }}
env:
CAS_API_KEY: ${{ secrets.CAS_TOKEN }}

codenotary:
name: CAS signature
needs: init
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
if: needs.init.outputs.publish == 'true'
uses: actions/checkout@v3.5.2
with:
fetch-depth: 0

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.publish == 'true'
uses: actions/setup-python@v4.6.1
with:
python-version: ${{ env.DEFAULT_PYTHON }}

- name: Set version
if: needs.init.outputs.publish == 'true'
uses: home-assistant/actions/helpers/version@master
with:
type: ${{ env.BUILD_TYPE }}

- name: Install dirhash and calc hash
if: needs.init.outputs.publish == 'true'
id: dirhash
run: |
pip3 install dirhash
dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")"
echo "::set-output name=dirhash::${dir_hash}"

- name: Signing Source
if: needs.init.outputs.publish == 'true'
uses: home-assistant/actions/helpers/codenotary@master
with:
source: hash://${{ steps.dirhash.outputs.dirhash }}
asset: supervisor-${{ needs.init.outputs.version }}
token: ${{ secrets.CAS_TOKEN }}

version:
name: Update version
needs: ["init", "run_supervisor"]
@@ -178,7 +195,7 @@ jobs:
steps:
- name: Checkout the repository
if: needs.init.outputs.publish == 'true'
uses: actions/checkout@v4.2.2
uses: actions/checkout@v3.5.2

- name: Initialize git
if: needs.init.outputs.publish == 'true'

@@ -199,15 +216,15 @@ jobs:
run_supervisor:
runs-on: ubuntu-latest
name: Run the Supervisor
needs: ["build", "init"]
needs: ["build", "codenotary", "init"]
timeout-minutes: 60
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v3.5.2

- name: Build the Supervisor
if: needs.init.outputs.publish != 'true'
uses: home-assistant/builder@2025.03.0
uses: home-assistant/builder@2023.03.0
with:
args: |
--test \

@@ -219,7 +236,7 @@ jobs:
if: needs.init.outputs.publish == 'true'
run: |
docker pull ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }}
docker tag ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }} ghcr.io/home-assistant/amd64-hassio-supervisor:runner
docker tag ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }} homeassistant/amd64-hassio-supervisor:runner

- name: Create the Supervisor
run: |
@@ -236,7 +253,7 @@ jobs:
-e SUPERVISOR_NAME=hassio_supervisor \
-e SUPERVISOR_DEV=1 \
-e SUPERVISOR_MACHINE="qemux86-64" \
ghcr.io/home-assistant/amd64-hassio-supervisor:runner
homeassistant/amd64-hassio-supervisor:runner

- name: Start the Supervisor
run: docker start hassio_supervisor

@@ -324,7 +341,7 @@ jobs:
if [ "$(echo $test | jq -r '.result')" != "ok" ]; then
exit 1
fi
echo "slug=$(echo $test | jq -r '.data.slug')" >> "$GITHUB_OUTPUT"
echo "::set-output name=slug::$(echo $test | jq -r '.data.slug')"

- name: Uninstall SSH add-on
run: |
265 .github/workflows/ci.yaml vendored

@@ -8,9 +8,9 @@ on:
pull_request: ~

env:
DEFAULT_PYTHON: "3.13"
PRE_COMMIT_CACHE: ~/.cache/pre-commit
MYPY_CACHE_VERSION: 1
DEFAULT_PYTHON: "3.11"
PRE_COMMIT_HOME: ~/.cache/pre-commit
DEFAULT_CAS: v1.0.2

concurrency:
group: "${{ github.workflow }}-${{ github.ref }}"

@@ -26,15 +26,15 @@ jobs:
name: Prepare Python dependencies
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v3.5.2
- name: Set up Python
id: python
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v4.6.1
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
uses: actions/cache@v3.3.1
with:
path: venv
key: |

@@ -48,10 +48,9 @@ jobs:
pip install -r requirements.txt -r requirements_tests.txt
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.3
uses: actions/cache@v3.3.1
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
restore-keys: |

@@ -62,21 +61,21 @@ jobs:
. venv/bin/activate
pre-commit install-hooks

lint-ruff-format:
name: Check ruff-format
lint-black:
name: Check black
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v3.5.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v4.6.1
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
uses: actions/cache@v3.3.1
with:
path: venv
key: |

@@ -86,67 +85,10 @@ jobs:
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.3
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Run ruff-format
- name: Run black
run: |
. venv/bin/activate
pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure
env:
RUFF_OUTPUT_FORMAT: github

lint-ruff:
name: Check ruff
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.6.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.3
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Run ruff
run: |
. venv/bin/activate
pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
env:
RUFF_OUTPUT_FORMAT: github
black --target-version py38 --check supervisor tests setup.py

lint-dockerfile:
name: Check Dockerfile
@@ -154,7 +96,7 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v3.5.2
- name: Register hadolint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/hadolint.json"

@@ -169,15 +111,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v3.5.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v4.6.1
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
uses: actions/cache@v3.3.1
with:
path: venv
key: |

@@ -189,9 +131,9 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.3
uses: actions/cache@v3.3.1
with:
path: ${{ env.PRE_COMMIT_CACHE }}
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed

@@ -207,21 +149,53 @@ jobs:
. venv/bin/activate
pre-commit run --hook-stage manual check-executables-have-shebangs --all-files

lint-json:
name: Check JSON
lint-flake8:
name: Check flake8
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v3.5.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v4.6.1
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
uses: actions/cache@v3.3.1
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Register flake8 problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/flake8.json"
- name: Run flake8
run: |
. venv/bin/activate
flake8 supervisor tests

lint-isort:
name: Check isort
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.5.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v4.6.1
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.3.1
with:
path: venv
key: |

@@ -233,9 +207,50 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.3
uses: actions/cache@v3.3.1
with:
path: ${{ env.PRE_COMMIT_CACHE }}
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Run isort
run: |
. venv/bin/activate
pre-commit run --hook-stage manual isort --all-files --show-diff-on-failure

lint-json:
name: Check JSON
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.5.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v4.6.1
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.3.1
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v3.3.1
with:
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed

@@ -257,15 +272,15 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v3.5.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v4.6.1
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
uses: actions/cache@v3.3.1
with:
path: venv
key: |

@@ -275,10 +290,6 @@ jobs:
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Install additional system dependencies
run: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends libpulse0
- name: Register pylint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/pylint.json"

@@ -287,51 +298,46 @@ jobs:
. venv/bin/activate
pylint supervisor tests

mypy:
name: Check mypy
lint-pyupgrade:
name: Check pyupgrade
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v3.5.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v4.6.1
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Generate partial mypy restore key
id: generate-mypy-key
run: |
mypy_version=$(cat requirements_test.txt | grep mypy | cut -d '=' -f 3)
echo "version=$mypy_version" >> $GITHUB_OUTPUT
echo "key=mypy-${{ env.MYPY_CACHE_VERSION }}-$mypy_version-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
uses: actions/cache@v3.3.1
with:
path: venv
key: >-
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore mypy cache
uses: actions/cache@v4.2.3
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v3.3.1
with:
path: .mypy_cache
key: >-
${{ runner.os }}-mypy-${{ needs.prepare.outputs.python-version }}-${{ steps.generate-mypy-key.outputs.key }}
restore-keys: >-
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-mypy-${{ env.MYPY_CACHE_VERSION }}-${{ steps.generate-mypy-key.outputs.version }}
- name: Register mypy problem matcher
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "::add-matcher::.github/workflows/matchers/mypy.json"
- name: Run mypy
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Run pyupgrade
run: |
. venv/bin/activate
mypy --ignore-missing-imports supervisor
pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure

pytest:
runs-on: ubuntu-latest
@@ -339,19 +345,19 @@ jobs:
name: Run tests Python ${{ needs.prepare.outputs.python-version }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v3.5.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v4.6.1
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Install Cosign
uses: sigstore/cosign-installer@v3.9.2
- name: Install CAS tools
uses: home-assistant/actions/helpers/cas@master
with:
cosign-release: "v2.4.3"
version: ${{ env.DEFAULT_CAS }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
uses: actions/cache@v3.3.1
with:
path: venv
key: |

@@ -364,7 +370,7 @@ jobs:
- name: Install additional system dependencies
run: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends libpulse0 libudev1 dbus-daemon
sudo apt-get install -y --no-install-recommends libpulse0 libudev1 dbus dbus-x11
- name: Register Python problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/python.json"

@@ -386,11 +392,10 @@ jobs:
-o console_output_style=count \
tests
- name: Upload coverage artifact
uses: actions/upload-artifact@v4.6.2
uses: actions/upload-artifact@v3.1.2
with:
name: coverage-${{ matrix.python-version }}
path: .coverage
include-hidden-files: true

coverage:
name: Process test coverage
@@ -398,15 +403,15 @@ jobs:
needs: ["pytest", "prepare"]
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v3.5.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v4.6.1
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
uses: actions/cache@v3.3.1
with:
path: venv
key: |

@@ -417,7 +422,7 @@ jobs:
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v3
- name: Combine coverage results
run: |
. venv/bin/activate
@@ -425,4 +430,4 @@ jobs:
coverage report
coverage xml
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v5.4.3
uses: codecov/codecov-action@v3.1.4
2 .github/workflows/lock.yml vendored

@@ -9,7 +9,7 @@ jobs:
lock:
runs-on: ubuntu-latest
steps:
- uses: dessant/lock-threads@v5.0.1
- uses: dessant/lock-threads@v4.0.0
with:
github-token: ${{ github.token }}
issue-inactive-days: "30"
30 .github/workflows/matchers/flake8.json vendored Normal file

@@ -0,0 +1,30 @@
{
  "problemMatcher": [
    {
      "owner": "flake8-error",
      "severity": "error",
      "pattern": [
        {
          "regexp": "^(.*):(\\d+):(\\d+):\\s(E\\d{3}\\s.*)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4
        }
      ]
    },
    {
      "owner": "flake8-warning",
      "severity": "warning",
      "pattern": [
        {
          "regexp": "^(.*):(\\d+):(\\d+):\\s([CDFNW]\\d{3}\\s.*)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4
        }
      ]
    }
  ]
}
16 .github/workflows/matchers/mypy.json vendored

@@ -1,16 +0,0 @@
{
  "problemMatcher": [
    {
      "owner": "mypy",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+):\\s(error|warning):\\s(.+)$",
          "file": 1,
          "line": 2,
          "severity": 3,
          "message": 4
        }
      ]
    }
  ]
}
6 .github/workflows/release-drafter.yml vendored

@@ -11,7 +11,7 @@ jobs:
name: Release Drafter
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v3.5.2
with:
fetch-depth: 0

@@ -33,10 +33,10 @@ jobs:

echo Current version: $latest
echo New target version: $datepre.$newpost
echo "version=$datepre.$newpost" >> "$GITHUB_OUTPUT"
echo "::set-output name=version::$datepre.$newpost"

- name: Run Release Drafter
uses: release-drafter/release-drafter@v6.1.0
uses: release-drafter/release-drafter@v5.23.0
with:
tag: ${{ steps.version.outputs.version }}
name: ${{ steps.version.outputs.version }}
58 .github/workflows/restrict-task-creation.yml vendored

@@ -1,58 +0,0 @@
name: Restrict task creation

# yamllint disable-line rule:truthy
on:
  issues:
    types: [opened]

jobs:
  check-authorization:
    runs-on: ubuntu-latest
    # Only run if this is a Task issue type (from the issue form)
    if: github.event.issue.issue_type == 'Task'
    steps:
      - name: Check if user is authorized
        uses: actions/github-script@v7
        with:
          script: |
            const issueAuthor = context.payload.issue.user.login;

            // Check if user is an organization member
            try {
              await github.rest.orgs.checkMembershipForUser({
                org: 'home-assistant',
                username: issueAuthor
              });
              console.log(`✅ ${issueAuthor} is an organization member`);
              return; // Authorized
            } catch (error) {
              console.log(`❌ ${issueAuthor} is not authorized to create Task issues`);
            }

            // Close the issue with a comment
            await github.rest.issues.createComment({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
              body: `Hi @${issueAuthor}, thank you for your contribution!\n\n` +
                    `Task issues are restricted to Open Home Foundation staff and authorized contributors.\n\n` +
                    `If you would like to:\n` +
                    `- Report a bug: Please use the [bug report form](https://github.com/home-assistant/supervisor/issues/new?template=bug_report.yml)\n` +
                    `- Request a feature: Please submit to [Feature Requests](https://github.com/orgs/home-assistant/discussions)\n\n` +
                    `If you believe you should have access to create Task issues, please contact the maintainers.`
            });

            await github.rest.issues.update({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
              state: 'closed'
            });

            // Add a label to indicate this was auto-closed
            await github.rest.issues.addLabels({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
              labels: ['auto-closed']
            });
4 .github/workflows/sentry.yaml vendored

@@ -10,9 +10,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v3.5.2
- name: Sentry Release
uses: getsentry/action-release@v3.2.0
uses: getsentry/action-release@v1.4.1
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
2 .github/workflows/stale.yml vendored

@@ -9,7 +9,7 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v9.1.0
- uses: actions/stale@v8.0.0
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 30
82 .github/workflows/update_frontend.yml vendored

@@ -1,82 +0,0 @@
name: Update frontend

on:
  schedule: # once a day
    - cron: "0 0 * * *"
  workflow_dispatch:

jobs:
  check-version:
    runs-on: ubuntu-latest
    outputs:
      skip: ${{ steps.check_version.outputs.skip || steps.check_existing_pr.outputs.skip }}
      current_version: ${{ steps.check_version.outputs.current_version }}
      latest_version: ${{ steps.latest_frontend_version.outputs.latest_tag }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Get latest frontend release
        id: latest_frontend_version
        uses: abatilo/release-info-action@v1.3.3
        with:
          owner: home-assistant
          repo: frontend
      - name: Check if version is up to date
        id: check_version
        run: |
          current_version="$(cat .ha-frontend-version)"
          latest_version="${{ steps.latest_frontend_version.outputs.latest_tag }}"
          echo "current_version=${current_version}" >> $GITHUB_OUTPUT
          echo "LATEST_VERSION=${latest_version}" >> $GITHUB_ENV
          if [[ ! "$current_version" < "$latest_version" ]]; then
            echo "Frontend version is up to date"
            echo "skip=true" >> $GITHUB_OUTPUT
          fi
      - name: Check if there is no open PR with this version
        if: steps.check_version.outputs.skip != 'true'
        id: check_existing_pr
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          PR=$(gh pr list --state open --base main --json title --search "Update frontend to version $LATEST_VERSION")
          if [[ "$PR" != "[]" ]]; then
            echo "Skipping - There is already a PR open for version $LATEST_VERSION"
            echo "skip=true" >> $GITHUB_OUTPUT
          fi
  create-pr:
    runs-on: ubuntu-latest
    needs: check-version
    if: needs.check-version.outputs.skip != 'true'
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Clear www folder
        run: |
          rm -rf supervisor/api/panel/*
      - name: Update version file
        run: |
          echo "${{ needs.check-version.outputs.latest_version }}" > .ha-frontend-version
      - name: Download release assets
        uses: robinraju/release-downloader@v1
        with:
          repository: 'home-assistant/frontend'
          tag: ${{ needs.check-version.outputs.latest_version }}
          fileName: home_assistant_frontend_supervisor-${{ needs.check-version.outputs.latest_version }}.tar.gz
          extract: true
          out-file-path: supervisor/api/panel/
      - name: Remove release assets archive
        run: |
          rm -f supervisor/api/panel/home_assistant_frontend_supervisor-*.tar.gz
      - name: Create PR
        uses: peter-evans/create-pull-request@v7
        with:
          commit-message: "Update frontend to version ${{ needs.check-version.outputs.latest_version }}"
          branch: autoupdate-frontend
          base: main
          draft: true
          sign-commits: true
          title: "Update frontend to version ${{ needs.check-version.outputs.latest_version }}"
          body: >
            Update frontend from ${{ needs.check-version.outputs.current_version }} to
            [${{ needs.check-version.outputs.latest_version }}](https://github.com/home-assistant/frontend/releases/tag/${{ needs.check-version.outputs.latest_version }})
4 .gitmodules vendored Normal file
@@ -0,0 +1,4 @@
[submodule "home-assistant-polymer"]
  path = home-assistant-polymer
  url = https://github.com/home-assistant/home-assistant-polymer
  branch = dev
.ha-frontend-version
@@ -1 +0,0 @@
20250401.0
.hadolint.yaml
@@ -3,5 +3,4 @@ ignored:
  - DL3006
  - DL3013
  - DL3018
  - DL3042
  - SC2155
.pre-commit-config.yaml
@@ -1,27 +1,34 @@
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.11.10
  - repo: https://github.com/psf/black
    rev: 23.1.0
    hooks:
      - id: ruff
      - id: black
        args:
          - --fix
      - id: ruff-format
          - --safe
          - --quiet
          - --target-version
          - py310
        files: ^((supervisor|tests)/.+)?[^/]+\.py$
  - repo: https://github.com/PyCQA/flake8
    rev: 6.0.0
    hooks:
      - id: flake8
        additional_dependencies:
          - flake8-docstrings==1.7.0
          - pydocstyle==6.3.0
        files: ^(supervisor|script|tests)/.+\.py$
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v5.0.0
    rev: v4.3.0
    hooks:
      - id: check-executables-have-shebangs
        stages: [manual]
      - id: check-json
  - repo: local
  - repo: https://github.com/PyCQA/isort
    rev: 5.12.0
    hooks:
      # Run mypy through our wrapper script in order to get the possible
      # pyenv and/or virtualenv activated; it may not have been e.g. if
      # committing from a GUI tool that was not launched from an activated
      # shell.
      - id: mypy
        name: mypy
        entry: script/run-in-env.sh mypy --ignore-missing-imports
        language: script
        types_or: [python, pyi]
        files: ^supervisor/.+\.(py|pyi)$
      - id: isort
  - repo: https://github.com/asottile/pyupgrade
    rev: v3.4.0
    hooks:
      - id: pyupgrade
        args: [--py310-plus]
18 .vscode/tasks.json vendored
@@ -58,23 +58,9 @@
      "problemMatcher": []
    },
    {
      "label": "Ruff Check",
      "label": "Flake8",
      "type": "shell",
      "command": "ruff check --fix supervisor tests",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Ruff Format",
      "type": "shell",
      "command": "ruff format supervisor tests",
      "command": "flake8 supervisor tests",
      "group": {
        "kind": "test",
        "isDefault": true
36 Dockerfile
@@ -4,20 +4,16 @@ FROM ${BUILD_FROM}
ENV \
    S6_SERVICES_GRACETIME=10000 \
    SUPERVISOR_API=http://localhost \
    CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1 \
    UV_SYSTEM_PYTHON=true
    CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1

ARG \
    COSIGN_VERSION \
    BUILD_ARCH \
    QEMU_CPU
    CAS_VERSION

# Install base
WORKDIR /usr/src
RUN \
    set -x \
    && apk add --no-cache \
        findutils \
        eudev \
        eudev-libs \
        git \
@@ -25,27 +21,33 @@ RUN \
        libpulse \
        musl \
        openssl \
        yaml \
    && apk add --no-cache --virtual .build-dependencies \
        build-base \
        go \
    \
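    # cosign verifies image signatures; uv is the Python package installer used below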
    && curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \
    && chmod a+x /usr/bin/cosign \
    && pip3 install uv==0.6.17
    && git clone -b "v${CAS_VERSION}" --depth 1 \
        https://github.com/codenotary/cas \
    && cd cas \
    && make cas \
    && mv cas /usr/bin/cas \
    \
    && apk del .build-dependencies \
    && rm -rf /root/go /root/.cache \
    && rm -rf /usr/src/cas

# Install requirements
COPY requirements.txt .
RUN \
    if [ "${BUILD_ARCH}" = "i386" ]; then \
        setarch="linux32"; \
    else \
        setarch=""; \
    fi \
    && ${setarch} uv pip install --compile-bytecode --no-cache --no-build -r requirements.txt \
    export MAKEFLAGS="-j$(nproc)" \
    && pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links \
        "https://wheels.home-assistant.io/musllinux/" \
        -r ./requirements.txt \
    && rm -f requirements.txt

# Install Home Assistant Supervisor
COPY . supervisor
RUN \
    uv pip install --no-cache -e ./supervisor \
    pip3 install --no-cache-dir -e ./supervisor \
    && python3 -m compileall ./supervisor/supervisor
README.md
@@ -30,5 +30,3 @@ Releases are done in 3 stages (channels) with this structure:

[development]: https://developers.home-assistant.io/docs/supervisor/development
[stable]: https://github.com/home-assistant/version/blob/master/stable.json

[](https://www.openhomefoundation.org/)
18 build.yaml
@@ -1,18 +1,16 @@
image: ghcr.io/home-assistant/{arch}-hassio-supervisor
image: homeassistant/{arch}-hassio-supervisor
shadow_repository: ghcr.io/home-assistant
build_from:
  aarch64: ghcr.io/home-assistant/aarch64-base-python:3.13-alpine3.21
  armhf: ghcr.io/home-assistant/armhf-base-python:3.13-alpine3.21
  armv7: ghcr.io/home-assistant/armv7-base-python:3.13-alpine3.21
  amd64: ghcr.io/home-assistant/amd64-base-python:3.13-alpine3.21
  i386: ghcr.io/home-assistant/i386-base-python:3.13-alpine3.21
  aarch64: ghcr.io/home-assistant/aarch64-base-python:3.11-alpine3.16
  armhf: ghcr.io/home-assistant/armhf-base-python:3.11-alpine3.16
  armv7: ghcr.io/home-assistant/armv7-base-python:3.11-alpine3.16
  amd64: ghcr.io/home-assistant/amd64-base-python:3.11-alpine3.16
  i386: ghcr.io/home-assistant/i386-base-python:3.11-alpine3.16
codenotary:
  signer: notary@home-assistant.io
  base_image: notary@home-assistant.io
cosign:
  base_identity: https://github.com/home-assistant/docker-base/.*
  identity: https://github.com/home-assistant/supervisor/.*
args:
  COSIGN_VERSION: 2.4.3
  CAS_VERSION: 1.0.2
labels:
  io.hass.type: supervisor
  org.opencontainers.image.title: Home Assistant Supervisor
1 home-assistant-polymer Submodule
@@ -0,0 +1 @@
Subproject commit efa02c309bcd986e2b90234052d1bc3d7e1168bd
45 pylintrc Normal file
@@ -0,0 +1,45 @@
[MASTER]
reports=no
jobs=2

good-names=id,i,j,k,ex,Run,_,fp,T,os

extension-pkg-whitelist=
    ciso8601

# Reasons disabled:
# format - handled by black
# locally-disabled - it spams too much
# duplicate-code - unavoidable
# cyclic-import - doesn't test if both import on load
# abstract-class-not-used - is flaky, should not show up but does
# unused-argument - generic callbacks and setup methods create a lot of warnings
# too-many-* - are not enforced for the sake of readability
# too-few-* - same as too-many-*
# abstract-method - with intro of async there are always methods missing
disable=
    format,
    abstract-method,
    cyclic-import,
    duplicate-code,
    locally-disabled,
    no-else-return,
    not-context-manager,
    too-few-public-methods,
    too-many-arguments,
    too-many-branches,
    too-many-instance-attributes,
    too-many-lines,
    too-many-locals,
    too-many-public-methods,
    too-many-return-statements,
    too-many-statements,
    unused-argument,
    consider-using-with

[EXCEPTIONS]
overgeneral-exceptions=builtins.Exception


[TYPECHECK]
ignored-modules = distutils
376 pyproject.toml
@@ -1,376 +0,0 @@
[build-system]
requires = ["setuptools~=80.9.0", "wheel~=0.46.1"]
build-backend = "setuptools.build_meta"

[project]
name = "Supervisor"
dynamic = ["version", "dependencies"]
license = { text = "Apache-2.0" }
description = "Open-source private cloud OS for Home Assistant based on HassOS"
readme = "README.md"
authors = [
  { name = "The Home Assistant Authors", email = "hello@home-assistant.io" },
]
keywords = ["docker", "home-assistant", "api"]
requires-python = ">=3.13.0"

[project.urls]
"Homepage" = "https://www.home-assistant.io/"
"Source Code" = "https://github.com/home-assistant/supervisor"
"Bug Reports" = "https://github.com/home-assistant/supervisor/issues"
"Docs: Dev" = "https://developers.home-assistant.io/"
"Discord" = "https://www.home-assistant.io/join-chat/"
"Forum" = "https://community.home-assistant.io/"

[tool.setuptools]
platforms = ["any"]
zip-safe = false
include-package-data = true

[tool.setuptools.packages.find]
include = ["supervisor*"]

[tool.pylint.MAIN]
py-version = "3.13"
# Use a conservative default here; 2 should speed up most setups and not hurt
# any too bad. Override on command line as appropriate.
jobs = 2
persistent = false
extension-pkg-allow-list = ["ciso8601"]

[tool.pylint.BASIC]
class-const-naming-style = "any"
good-names = ["id", "i", "j", "k", "ex", "Run", "_", "fp", "T", "os"]

[tool.pylint."MESSAGES CONTROL"]
# Reasons disabled:
# format - handled by ruff
# abstract-method - with intro of async there are always methods missing
# cyclic-import - doesn't test if both import on load
# duplicate-code - unavoidable
# locally-disabled - it spams too much
# too-many-* - are not enforced for the sake of readability
# too-few-* - same as too-many-*
# unused-argument - generic callbacks and setup methods create a lot of warnings
disable = [
  "format",
  "abstract-method",
  "cyclic-import",
  "duplicate-code",
  "locally-disabled",
  "no-else-return",
  "not-context-manager",
  "too-few-public-methods",
  "too-many-arguments",
  "too-many-branches",
  "too-many-instance-attributes",
  "too-many-lines",
  "too-many-locals",
  "too-many-public-methods",
  "too-many-return-statements",
  "too-many-statements",
  "unused-argument",
  "consider-using-with",

  # Handled by ruff
  # Ref: <https://github.com/astral-sh/ruff/issues/970>
  "await-outside-async", # PLE1142
  "bad-str-strip-call", # PLE1310
  "bad-string-format-type", # PLE1307
  "bidirectional-unicode", # PLE2502
  "continue-in-finally", # PLE0116
  "duplicate-bases", # PLE0241
  "format-needs-mapping", # F502
  "function-redefined", # F811
  # Needed because ruff does not understand type of __all__ generated by a function
  # "invalid-all-format", # PLE0605
  "invalid-all-object", # PLE0604
  "invalid-character-backspace", # PLE2510
  "invalid-character-esc", # PLE2513
  "invalid-character-nul", # PLE2514
  "invalid-character-sub", # PLE2512
  "invalid-character-zero-width-space", # PLE2515
  "logging-too-few-args", # PLE1206
  "logging-too-many-args", # PLE1205
  "missing-format-string-key", # F524
  "mixed-format-string", # F506
  "no-method-argument", # N805
  "no-self-argument", # N805
  "nonexistent-operator", # B002
  "nonlocal-without-binding", # PLE0117
  "not-in-loop", # F701, F702
  "notimplemented-raised", # F901
  "return-in-init", # PLE0101
  "return-outside-function", # F706
  "syntax-error", # E999
  "too-few-format-args", # F524
  "too-many-format-args", # F522
  "too-many-star-expressions", # F622
  "truncated-format-string", # F501
  "undefined-all-variable", # F822
  "undefined-variable", # F821
  "used-prior-global-declaration", # PLE0118
  "yield-inside-async-function", # PLE1700
  "yield-outside-function", # F704
  "anomalous-backslash-in-string", # W605
  "assert-on-string-literal", # PLW0129
  "assert-on-tuple", # F631
  "bad-format-string", # W1302, F
  "bad-format-string-key", # W1300, F
  "bare-except", # E722
  "binary-op-exception", # PLW0711
  "cell-var-from-loop", # B023
  # "dangerous-default-value", # B006, ruff catches new occurrences, needs more work
  "duplicate-except", # B014
  "duplicate-key", # F601
  "duplicate-string-formatting-argument", # F
  "duplicate-value", # F
  "eval-used", # PGH001
  "exec-used", # S102
  # "expression-not-assigned", # B018, ruff catches new occurrences, needs more work
  "f-string-without-interpolation", # F541
  "forgotten-debug-statement", # T100
  "format-string-without-interpolation", # F
  # "global-statement", # PLW0603, ruff catches new occurrences, needs more work
  "global-variable-not-assigned", # PLW0602
  "implicit-str-concat", # ISC001
  "import-self", # PLW0406
  "inconsistent-quotes", # Q000
  "invalid-envvar-default", # PLW1508
  "keyword-arg-before-vararg", # B026
  "logging-format-interpolation", # G
  "logging-fstring-interpolation", # G
  "logging-not-lazy", # G
  "misplaced-future", # F404
  "named-expr-without-context", # PLW0131
  "nested-min-max", # PLW3301
  # "pointless-statement", # B018, ruff catches new occurrences, needs more work
  "raise-missing-from", # TRY200
  # "redefined-builtin", # A001, ruff is way more stricter, needs work
  "try-except-raise", # TRY203
  "unused-argument", # ARG001, we don't use it
  "unused-format-string-argument", # F507
  "unused-format-string-key", # F504
  "unused-import", # F401
  "unused-variable", # F841
  "useless-else-on-loop", # PLW0120
  "wildcard-import", # F403
  "bad-classmethod-argument", # N804
  "consider-iterating-dictionary", # SIM118
  "empty-docstring", # D419
  "invalid-name", # N815
  "line-too-long", # E501, disabled globally
  "missing-class-docstring", # D101
  "missing-final-newline", # W292
  "missing-function-docstring", # D103
  "missing-module-docstring", # D100
  "multiple-imports", # E401
  "singleton-comparison", # E711, E712
  "subprocess-run-check", # PLW1510
  "superfluous-parens", # UP034
  "ungrouped-imports", # I001
  "unidiomatic-typecheck", # E721
  "unnecessary-direct-lambda-call", # PLC3002
  "unnecessary-lambda-assignment", # PLC3001
  "unneeded-not", # SIM208
  "useless-import-alias", # PLC0414
  "wrong-import-order", # I001
  "wrong-import-position", # E402
  "comparison-of-constants", # PLR0133
  "comparison-with-itself", # PLR0124
  # "consider-alternative-union-syntax", # UP007, typing extension
  "consider-merging-isinstance", # PLR1701
  # "consider-using-alias", # UP006, typing extension
  "consider-using-dict-comprehension", # C402
  "consider-using-generator", # C417
  "consider-using-get", # SIM401
  "consider-using-set-comprehension", # C401
  "consider-using-sys-exit", # PLR1722
  "consider-using-ternary", # SIM108
  "literal-comparison", # F632
  "property-with-parameters", # PLR0206
  "super-with-arguments", # UP008
  "too-many-branches", # PLR0912
  "too-many-return-statements", # PLR0911
  "too-many-statements", # PLR0915
  "trailing-comma-tuple", # COM818
  "unnecessary-comprehension", # C416
  "use-a-generator", # C417
  "use-dict-literal", # C406
  "use-list-literal", # C405
  "useless-object-inheritance", # UP004
  "useless-return", # PLR1711
  # "no-self-use", # PLR6301 # Optional plugin, not enabled
]

[tool.pylint.REPORTS]
score = false

[tool.pylint.TYPECHECK]
ignored-modules = ["distutils"]

[tool.pylint.FORMAT]
expected-line-ending-format = "LF"

[tool.pylint.EXCEPTIONS]
overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"]

[tool.pylint.DESIGN]
max-positional-arguments = 10

[tool.pytest.ini_options]
testpaths = ["tests"]
norecursedirs = [".git"]
log_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(threadName)s %(name)s:%(filename)s:%(lineno)s %(message)s"
log_date_format = "%Y-%m-%d %H:%M:%S"
asyncio_default_fixture_loop_scope = "function"
asyncio_mode = "auto"
filterwarnings = [
  "error",
  "ignore:pkg_resources is deprecated as an API:DeprecationWarning:dirhash",
  "ignore::pytest.PytestUnraisableExceptionWarning",
]
markers = [
  "no_mock_init_websession: disable the autouse mock of init_websession for this test",
]

[tool.ruff]
lint.select = [
  "B002", # Python does not support the unary prefix increment
  "B007", # Loop control variable {name} not used within loop body
  "B014", # Exception handler with duplicate exception
  "B023", # Function definition does not bind loop variable {name}
  "B026", # Star-arg unpacking after a keyword argument is strongly discouraged
  "B904", # Use raise from to specify exception cause
  "C", # complexity
  "COM818", # Trailing comma on bare tuple prohibited
  "D", # docstrings
  "DTZ003", # Use datetime.now(tz=) instead of datetime.utcnow()
  "DTZ004", # Use datetime.fromtimestamp(ts, tz=) instead of datetime.utcfromtimestamp(ts)
  "E", # pycodestyle
  "F", # pyflakes/autoflake
  "G", # flake8-logging-format
  "I", # isort
  "ICN001", # import conventions; {name} should be imported as {asname}
  "N804", # First argument of a class method should be named cls
  "N805", # First argument of a method should be named self
  "N815", # Variable {name} in class scope should not be mixedCase
  "PGH004", # Use specific rule codes when using noqa
  "PLC0414", # Useless import alias. Import alias does not rename original package.
  "PLC", # pylint
  "PLE", # pylint
  "PLR", # pylint
  "PLW", # pylint
  "Q000", # Double quotes found but single quotes preferred
  "RUF006", # Store a reference to the return value of asyncio.create_task
  "S102", # Use of exec detected
  "S103", # bad-file-permissions
  "S108", # hardcoded-temp-file
  "S306", # suspicious-mktemp-usage
  "S307", # suspicious-eval-usage
  "S313", # suspicious-xmlc-element-tree-usage
  "S314", # suspicious-xml-element-tree-usage
  "S315", # suspicious-xml-expat-reader-usage
  "S316", # suspicious-xml-expat-builder-usage
  "S317", # suspicious-xml-sax-usage
  "S318", # suspicious-xml-mini-dom-usage
  "S319", # suspicious-xml-pull-dom-usage
  "S601", # paramiko-call
  "S602", # subprocess-popen-with-shell-equals-true
  "S604", # call-with-shell-equals-true
  "S608", # hardcoded-sql-expression
  "S609", # unix-command-wildcard-injection
  "SIM105", # Use contextlib.suppress({exception}) instead of try-except-pass
  "SIM117", # Merge with-statements that use the same scope
  "SIM118", # Use {key} in {dict} instead of {key} in {dict}.keys()
  "SIM201", # Use {left} != {right} instead of not {left} == {right}
  "SIM208", # Use {expr} instead of not (not {expr})
  "SIM212", # Use {a} if {a} else {b} instead of {b} if not {a} else {a}
  "SIM300", # Yoda conditions. Use 'age == 42' instead of '42 == age'.
  "SIM401", # Use get from dict with default instead of an if block
  "T100", # Trace found: {name} used
  "T20", # flake8-print
  "TID251", # Banned imports
  "TRY004", # Prefer TypeError exception for invalid type
  "TRY203", # Remove exception handler; error is immediately re-raised
  "UP", # pyupgrade
  "W", # pycodestyle
]

lint.ignore = [
  "D202", # No blank lines allowed after function docstring
  "D203", # 1 blank line required before class docstring
  "D213", # Multi-line docstring summary should start at the second line
  "D406", # Section name should end with a newline
  "D407", # Section name underlining
  "E501", # line too long
  "E731", # do not assign a lambda expression, use a def

  # Ignore ignored, as the rule is now back in preview/nursery, which cannot
  # be ignored anymore without warnings.
  # https://github.com/astral-sh/ruff/issues/7491
  # "PLC1901", # Lots of false positives

  # False positives https://github.com/astral-sh/ruff/issues/5386
  "PLC0208", # Use a sequence type instead of a `set` when iterating over values
  "PLR0911", # Too many return statements ({returns} > {max_returns})
  "PLR0912", # Too many branches ({branches} > {max_branches})
  "PLR0913", # Too many arguments to function call ({c_args} > {max_args})
  "PLR0915", # Too many statements ({statements} > {max_statements})
  "PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable
  "PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target
  "UP006", # keep type annotation style as is
  "UP007", # keep type annotation style as is
  # Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923
  "UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)`

  # May conflict with the formatter, https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
  "W191",
  "E111",
  "E114",
  "E117",
  "D206",
  "D300",
  "Q000",
  "Q001",
  "Q002",
  "Q003",
  "COM812",
  "COM819",
  "ISC001",
  "ISC002",

  # Disabled because ruff does not understand type of __all__ generated by a function
  "PLE0605",
]

[tool.ruff.lint.flake8-import-conventions.extend-aliases]
voluptuous = "vol"

[tool.ruff.lint.flake8-pytest-style]
fixture-parentheses = false

[tool.ruff.lint.flake8-tidy-imports.banned-api]
"pytz".msg = "use zoneinfo instead"

[tool.ruff.lint.isort]
force-sort-within-sections = true
section-order = [
  "future",
  "standard-library",
  "third-party",
  "first-party",
  "local-folder",
]
forced-separate = ["tests"]
known-first-party = ["supervisor", "tests"]
combine-as-imports = true
split-on-trailing-comma = false

[tool.ruff.lint.per-file-ignores]

# DBus Service Mocks must use typing and names understood by dbus-fast
"tests/dbus_service_mocks/*.py" = ["F722", "F821", "N815"]

[tool.ruff.lint.mccabe]
max-complexity = 25
2 pytest.ini Normal file
@@ -0,0 +1,2 @@
[pytest]
asyncio_mode = auto
requirements.txt
@@ -1,30 +1,26 @@
aiodns==3.5.0
aiohttp==3.12.15
aiodns==3.0.0
aiohttp==3.8.4
async_timeout==4.0.2
atomicwrites-homeassistant==1.4.1
attrs==25.3.0
awesomeversion==25.5.0
blockbuster==1.5.25
brotli==1.1.0
ciso8601==2.3.2
colorlog==6.9.0
cpe==1.3.1
cryptography==45.0.5
debugpy==1.8.15
deepmerge==2.0
dirhash==0.5.0
docker==7.1.0
faust-cchardet==2.1.19
gitpython==3.1.45
jinja2==3.1.6
log-rate-limit==1.4.2
orjson==3.11.1
pulsectl==24.12.0
pyudev==0.24.3
PyYAML==6.0.2
requests==2.32.4
securetar==2025.2.1
sentry-sdk==2.34.1
setuptools==80.9.0
voluptuous==0.15.2
dbus-fast==2.44.2
zlib-fast==0.2.1
attrs==23.1.0
awesomeversion==23.5.0
brotli==1.0.9
ciso8601==2.3.0
colorlog==6.7.0
cpe==1.2.1
cryptography==41.0.1
debugpy==1.6.7
deepmerge==1.1.0
dirhash==0.2.1
docker==6.1.3
faust-cchardet==2.1.18
gitpython==3.1.31
jinja2==3.1.2
pulsectl==23.5.2
pyudev==0.24.1
ruamel.yaml==0.17.21
securetar==2023.3.0
sentry-sdk==1.25.0
voluptuous==0.13.1
dbus-fast==1.86.0
typing_extensions==4.6.3
requirements_tests.txt
@@ -1,16 +1,16 @@
astroid==3.3.11
coverage==7.10.1
mypy==1.17.0
pre-commit==4.2.0
pylint==3.3.7
pytest-aiohttp==1.1.0
pytest-asyncio==0.25.2
pytest-cov==6.2.1
pytest-timeout==2.4.0
pytest==8.4.1
ruff==0.12.7
time-machine==2.16.0
types-docker==7.1.0.20250705
types-pyyaml==6.0.12.20250516
types-requests==2.32.4.20250611
urllib3==2.5.0
black==23.3.0
coverage==7.2.7
flake8-docstrings==1.7.0
flake8==6.0.0
pre-commit==3.3.2
pydocstyle==6.3.0
pylint==2.17.4
pytest-aiohttp==1.0.4
pytest-asyncio==0.18.3
pytest-cov==4.1.0
pytest-timeout==2.1.0
pytest==7.3.1
pyupgrade==3.4.0
time-machine==2.9.0
typing_extensions==4.6.3
urllib3==1.26.15
@@ -15,7 +15,7 @@ do
    if [[ "${supervisor_state}" = "running" ]]; then

        # Check API
        if bashio::supervisor.ping > /dev/null; then
        if bashio::supervisor.ping; then
            failed_count=0
        else
            bashio::log.warning "Possible issue with Supervisor API health"
4 rootfs/root/.cas-trusted-signing-pub-key Normal file
@@ -0,0 +1,4 @@
-----BEGIN PUBLIC KEY-----
MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE03LvYuz79GTJx4uKp3w6NrSe5JZI
iBtgzzYi0YQYtZO/r+xFpgDJEa0gLHkXtl94fpqrFiN89In83lzaszbZtA==
-----END PUBLIC KEY-----
8 rootfs/root/.cas/config.json Normal file
@@ -0,0 +1,8 @@
{
  "currentcontext": {
    "LcHost": "cas.codenotary.com",
    "LcPort": "443"
  },
  "schemaversion": 3,
  "users": null
}
script/run-in-env.sh
@@ -1,30 +0,0 @@
#!/usr/bin/env sh
set -eu

# Used in venv activate script.
# Would be an error if undefined.
OSTYPE="${OSTYPE-}"

# Activate pyenv and virtualenv if present, then run the specified command

# pyenv, pyenv-virtualenv
if [ -s .python-version ]; then
    PYENV_VERSION=$(head -n 1 .python-version)
    export PYENV_VERSION
fi

if [ -n "${VIRTUAL_ENV-}" ] && [ -f "${VIRTUAL_ENV}/bin/activate" ]; then
    . "${VIRTUAL_ENV}/bin/activate"
else
    # other common virtualenvs
    my_path=$(git rev-parse --show-toplevel)

    for venv in venv .venv .; do
        if [ -f "${my_path}/${venv}/bin/activate" ]; then
            . "${my_path}/${venv}/bin/activate"
            break
        fi
    done
fi

exec "$@"
30 scripts/update-frontend.sh Executable file
@@ -0,0 +1,30 @@
#!/bin/bash
source "/etc/supervisor_scripts/common"

set -e

# Update frontend
git submodule update --init --recursive --remote

[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
cd home-assistant-polymer
nvm install
script/bootstrap

# Download translations
start_docker
./script/translations_download

# Build frontend
cd hassio
./script/build_hassio

# Copy frontend
rm -rf ../../supervisor/api/panel/*
cp -rf build/* ../../supervisor/api/panel/

# Reset frontend git
cd ..
git reset --hard HEAD

stop_docker
31 setup.cfg Normal file
@@ -0,0 +1,31 @@
[isort]
multi_line_output = 3
include_trailing_comma=True
force_grid_wrap=0
line_length=88
indent = " "
force_sort_within_sections = true
sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
default_section = THIRDPARTY
forced_separate = tests
combine_as_imports = true
use_parentheses = true
known_first_party = supervisor,tests

[flake8]
exclude = .venv,.git,.tox,docs,venv,bin,lib,deps,build
doctests = True
max-line-length = 88
# E501: line too long
# W503: Line break occurred before a binary operator
# E203: Whitespace before ':'
# D202 No blank lines allowed after function docstring
# W504 line break after binary operator
ignore =
    E501,
    W503,
    E203,
    D202,
    W504
per-file-ignores =
    tests/dbus_service_mocks/*.py: F821,F722
76 setup.py
@@ -1,28 +1,60 @@
"""Home Assistant Supervisor setup."""
from pathlib import Path
import re

from setuptools import setup

RE_SUPERVISOR_VERSION = re.compile(r"^SUPERVISOR_VERSION =\s*(.+)$")

SUPERVISOR_DIR = Path(__file__).parent
REQUIREMENTS_FILE = SUPERVISOR_DIR / "requirements.txt"
CONST_FILE = SUPERVISOR_DIR / "supervisor/const.py"

REQUIREMENTS = REQUIREMENTS_FILE.read_text(encoding="utf-8")
CONSTANTS = CONST_FILE.read_text(encoding="utf-8")


def _get_supervisor_version():
    for line in CONSTANTS.split("\n"):
        if match := RE_SUPERVISOR_VERSION.match(line):
            return match.group(1)
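    # No pinned release found in const.py - fall back to a development placeholder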
    return "9999.09.9.dev9999"

from supervisor.const import SUPERVISOR_VERSION

setup(
    version=_get_supervisor_version(),
    dependencies=REQUIREMENTS.split("\n"),
    name="Supervisor",
    version=SUPERVISOR_VERSION,
    license="BSD License",
    author="The Home Assistant Authors",
    author_email="hello@home-assistant.io",
    url="https://home-assistant.io/",
    description=("Open-source private cloud OS for Home Assistant based on HassOS"),
    long_description=(
        "A maintenance-free private cloud operating system that "
        "sets up a Home Assistant instance. Based on HassOS"
    ),
    classifiers=[
        "Intended Audience :: End Users/Desktop",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: OS Independent",
        "Topic :: Home Automation",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "Topic :: Scientific/Engineering :: Atmospheric Science",
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "Programming Language :: Python :: 3.8",
    ],
    keywords=["docker", "home-assistant", "api"],
    zip_safe=False,
    platforms="any",
    packages=[
        "supervisor.addons",
        "supervisor.api",
        "supervisor.backups",
        "supervisor.dbus.network",
        "supervisor.dbus.network.setting",
        "supervisor.dbus",
        "supervisor.discovery.services",
        "supervisor.discovery",
        "supervisor.docker",
        "supervisor.homeassistant",
        "supervisor.host",
        "supervisor.jobs",
        "supervisor.misc",
        "supervisor.plugins",
        "supervisor.resolution.checks",
        "supervisor.resolution.evaluations",
        "supervisor.resolution.fixups",
        "supervisor.resolution",
        "supervisor.security",
        "supervisor.services.modules",
        "supervisor.services",
        "supervisor.store",
        "supervisor.utils",
        "supervisor",
    ],
    include_package_data=True,
)
supervisor/__main__.py
@@ -1,22 +1,11 @@
"""Main file for Supervisor."""
import asyncio
from concurrent.futures import ThreadPoolExecutor
import logging
from pathlib import Path
import sys

import zlib_fast

# Enable fast zlib before importing supervisor
zlib_fast.enable()

# pylint: disable=wrong-import-position
from supervisor import bootstrap  # noqa: E402
from supervisor.utils.blockbuster import BlockBusterManager  # noqa: E402
from supervisor.utils.logging import activate_log_queue_handler  # noqa: E402

# pylint: enable=wrong-import-position
from supervisor import bootstrap

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -49,16 +38,13 @@ if __name__ == "__main__":
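    # Run blocking (synchronous) work on a dedicated, named thread pool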
    executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
    loop.set_default_executor(executor)

    activate_log_queue_handler()

    _LOGGER.info("Initializing Supervisor setup")
    coresys = loop.run_until_complete(bootstrap.initialize_coresys())
    loop.set_debug(coresys.config.debug)
    if coresys.config.detect_blocking_io:
        BlockBusterManager.activate()
    loop.run_until_complete(coresys.core.connect())

    loop.run_until_complete(bootstrap.supervisor_debugger(coresys))
    bootstrap.supervisor_debugger(coresys)
    bootstrap.migrate_system_env(coresys)

    # Signal health startup for container
    run_os_startup_check_cleanup()
@@ -66,15 +52,8 @@ if __name__ == "__main__":
    _LOGGER.info("Setting up Supervisor")
    loop.run_until_complete(coresys.core.setup())

    bootstrap.register_signal_handlers(loop, coresys)

    try:
        loop.run_until_complete(coresys.core.start())
    except Exception as err:  # pylint: disable=broad-except
        # Supervisor itself is running at this point, just something didn't
        # start as expected. Log with traceback to get more insights for
        # such cases.
        _LOGGER.critical("Supervisor start failed: %s", err, exc_info=True)
    loop.call_soon_threadsafe(loop.create_task, coresys.core.start())
    loop.call_soon_threadsafe(bootstrap.reg_signal, loop, coresys)

    try:
        _LOGGER.info("Running Supervisor")
supervisor/addons/__init__.py
@@ -1 +1,427 @@
"""Init file for Supervisor add-ons."""
import asyncio
from contextlib import suppress
import logging
import tarfile
from typing import Union

from ..const import AddonBoot, AddonStartup, AddonState
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import (
    AddonConfigurationError,
    AddonsError,
    AddonsJobError,
    AddonsNotSupportedError,
    CoreDNSError,
    DockerAPIError,
    DockerError,
    DockerNotFound,
    HomeAssistantAPIError,
    HostAppArmorError,
)
from ..jobs.decorator import Job, JobCondition
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..store.addon import AddonStore
from ..utils import check_exception_chain
from ..utils.sentry import capture_exception
from .addon import Addon
from .const import ADDON_UPDATE_CONDITIONS
from .data import AddonsData

_LOGGER: logging.Logger = logging.getLogger(__name__)

AnyAddon = Union[Addon, AddonStore]


class AddonManager(CoreSysAttributes):
    """Manage add-ons inside Supervisor."""

    def __init__(self, coresys: CoreSys):
        """Initialize Docker base wrapper."""
        self.coresys: CoreSys = coresys
        self.data: AddonsData = AddonsData(coresys)
        self.local: dict[str, Addon] = {}
        self.store: dict[str, AddonStore] = {}

    @property
    def all(self) -> list[AnyAddon]:
        """Return a list of all add-ons."""
        addons: dict[str, AnyAddon] = {**self.store, **self.local}
        return list(addons.values())

    @property
    def installed(self) -> list[Addon]:
        """Return a list of all installed add-ons."""
        return list(self.local.values())

    def get(self, addon_slug: str, local_only: bool = False) -> AnyAddon | None:
        """Return an add-on from slug.

        Prio:
          1 - Local
          2 - Store
        """
        if addon_slug in self.local:
            return self.local[addon_slug]
        if not local_only:
            return self.store.get(addon_slug)
        return None

    def from_token(self, token: str) -> Addon | None:
        """Return an add-on from Supervisor token."""
        for addon in self.installed:
            if token == addon.supervisor_token:
                return addon
        return None

    async def load(self) -> None:
        """Start up add-on management."""
        tasks = []
        for slug in self.data.system:
            addon = self.local[slug] = Addon(self.coresys, slug)
            tasks.append(self.sys_create_task(addon.load()))

        # Run initial tasks
        _LOGGER.info("Found %d installed add-ons", len(tasks))
        if tasks:
            await asyncio.wait(tasks)

        # Sync DNS
        await self.sync_dns()

    async def boot(self, stage: AddonStartup) -> None:
        """Boot add-ons with mode auto."""
        tasks: list[Addon] = []
        for addon in self.installed:
            if addon.boot != AddonBoot.AUTO or addon.startup != stage:
                continue
            tasks.append(addon)

        # Evaluate add-ons which need to be started
        _LOGGER.info("Phase '%s' starting %d add-ons", stage, len(tasks))
        if not tasks:
            return

        # Start add-ons sequentially to avoid issues on slow IO
        for addon in tasks:
            try:
                await addon.start()
            except AddonsError as err:
                # Check if there is a system/user issue
                if check_exception_chain(
                    err, (DockerAPIError, DockerNotFound, AddonConfigurationError)
                ):
                    addon.boot = AddonBoot.MANUAL
                    addon.save_persist()
            except Exception as err:  # pylint: disable=broad-except
                capture_exception(err)
            else:
                continue

            _LOGGER.warning("Can't start Add-on %s", addon.slug)

        await asyncio.sleep(self.sys_config.wait_boot)

    async def shutdown(self, stage: AddonStartup) -> None:
        """Shut down add-ons."""
        tasks: list[Addon] = []
        for addon in self.installed:
            if addon.state != AddonState.STARTED or addon.startup != stage:
                continue
            tasks.append(addon)

        # Evaluate add-ons which need to be stopped
        _LOGGER.info("Phase '%s' stopping %d add-ons", stage, len(tasks))
        if not tasks:
            return

        # Stop add-ons sequentially to avoid issues on slow IO
        for addon in tasks:
            try:
                await addon.stop()
            except Exception as err:  # pylint: disable=broad-except
                _LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err)
                capture_exception(err)

    @Job(
        conditions=ADDON_UPDATE_CONDITIONS,
        on_condition=AddonsJobError,
    )
    async def install(self, slug: str) -> None:
        """Install an add-on."""
        if slug in self.local:
            raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning)
        store = self.store.get(slug)

        if not store:
            raise AddonsError(f"Add-on {slug} does not exist", _LOGGER.error)
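
        # Make sure the add-on is supported on this system before installing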
        store.validate_availability()

        self.data.install(store)
        addon = Addon(self.coresys, slug)
        await addon.load()

        if not addon.path_data.is_dir():
            _LOGGER.info(
                "Creating Home Assistant add-on data folder %s", addon.path_data
            )
            addon.path_data.mkdir()

        # Setup/Fix AppArmor profile
        await addon.install_apparmor()

        try:
            await addon.instance.install(store.version, store.image, arch=addon.arch)
        except DockerError as err:
            self.data.uninstall(addon)
            raise AddonsError() from err

        self.local[slug] = addon

        # Reload ingress tokens
        if addon.with_ingress:
            await self.sys_ingress.reload()

        _LOGGER.info("Add-on '%s' successfully installed", slug)

    async def uninstall(self, slug: str) -> None:
        """Remove an add-on."""
        if slug not in self.local:
            _LOGGER.warning("Add-on %s is not installed", slug)
            return
        addon = self.local[slug]

        try:
            await addon.instance.remove()
        except DockerError as err:
            raise AddonsError() from err
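
        # The container is gone, so the add-on state can no longer be tracked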
        addon.state = AddonState.UNKNOWN

        await addon.unload()

        # Cleanup audio settings
        if addon.path_pulse.exists():
            with suppress(OSError):
                addon.path_pulse.unlink()

        # Cleanup AppArmor profile
        with suppress(HostAppArmorError):
            await addon.uninstall_apparmor()

        # Cleanup Ingress panel from sidebar
        if addon.ingress_panel:
            addon.ingress_panel = False
            with suppress(HomeAssistantAPIError):
                await self.sys_ingress.update_hass_panel(addon)

        # Cleanup Ingress dynamic port assignment
        if addon.with_ingress:
            self.sys_create_task(self.sys_ingress.reload())
            self.sys_ingress.del_dynamic_port(slug)

        # Cleanup discovery data
        for message in self.sys_discovery.list_messages:
            if message.addon != addon.slug:
                continue
            self.sys_discovery.remove(message)

        # Cleanup services data
        for service in self.sys_services.list_services:
            if addon.slug not in service.active:
                continue
            service.del_service_data(addon)

        self.data.uninstall(addon)
        self.local.pop(slug)

        _LOGGER.info("Add-on '%s' successfully removed", slug)

    @Job(
        conditions=ADDON_UPDATE_CONDITIONS,
        on_condition=AddonsJobError,
    )
    async def update(self, slug: str, backup: bool | None = False) -> None:
        """Update add-on."""
        if slug not in self.local:
            raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
        addon = self.local[slug]

        if addon.is_detached:
            raise AddonsError(
                f"Add-on {slug} is not available inside store", _LOGGER.error
            )
        store = self.store[slug]

        if addon.version == store.version:
            raise AddonsError(f"No update available for add-on {slug}", _LOGGER.warning)

        # Check availability again; maybe something has changed
        store.validate_availability()
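
        # Optionally create a partial backup of the add-on before updating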
        if backup:
            await self.sys_backups.do_backup_partial(
                name=f"addon_{addon.slug}_{addon.version}",
                homeassistant=False,
                addons=[addon.slug],
            )

        # Update instance
        last_state: AddonState = addon.state
        old_image = addon.image
        try:
            await addon.instance.update(store.version, store.image)
        except DockerError as err:
            raise AddonsError() from err

        _LOGGER.info("Add-on '%s' successfully updated", slug)
        self.data.update(store)

        # Cleanup
        with suppress(DockerError):
            await addon.instance.cleanup(old_image=old_image)

        # Setup/Fix AppArmor profile
        await addon.install_apparmor()

        # Restore state
        if last_state == AddonState.STARTED:
            await addon.start()

    @Job(
        conditions=[
            JobCondition.FREE_SPACE,
            JobCondition.INTERNET_HOST,
            JobCondition.HEALTHY,
        ],
        on_condition=AddonsJobError,
    )
    async def rebuild(self, slug: str) -> None:
        """Perform a rebuild of a local build add-on."""
        if slug not in self.local:
            raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
        addon = self.local[slug]

        if addon.is_detached:
            raise AddonsError(
                f"Add-on {slug} is not available inside store", _LOGGER.error
            )
        store = self.store[slug]

        # Check if a rebuild is possible now
        if addon.version != store.version:
            raise AddonsError(
                "Version changed, use update instead of rebuild", _LOGGER.error
            )
        if not addon.need_build:
            raise AddonsNotSupportedError(
                "Can't rebuild an image-based add-on", _LOGGER.error
            )

        # Remove the Docker container but not the add-on config
        last_state: AddonState = addon.state
        try:
            await addon.instance.remove()
            await addon.instance.install(addon.version)
        except DockerError as err:
            raise AddonsError() from err

        self.data.update(store)
        _LOGGER.info("Add-on '%s' successfully rebuilt", slug)

        # Restore state
        if last_state == AddonState.STARTED:
            await addon.start()

    @Job(
        conditions=[
            JobCondition.FREE_SPACE,
            JobCondition.INTERNET_HOST,
            JobCondition.HEALTHY,
        ],
        on_condition=AddonsJobError,
    )
    async def restore(self, slug: str, tar_file: tarfile.TarFile) -> None:
        """Restore state of an add-on."""
        if slug not in self.local:
            _LOGGER.debug("Add-on %s is not locally available for restore", slug)
            addon = Addon(self.coresys, slug)
        else:
            _LOGGER.debug("Add-on %s is locally available for restore", slug)
            addon = self.local[slug]

        await addon.restore(tar_file)

        # Check if new
        if slug not in self.local:
            _LOGGER.info("Detected new add-on after restore: %s", slug)
            self.local[slug] = addon

        # Update ingress
        if addon.with_ingress:
            await self.sys_ingress.reload()
            with suppress(HomeAssistantAPIError):
                await self.sys_ingress.update_hass_panel(addon)

    @Job(conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST])
    async def repair(self) -> None:
        """Repair local add-ons."""
        needs_repair: list[Addon] = []

        # Evaluate add-ons to repair
        for addon in self.installed:
            if await addon.instance.exists():
                continue
            needs_repair.append(addon)

        _LOGGER.info("Found %d add-ons to repair", len(needs_repair))
        if not needs_repair:
            return

        for addon in needs_repair:
            _LOGGER.info("Repairing add-on: %s", addon.slug)
            with suppress(DockerError, KeyError):
                # Need to pull the image again
                if not addon.need_build:
                    await addon.instance.install(addon.version, addon.image)
                    continue

                # Need local lookup
                if addon.need_build and not addon.is_detached:
                    store = self.store[addon.slug]
                    # If this add-on is available for rebuild
                    if addon.version == store.version:
                        await addon.instance.install(addon.version, addon.image)
                        continue

            _LOGGER.error("Can't repair %s", addon.slug)
            with suppress(AddonsError):
                await self.uninstall(addon.slug)

    async def sync_dns(self) -> None:
        """Sync add-ons DNS names."""
        # Update hosts
        for addon in self.installed:
            try:
                if not await addon.instance.is_running():
                    continue
            except DockerError as err:
                _LOGGER.warning("Add-on %s is corrupt: %s", addon.slug, err)
                self.sys_resolution.create_issue(
                    IssueType.CORRUPT_DOCKER,
                    ContextType.ADDON,
                    reference=addon.slug,
                    suggestions=[SuggestionType.EXECUTE_REPAIR],
                )
                capture_exception(err)
            else:
                self.sys_plugins.dns.add_host(
                    ipv4=addon.ip_address, names=[addon.hostname], write=False
                )

        # Write hosts files
        with suppress(CoreDNSError):
            self.sys_plugins.dns.write_hosts()
File diff suppressed because it is too large
supervisor/addons/build.py
@@ -1,10 +1,9 @@
"""Supervisor add-on build environment."""
from __future__ import annotations

from functools import cached_property
from pathlib import Path
from typing import TYPE_CHECKING, Any
from typing import TYPE_CHECKING

from awesomeversion import AwesomeVersion

@@ -15,7 +14,6 @@ from ..const import (
    ATTR_SQUASH,
    FILE_SUFFIX_CONFIGURATION,
    META_ADDON,
    SOCKET_DOCKER,
)
from ..coresys import CoreSys, CoreSysAttributes
from ..docker.interface import MAP_ARCH
@@ -24,7 +22,7 @@ from ..utils.common import FileConfiguration, find_one_filetype
from .validate import SCHEMA_BUILD_CONFIG

if TYPE_CHECKING:
    from .manager import AnyAddon
    from . import AnyAddon


class AddonBuild(FileConfiguration, CoreSysAttributes):
@@ -35,36 +33,23 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
        self.coresys: CoreSys = coresys
        self.addon = addon

        # Search for build file later in executor
        super().__init__(None, SCHEMA_BUILD_CONFIG)

    def _get_build_file(self) -> Path:
        """Get build file.

        Must be run in executor.
        """
        try:
            return find_one_filetype(
            build_file = find_one_filetype(
                self.addon.path_location, "build", FILE_SUFFIX_CONFIGURATION
            )
        except ConfigurationFileError:
            return self.addon.path_location / "build.json"
            build_file = self.addon.path_location / "build.json"

    async def read_data(self) -> None:
        """Load data from file."""
        if not self._file:
            self._file = await self.sys_run_in_executor(self._get_build_file)
        super().__init__(build_file, SCHEMA_BUILD_CONFIG)

        await super().read_data()

    async def save_data(self):
    def save_data(self):
        """Ignore save function."""
        raise RuntimeError()

    @cached_property
    def arch(self) -> str:
        """Return arch of the add-on."""
        return self.sys_arch.match([self.addon.arch])
        return self.sys_arch.match(self.addon.arch)

    @property
    def base_image(self) -> str:
@@ -82,6 +67,13 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
        )
        return self._data[ATTR_BUILD_FROM][self.arch]

    @property
    def dockerfile(self) -> Path:
        """Return Dockerfile path."""
        if self.addon.path_location.joinpath(f"Dockerfile.{self.arch}").exists():
            return self.addon.path_location.joinpath(f"Dockerfile.{self.arch}")
        return self.addon.path_location.joinpath("Dockerfile")

    @property
    def squash(self) -> bool:
        """Return True or False if squash is active."""
@@ -97,89 +89,49 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
        """Return additional Docker labels."""
        return self._data[ATTR_LABELS]

    def get_dockerfile(self) -> Path:
        """Return Dockerfile path.

        Must be run in executor.
        """
        if self.addon.path_location.joinpath(f"Dockerfile.{self.arch}").exists():
            return self.addon.path_location.joinpath(f"Dockerfile.{self.arch}")
        return self.addon.path_location.joinpath("Dockerfile")

    async def is_valid(self) -> bool:
    @property
    def is_valid(self) -> bool:
        """Return true if the build env is valid."""

        def build_is_valid() -> bool:
        try:
            return all(
                [
                    self.addon.path_location.is_dir(),
                    self.get_dockerfile().is_file(),
                    self.dockerfile.is_file(),
                ]
            )

        try:
            return await self.sys_run_in_executor(build_is_valid)
        except HassioArchNotFound:
            return False

    def get_docker_args(
        self, version: AwesomeVersion, image_tag: str
    ) -> dict[str, Any]:
        """Create a dict with Docker run args."""
        dockerfile_path = self.get_dockerfile().relative_to(self.addon.path_location)
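
        # Assemble the docker buildx invocation used to build the add-on image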
|
||||
|
||||
build_cmd = [
|
||||
"docker",
|
||||
"buildx",
|
||||
"build",
|
||||
".",
|
||||
"--tag",
|
||||
image_tag,
|
||||
"--file",
|
||||
str(dockerfile_path),
|
||||
"--platform",
|
||||
MAP_ARCH[self.arch],
|
||||
"--pull",
|
||||
]
|
||||
|
||||
labels = {
|
||||
"io.hass.version": version,
|
||||
"io.hass.arch": self.arch,
|
||||
"io.hass.type": META_ADDON,
|
||||
"io.hass.name": self._fix_label("name"),
|
||||
"io.hass.description": self._fix_label("description"),
|
||||
**self.additional_labels,
|
||||
def get_docker_args(self, version: AwesomeVersion):
|
||||
"""Create a dict with Docker build arguments."""
|
||||
args = {
|
||||
"path": str(self.addon.path_location),
|
||||
"tag": f"{self.addon.image}:{version!s}",
|
||||
"dockerfile": str(self.dockerfile),
|
||||
"pull": True,
|
||||
"forcerm": not self.sys_dev,
|
||||
"squash": self.squash,
|
||||
"platform": MAP_ARCH[self.arch],
|
||||
"labels": {
|
||||
"io.hass.version": version,
|
||||
"io.hass.arch": self.arch,
|
||||
"io.hass.type": META_ADDON,
|
||||
"io.hass.name": self._fix_label("name"),
|
||||
"io.hass.description": self._fix_label("description"),
|
||||
**self.additional_labels,
|
||||
},
|
||||
"buildargs": {
|
||||
"BUILD_FROM": self.base_image,
|
||||
"BUILD_VERSION": version,
|
||||
"BUILD_ARCH": self.sys_arch.default,
|
||||
**self.additional_args,
|
||||
},
|
||||
}
|
||||
|
||||
if self.addon.url:
|
||||
labels["io.hass.url"] = self.addon.url
|
||||
args["labels"]["io.hass.url"] = self.addon.url
|
||||
|
||||
for key, value in labels.items():
|
||||
build_cmd.extend(["--label", f"{key}={value}"])
|
||||
|
||||
build_args = {
|
||||
"BUILD_FROM": self.base_image,
|
||||
"BUILD_VERSION": version,
|
||||
"BUILD_ARCH": self.sys_arch.default,
|
||||
**self.additional_args,
|
||||
}
|
||||
|
||||
for key, value in build_args.items():
|
||||
build_cmd.extend(["--build-arg", f"{key}={value}"])
|
||||
|
||||
# The addon path will be mounted from the host system
|
||||
addon_extern_path = self.sys_config.local_to_extern_path(
|
||||
self.addon.path_location
|
||||
)
|
||||
|
||||
return {
|
||||
"command": build_cmd,
|
||||
"volumes": {
|
||||
SOCKET_DOCKER: {"bind": "/var/run/docker.sock", "mode": "rw"},
|
||||
addon_extern_path: {"bind": "/addon", "mode": "ro"},
|
||||
},
|
||||
"working_dir": "/addon",
|
||||
}
|
||||
return args
|
||||
|
||||
def _fix_label(self, label_name: str) -> str:
|
||||
"""Remove characters they are not supported."""
|
||||
|
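Note: the newer side of this hunk replaces the Docker SDK's keyword-argument build (`path`, `tag`, `buildargs`, ...) with an assembled `docker buildx build` command line. A minimal sketch of that assembly, with illustrative names rather than the Supervisor API:

from pathlib import Path

def buildx_command(
    image_tag: str,
    platform: str,
    dockerfile: Path,
    labels: dict[str, str],
    build_args: dict[str, str],
) -> list[str]:
    """Assemble a `docker buildx build` invocation as an argv list."""
    cmd = [
        "docker", "buildx", "build", ".",
        "--tag", image_tag,
        "--file", str(dockerfile),
        "--platform", platform,
        "--pull",
    ]
    # Flatten the mappings into repeated --label / --build-arg flags,
    # mirroring the loops in the hunk above.
    for key, value in labels.items():
        cmd.extend(["--label", f"{key}={value}"])
    for key, value in build_args.items():
        cmd.extend(["--build-arg", f"{key}={value}"])
    return cmd

# Example (hypothetical values):
# buildx_command("local/my-addon:1.0", "linux/amd64", Path("Dockerfile"),
#                {"io.hass.type": "addon"}, {"BUILD_FROM": "alpine:3.19"})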
@@ -1,11 +0,0 @@
"""Configuration Objects for Addon Config."""

from dataclasses import dataclass


@dataclass(slots=True)
class FolderMapping:
    """Represent folder mapping configuration."""

    path: str | None
    read_only: bool
@@ -1,38 +1,19 @@
"""Add-on static data."""

from datetime import timedelta
from enum import StrEnum
from enum import Enum

from ..jobs.const import JobCondition


class AddonBackupMode(StrEnum):
class AddonBackupMode(str, Enum):
    """Backup mode of an Add-on."""

    HOT = "hot"
    COLD = "cold"


class MappingType(StrEnum):
    """Mapping type of an Add-on Folder."""

    DATA = "data"
    CONFIG = "config"
    SSL = "ssl"
    ADDONS = "addons"
    BACKUP = "backup"
    SHARE = "share"
    MEDIA = "media"
    HOMEASSISTANT_CONFIG = "homeassistant_config"
    ALL_ADDON_CONFIGS = "all_addon_configs"
    ADDON_CONFIG = "addon_config"


ATTR_BACKUP = "backup"
ATTR_BREAKING_VERSIONS = "breaking_versions"
ATTR_CODENOTARY = "codenotary"
ATTR_READ_ONLY = "read_only"
ATTR_PATH = "path"
WATCHDOG_RETRY_SECONDS = 10
WATCHDOG_MAX_ATTEMPTS = 5
WATCHDOG_THROTTLE_PERIOD = timedelta(minutes=30)
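Note: one side of this hunk subclasses `StrEnum` (Python 3.11+), the other the older `str, Enum` combination. The practical difference is how members stringify; a small standard-library-only illustration (behavior as of Python 3.11/3.12):

from enum import Enum, StrEnum

class OldMode(str, Enum):
    HOT = "hot"

class NewMode(StrEnum):
    HOT = "hot"

# str() of a str/Enum member includes the class name; StrEnum gives the bare value.
assert str(OldMode.HOT) == "OldMode.HOT"
assert str(NewMode.HOT) == "hot"
# Both still compare equal to the raw string:
assert OldMode.HOT == "hot" and NewMode.HOT == "hot"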
@@ -1,5 +1,4 @@
"""Init file for Supervisor add-on data."""

from copy import deepcopy
from typing import Any

@@ -38,7 +37,7 @@ class AddonsData(FileConfiguration, CoreSysAttributes):
        """Return local add-on data."""
        return self._data[ATTR_SYSTEM]

    async def install(self, addon: AddonStore) -> None:
    def install(self, addon: AddonStore) -> None:
        """Set addon as installed."""
        self.system[addon.slug] = deepcopy(addon.data)
        self.user[addon.slug] = {
@@ -46,28 +45,26 @@ class AddonsData(FileConfiguration, CoreSysAttributes):
            ATTR_VERSION: addon.version,
            ATTR_IMAGE: addon.image,
        }
        await self.save_data()
        self.save_data()

    async def uninstall(self, addon: Addon) -> None:
    def uninstall(self, addon: Addon) -> None:
        """Set add-on as uninstalled."""
        self.system.pop(addon.slug, None)
        self.user.pop(addon.slug, None)
        await self.save_data()
        self.save_data()

    async def update(self, addon: AddonStore) -> None:
    def update(self, addon: AddonStore) -> None:
        """Update version of add-on."""
        self.system[addon.slug] = deepcopy(addon.data)
        self.user[addon.slug].update(
            {ATTR_VERSION: addon.version, ATTR_IMAGE: addon.image}
        )
        await self.save_data()
        self.save_data()

    async def restore(
        self, slug: str, user: Config, system: Config, image: str
    ) -> None:
    def restore(self, slug: str, user: Config, system: Config, image: str) -> None:
        """Restore data to add-on."""
        self.user[slug] = deepcopy(user)
        self.system[slug] = deepcopy(system)

        self.user[slug][ATTR_IMAGE] = image
        await self.save_data()
        self.save_data()
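Note: throughout this hunk the mutating methods become coroutines so that `save_data()` (async on the newer side, apparently persisting the file off the event loop) can be awaited rather than called blocking. A minimal sketch of that migration pattern, using hypothetical names, not the Supervisor classes:

import asyncio
import json
from pathlib import Path

class FileBackedData:
    """Sketch: keep a dict in memory, persist it without blocking the loop."""

    def __init__(self, path: Path) -> None:
        self._path = path
        self.data: dict = {}

    def _write(self) -> None:
        # Blocking file I/O, kept out of the event loop.
        self._path.write_text(json.dumps(self.data))

    async def save_data(self) -> None:
        await asyncio.get_running_loop().run_in_executor(None, self._write)

    async def install(self, slug: str, version: str) -> None:
        self.data[slug] = {"version": version}
        await self.save_data()  # callers must now await the mutation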
@@ -1,408 +0,0 @@
"""Supervisor add-on manager."""

import asyncio
from collections.abc import Awaitable
from contextlib import suppress
import logging
import tarfile
from typing import Self, Union

from attr import evolve

from ..const import AddonBoot, AddonStartup, AddonState
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import (
    AddonsError,
    AddonsJobError,
    AddonsNotSupportedError,
    CoreDNSError,
    DockerError,
    HassioError,
    HomeAssistantAPIError,
)
from ..jobs.decorator import Job, JobCondition
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..store.addon import AddonStore
from ..utils.sentry import async_capture_exception
from .addon import Addon
from .const import ADDON_UPDATE_CONDITIONS
from .data import AddonsData

_LOGGER: logging.Logger = logging.getLogger(__name__)

AnyAddon = Union[Addon, AddonStore]


class AddonManager(CoreSysAttributes):
    """Manage add-ons inside Supervisor."""

    def __init__(self, coresys: CoreSys):
        """Initialize Docker base wrapper."""
        self.coresys: CoreSys = coresys
        self.data: AddonsData = AddonsData(coresys)
        self.local: dict[str, Addon] = {}
        self.store: dict[str, AddonStore] = {}

    @property
    def all(self) -> list[AnyAddon]:
        """Return a list of all add-ons."""
        addons: dict[str, AnyAddon] = {**self.store, **self.local}
        return list(addons.values())

    @property
    def installed(self) -> list[Addon]:
        """Return a list of all installed add-ons."""
        return list(self.local.values())

    def get(self, addon_slug: str, local_only: bool = False) -> AnyAddon | None:
        """Return an add-on from slug.

        Prio:
        1 - Local
        2 - Store
        """
        if addon_slug in self.local:
            return self.local[addon_slug]
        if not local_only:
            return self.store.get(addon_slug)
        return None

    def get_local_only(self, addon_slug: str) -> Addon | None:
        """Return an installed add-on from slug."""
        return self.local.get(addon_slug)

    def from_token(self, token: str) -> Addon | None:
        """Return an add-on from Supervisor token."""
        for addon in self.installed:
            if token == addon.supervisor_token:
                return addon
        return None

    async def load_config(self) -> Self:
        """Load config in executor."""
        await self.data.read_data()
        return self

    async def load(self) -> None:
        """Start up add-on management."""
        # Refresh cache for all store addons
        tasks: list[Awaitable[None]] = [
            store.refresh_path_cache() for store in self.store.values()
        ]

        # Load all installed addons
        for slug in self.data.system:
            addon = self.local[slug] = Addon(self.coresys, slug)
            tasks.append(addon.load())

        # Run initial tasks
        _LOGGER.info("Found %d installed add-ons", len(self.data.system))
        if tasks:
            await asyncio.gather(*tasks)

        # Sync DNS
        await self.sync_dns()

    async def boot(self, stage: AddonStartup) -> None:
        """Boot add-ons with mode auto."""
        tasks: list[Addon] = []
        for addon in self.installed:
            if addon.boot != AddonBoot.AUTO or addon.startup != stage:
                continue
            tasks.append(addon)

        # Evaluate add-ons which need to be started
        _LOGGER.info("Phase '%s' starting %d add-ons", stage, len(tasks))
        if not tasks:
            return

        # Start Add-ons sequential
        # avoid issue on slow IO
        # Config.wait_boot is deprecated. Until addons update with healthchecks,
        # add a sleep task for it to keep the same minimum amount of wait time
        wait_boot: list[Awaitable[None]] = [asyncio.sleep(self.sys_config.wait_boot)]
        for addon in tasks:
            try:
                if start_task := await addon.start():
                    wait_boot.append(start_task)
            except HassioError:
                self.sys_resolution.add_issue(
                    evolve(addon.boot_failed_issue),
                    suggestions=[
                        SuggestionType.EXECUTE_START,
                        SuggestionType.DISABLE_BOOT,
                    ],
                )
            else:
                continue

            _LOGGER.warning("Can't start Add-on %s", addon.slug)

        # Ignore exceptions from waiting for addon startup, addon errors handled elsewhere
        await asyncio.gather(*wait_boot, return_exceptions=True)

        # After waiting for startup, create an issue for boot addons that are error or unknown state
        # Ignore stopped as single shot addons can be run at boot and this is successful exit
        # Timeout waiting for startup is not a failure, addon is probably just slow
        for addon in tasks:
            if addon.state in {AddonState.ERROR, AddonState.UNKNOWN}:
                self.sys_resolution.add_issue(
                    evolve(addon.boot_failed_issue),
                    suggestions=[
                        SuggestionType.EXECUTE_START,
                        SuggestionType.DISABLE_BOOT,
                    ],
                )

    async def shutdown(self, stage: AddonStartup) -> None:
        """Shutdown addons."""
        tasks: list[Addon] = []
        for addon in self.installed:
            if addon.state != AddonState.STARTED or addon.startup != stage:
                continue
            tasks.append(addon)

        # Evaluate add-ons which need to be stopped
        _LOGGER.info("Phase '%s' stopping %d add-ons", stage, len(tasks))
        if not tasks:
            return

        # Stop Add-ons sequential
        # avoid issue on slow IO
        for addon in tasks:
            try:
                await addon.stop()
            except Exception as err:  # pylint: disable=broad-except
                _LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err)
                await async_capture_exception(err)

    @Job(
        name="addon_manager_install",
        conditions=ADDON_UPDATE_CONDITIONS,
        on_condition=AddonsJobError,
    )
    async def install(self, slug: str) -> None:
        """Install an add-on."""
        self.sys_jobs.current.reference = slug

        if slug in self.local:
            raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning)
        store = self.store.get(slug)

        if not store:
            raise AddonsError(f"Add-on {slug} does not exist", _LOGGER.error)

        store.validate_availability()

        await Addon(self.coresys, slug).install()

        _LOGGER.info("Add-on '%s' successfully installed", slug)

    @Job(name="addon_manager_uninstall")
    async def uninstall(self, slug: str, *, remove_config: bool = False) -> None:
        """Remove an add-on."""
        if slug not in self.local:
            _LOGGER.warning("Add-on %s is not installed", slug)
            return

        shared_image = any(
            self.local[slug].image == addon.image
            and self.local[slug].version == addon.version
            for addon in self.installed
            if addon.slug != slug
        )
        await self.local[slug].uninstall(
            remove_config=remove_config, remove_image=not shared_image
        )

        _LOGGER.info("Add-on '%s' successfully removed", slug)

    @Job(
        name="addon_manager_update",
        conditions=ADDON_UPDATE_CONDITIONS,
        on_condition=AddonsJobError,
    )
    async def update(
        self, slug: str, backup: bool | None = False
    ) -> asyncio.Task | None:
        """Update add-on.

        Returns a Task that completes when addon has state 'started' (see addon.start)
        if addon is started after update. Else nothing is returned.
        """
        self.sys_jobs.current.reference = slug

        if slug not in self.local:
            raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
        addon = self.local[slug]

        if addon.is_detached:
            raise AddonsError(
                f"Add-on {slug} is not available inside store", _LOGGER.error
            )
        store = self.store[slug]

        if addon.version == store.version:
            raise AddonsError(f"No update available for add-on {slug}", _LOGGER.warning)

        # Check if available, maybe something has changed
        store.validate_availability()

        if backup:
            await self.sys_backups.do_backup_partial(
                name=f"addon_{addon.slug}_{addon.version}",
                homeassistant=False,
                addons=[addon.slug],
            )

        return await addon.update()

    @Job(
        name="addon_manager_rebuild",
        conditions=[
            JobCondition.FREE_SPACE,
            JobCondition.INTERNET_HOST,
            JobCondition.HEALTHY,
        ],
        on_condition=AddonsJobError,
    )
    async def rebuild(self, slug: str, *, force: bool = False) -> asyncio.Task | None:
        """Perform a rebuild of local build add-on.

        Returns a Task that completes when addon has state 'started' (see addon.start)
        if addon is started after rebuild. Else nothing is returned.
        """
        self.sys_jobs.current.reference = slug

        if slug not in self.local:
            raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
        addon = self.local[slug]

        if addon.is_detached:
            raise AddonsError(
                f"Add-on {slug} is not available inside store", _LOGGER.error
            )
        store = self.store[slug]

        # Check if a rebuild is possible now
        if addon.version != store.version:
            raise AddonsError(
                "Version changed, use update instead of rebuild", _LOGGER.error
            )
        if not force and not addon.need_build:
            raise AddonsNotSupportedError(
                "Can't rebuild an image-based add-on", _LOGGER.error
            )

        return await addon.rebuild()

    @Job(
        name="addon_manager_restore",
        conditions=[
            JobCondition.FREE_SPACE,
            JobCondition.INTERNET_HOST,
            JobCondition.HEALTHY,
        ],
        on_condition=AddonsJobError,
    )
    async def restore(
        self, slug: str, tar_file: tarfile.TarFile
    ) -> asyncio.Task | None:
        """Restore state of an add-on.

        Returns a Task that completes when addon has state 'started' (see addon.start)
        if addon is started after restore. Else nothing is returned.
        """
        self.sys_jobs.current.reference = slug

        if slug not in self.local:
            _LOGGER.debug("Add-on %s is not locally available for restore", slug)
            addon = Addon(self.coresys, slug)
            had_ingress: bool | None = False
        else:
            _LOGGER.debug("Add-on %s is locally available for restore", slug)
            addon = self.local[slug]
            had_ingress = addon.ingress_panel

        wait_for_start = await addon.restore(tar_file)

        # Check if new
        if slug not in self.local:
            _LOGGER.info("Detected new add-on after restore: %s", slug)
            self.local[slug] = addon

        # Update ingress
        if had_ingress != addon.ingress_panel:
            await self.sys_ingress.reload()
            with suppress(HomeAssistantAPIError):
                await self.sys_ingress.update_hass_panel(addon)

        return wait_for_start

    @Job(
        name="addon_manager_repair",
        conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST],
    )
    async def repair(self) -> None:
        """Repair local add-ons."""
        needs_repair: list[Addon] = []

        # Evaluate Add-ons to repair
        for addon in self.installed:
            if await addon.instance.exists():
                continue
            needs_repair.append(addon)

        _LOGGER.info("Found %d add-ons to repair", len(needs_repair))
        if not needs_repair:
            return

        for addon in needs_repair:
            _LOGGER.info("Repairing add-on: %s", addon.slug)
            with suppress(DockerError, KeyError):
                # Need to pull the image again
                if not addon.need_build:
                    await addon.instance.install(addon.version, addon.image)
                    continue

                # Need local lookup
                if addon.need_build and not addon.is_detached:
                    store = self.store[addon.slug]
                    # If this add-on is available for rebuild
                    if addon.version == store.version:
                        await addon.instance.install(addon.version, addon.image)
                        continue

            _LOGGER.error("Can't repair %s", addon.slug)
            with suppress(AddonsError):
                await self.uninstall(addon.slug)

    async def sync_dns(self) -> None:
        """Sync add-ons DNS names."""
        # Update hosts
        add_host_coros: list[Awaitable[None]] = []
        for addon in self.installed:
            try:
                if not await addon.instance.is_running():
                    continue
            except DockerError as err:
                _LOGGER.warning("Add-on %s is corrupt: %s", addon.slug, err)
                self.sys_resolution.create_issue(
                    IssueType.CORRUPT_DOCKER,
                    ContextType.ADDON,
                    reference=addon.slug,
                    suggestions=[SuggestionType.EXECUTE_REPAIR],
                )
                await async_capture_exception(err)
            else:
                add_host_coros.append(
                    self.sys_plugins.dns.add_host(
                        ipv4=addon.ip_address, names=[addon.hostname], write=False
                    )
                )

        await asyncio.gather(*add_host_coros)

        # Write hosts files
        with suppress(CoreDNSError):
            await self.sys_plugins.dns.write_hosts()

@@ -1,18 +1,13 @@
"""Init file for Supervisor add-ons."""

from abc import ABC, abstractmethod
from collections import defaultdict
from collections.abc import Awaitable, Callable
from contextlib import suppress
from datetime import datetime
import logging
from pathlib import Path
from typing import Any

from awesomeversion import AwesomeVersion, AwesomeVersionException

from supervisor.utils.dt import utc_from_timestamp

from ..const import (
    ATTR_ADVANCED,
    ATTR_APPARMOR,
@@ -47,7 +42,7 @@ from ..const import (
    ATTR_JOURNALD,
    ATTR_KERNEL_MODULES,
    ATTR_LEGACY,
    ATTR_LOCATION,
    ATTR_LOCATON,
    ATTR_MACHINE,
    ATTR_MAP,
    ATTR_NAME,
@@ -69,13 +64,11 @@ from ..const import (
    ATTR_TIMEOUT,
    ATTR_TMPFS,
    ATTR_TRANSLATIONS,
    ATTR_TYPE,
    ATTR_UART,
    ATTR_UDEV,
    ATTR_URL,
    ATTR_USB,
    ATTR_VERSION,
    ATTR_VERSION_TIMESTAMP,
    ATTR_VIDEO,
    ATTR_WATCHDOG,
    ATTR_WEBUI,
@@ -83,47 +76,28 @@ from ..const import (
    SECURITY_DISABLE,
    SECURITY_PROFILE,
    AddonBoot,
    AddonBootConfig,
    AddonStage,
    AddonStartup,
)
from ..coresys import CoreSys
from ..coresys import CoreSys, CoreSysAttributes
from ..docker.const import Capabilities
from ..exceptions import AddonsNotSupportedError
from ..jobs.const import JOB_GROUP_ADDON
from ..jobs.job_group import JobGroup
from ..utils import version_is_new_enough
from .configuration import FolderMapping
from .const import (
    ATTR_BACKUP,
    ATTR_BREAKING_VERSIONS,
    ATTR_CODENOTARY,
    ATTR_PATH,
    ATTR_READ_ONLY,
    AddonBackupMode,
    MappingType,
)
from .const import ATTR_BACKUP, ATTR_CODENOTARY, AddonBackupMode
from .options import AddonOptions, UiOptions
from .validate import RE_SERVICE
from .validate import RE_SERVICE, RE_VOLUME

_LOGGER: logging.Logger = logging.getLogger(__name__)

Data = dict[str, Any]


class AddonModel(JobGroup, ABC):
class AddonModel(CoreSysAttributes, ABC):
    """Add-on Data layout."""

    def __init__(self, coresys: CoreSys, slug: str):
        """Initialize data holder."""
        super().__init__(
            coresys, JOB_GROUP_ADDON.format_map(defaultdict(str, slug=slug)), slug
        )
        self.coresys: CoreSys = coresys
        self.slug: str = slug
        self._path_icon_exists: bool = False
        self._path_logo_exists: bool = False
        self._path_changelog_exists: bool = False
        self._path_documentation_exists: bool = False

    @property
    @abstractmethod
@@ -150,15 +124,10 @@ class AddonModel(JobGroup, ABC):
        """Return options with local changes."""
        return self.data[ATTR_OPTIONS]

    @property
    def boot_config(self) -> AddonBootConfig:
        """Return boot config."""
        return self.data[ATTR_BOOT]

    @property
    def boot(self) -> AddonBoot:
        """Return boot config with prio local settings unless config is forced."""
        return AddonBoot(self.data[ATTR_BOOT])
        """Return boot config with prio local settings."""
        return self.data[ATTR_BOOT]

    @property
    def auto_update(self) -> bool | None:
@@ -210,6 +179,18 @@ class AddonModel(JobGroup, ABC):
        """Return description of add-on."""
        return self.data[ATTR_DESCRIPTON]

    @property
    def long_description(self) -> str | None:
        """Return README.md as long_description."""
        readme = Path(self.path_location, "README.md")

        # If the readme does not exist
        if not readme.exists():
            return None

        # Return data
        return readme.read_text(encoding="utf-8")

    @property
    def repository(self) -> str:
        """Return repository of add-on."""
@@ -225,11 +206,6 @@ class AddonModel(JobGroup, ABC):
        """Return latest version of add-on."""
        return self.data[ATTR_VERSION]

    @property
    def latest_version_timestamp(self) -> datetime:
        """Return when latest version was first seen."""
        return utc_from_timestamp(self.data[ATTR_VERSION_TIMESTAMP])

    @property
    def version(self) -> AwesomeVersion:
        """Return version of add-on."""
@@ -294,7 +270,7 @@ class AddonModel(JobGroup, ABC):
        return self.data.get(ATTR_WEBUI)

    @property
    def watchdog_url(self) -> str | None:
    def watchdog(self) -> str | None:
        """Return URL for watchdog or None."""
        return self.data.get(ATTR_WATCHDOG)

@@ -510,22 +486,22 @@ class AddonModel(JobGroup, ABC):
    @property
    def with_icon(self) -> bool:
        """Return True if an icon exists."""
        return self._path_icon_exists
        return self.path_icon.exists()

    @property
    def with_logo(self) -> bool:
        """Return True if a logo exists."""
        return self._path_logo_exists
        return self.path_logo.exists()

    @property
    def with_changelog(self) -> bool:
        """Return True if a changelog exists."""
        return self._path_changelog_exists
        return self.path_changelog.exists()

    @property
    def with_documentation(self) -> bool:
        """Return True if documentation exists."""
        return self._path_documentation_exists
        return self.path_documentation.exists()

    @property
    def supported_arch(self) -> list[str]:
@@ -556,20 +532,21 @@ class AddonModel(JobGroup, ABC):
        return ATTR_IMAGE not in self.data

    @property
    def map_volumes(self) -> dict[MappingType, FolderMapping]:
        """Return a dict of {MappingType: FolderMapping} from add-on."""
    def map_volumes(self) -> dict[str, bool]:
        """Return a dict of {volume: read-only} from add-on."""
        volumes = {}
        for volume in self.data[ATTR_MAP]:
            volumes[MappingType(volume[ATTR_TYPE])] = FolderMapping(
                volume.get(ATTR_PATH), volume[ATTR_READ_ONLY]
            )
            result = RE_VOLUME.match(volume)
            if not result:
                continue
            volumes[result.group(1)] = result.group(2) != "rw"

        return volumes

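Note: in the older form shown here, each `map` entry is a plain string such as `config:rw`, parsed with `RE_VOLUME`; the newer form stores dicts that become `FolderMapping` objects. A small self-contained sketch of both parses (the regex matches the older shape shown later in this diff; names are illustrative):

import re
from dataclasses import dataclass

RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share|media)(?::(rw|ro))?$")

@dataclass(slots=True)
class FolderMapping:
    path: str | None
    read_only: bool

def parse_legacy(entries: list[str]) -> dict[str, bool]:
    """Old style: {volume: read_only} from strings like 'config:rw'."""
    volumes: dict[str, bool] = {}
    for entry in entries:
        if match := RE_VOLUME.match(entry):
            volumes[match.group(1)] = match.group(2) != "rw"
    return volumes

def parse_new(entries: list[dict]) -> dict[str, FolderMapping]:
    """New style: dict entries with explicit type/read_only/path keys."""
    return {
        entry["type"]: FolderMapping(entry.get("path"), entry["read_only"])
        for entry in entries
    }

assert parse_legacy(["config:rw", "ssl"]) == {"config": False, "ssl": True}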
    @property
    def path_location(self) -> Path:
        """Return path to this add-on."""
        return Path(self.data[ATTR_LOCATION])
        return Path(self.data[ATTR_LOCATON])

    @property
    def path_icon(self) -> Path:
@@ -606,7 +583,7 @@ class AddonModel(JobGroup, ABC):
        return AddonOptions(self.coresys, raw_schema, self.name, self.slug)

    @property
    def schema_ui(self) -> list[dict[Any, Any]] | None:
    def schema_ui(self) -> list[dict[any, any]] | None:
        """Create a UI schema for add-on options."""
        raw_schema = self.data[ATTR_SCHEMA]

@@ -629,51 +606,16 @@ class AddonModel(JobGroup, ABC):
        """Return Signer email address for CAS."""
        return self.data.get(ATTR_CODENOTARY)

    @property
    def breaking_versions(self) -> list[AwesomeVersion]:
        """Return breaking versions of addon."""
        return self.data[ATTR_BREAKING_VERSIONS]

    async def long_description(self) -> str | None:
        """Return README.md as long_description."""

        def read_readme() -> str | None:
            readme = Path(self.path_location, "README.md")

            # If the readme does not exist
            if not readme.exists():
                return None

            # Return data
            return readme.read_text(encoding="utf-8")

        return await self.sys_run_in_executor(read_readme)

    def refresh_path_cache(self) -> Awaitable[None]:
        """Refresh cache of existing paths."""

        def check_paths():
            self._path_icon_exists = self.path_icon.exists()
            self._path_logo_exists = self.path_logo.exists()
            self._path_changelog_exists = self.path_changelog.exists()
            self._path_documentation_exists = self.path_documentation.exists()

        return self.sys_run_in_executor(check_paths)

    def validate_availability(self) -> None:
        """Validate if addon is available for current system."""
        return self._validate_availability(self.data, logger=_LOGGER.error)

    def __eq__(self, other: Any) -> bool:
        """Compare add-on objects."""
    def __eq__(self, other):
        """Compare add-on objects."""
        if not isinstance(other, AddonModel):
            return False
        return self.slug == other.slug

    def __hash__(self) -> int:
        """Hash for add-on objects."""
        return hash(self.slug)

    def _validate_availability(
        self, config, *, logger: Callable[..., None] | None = None
    ) -> None:
@@ -698,9 +640,7 @@ class AddonModel(JobGroup, ABC):
        # Home Assistant
        version: AwesomeVersion | None = config.get(ATTR_HOMEASSISTANT)
        with suppress(AwesomeVersionException, TypeError):
            if version and not version_is_new_enough(
                self.sys_homeassistant.version, version
            ):
            if self.sys_homeassistant.version < version:
                raise AddonsNotSupportedError(
                    f"Add-on {self.slug} not supported on this system, requires Home Assistant version {version} or greater",
                    logger,
@@ -724,3 +664,19 @@ class AddonModel(JobGroup, ABC):

        # local build
        return f"{config[ATTR_REPOSITORY]}/{self.sys_arch.default}-addon-{config[ATTR_SLUG]}"

    def install(self) -> Awaitable[None]:
        """Install this add-on."""
        return self.sys_addons.install(self.slug)

    def uninstall(self) -> Awaitable[None]:
        """Uninstall this add-on."""
        return self.sys_addons.uninstall(self.slug)

    def update(self, backup: bool | None = False) -> Awaitable[None]:
        """Update this add-on."""
        return self.sys_addons.update(self.slug, backup=backup)

    def rebuild(self) -> Awaitable[None]:
        """Rebuild this add-on."""
        return self.sys_addons.rebuild(self.slug)

@@ -1,5 +1,4 @@
"""Add-on Options / UI rendering."""

import hashlib
import logging
from pathlib import Path
@@ -137,7 +136,7 @@ class AddonOptions(CoreSysAttributes):
            ) from None

        # prepare range
        range_args: dict[str, Any] = {}
        range_args = {}
        for group_name in _SCHEMA_LENGTH_PARTS:
            group_value = match.group(group_name)
            if group_value:
@@ -390,14 +389,14 @@ class UiOptions(CoreSysAttributes):
        multiple: bool = False,
    ) -> None:
        """UI nested dict items."""
        ui_node: dict[str, Any] = {
        ui_node = {
            "name": key,
            "type": "schema",
            "optional": True,
            "multiple": multiple,
        }

        nested_schema: list[dict[str, Any]] = []
        nested_schema = []
        for c_key, c_value in option_dict.items():
            # Nested?
            if isinstance(c_value, list):
@@ -413,7 +412,7 @@ def _create_device_filter(str_filter: str) -> dict[str, Any]:
    """Generate device Filter."""
    raw_filter = dict(value.split("=") for value in str_filter.split(";"))

    clean_filter: dict[str, Any] = {}
    clean_filter = {}
    for key, value in raw_filter.items():
        if key == "subsystem":
            clean_filter[key] = UdevSubsystem(value)

@@ -1,10 +1,9 @@
"""Util add-ons functions."""

from __future__ import annotations

import asyncio
import logging
from pathlib import Path
import subprocess
from typing import TYPE_CHECKING

from ..const import ROLE_ADMIN, ROLE_MANAGER, SECURITY_DISABLE, SECURITY_PROFILE
@@ -46,7 +45,6 @@ def rating_security(addon: AddonModel) -> int:
        privilege in addon.privileged
        for privilege in (
            Capabilities.BPF,
            Capabilities.CHECKPOINT_RESTORE,
            Capabilities.DAC_READ_SEARCH,
            Capabilities.NET_ADMIN,
            Capabilities.NET_RAW,
@@ -86,20 +84,18 @@ def rating_security(addon: AddonModel) -> int:
    return max(min(8, rating), 1)


def remove_data(folder: Path) -> None:
    """Remove folder and reset privileged.

    Must be run in executor.
    """
async def remove_data(folder: Path) -> None:
    """Remove folder and reset privileged."""
    try:
        subprocess.run(
            ["rm", "-rf", str(folder)], stdout=subprocess.DEVNULL, text=True, check=True
        proc = await asyncio.create_subprocess_exec(
            "rm", "-rf", str(folder), stdout=asyncio.subprocess.DEVNULL
        )

        _, error_msg = await proc.communicate()
    except OSError as err:
        error_msg = str(err)
    except subprocess.CalledProcessError as procerr:
        error_msg = procerr.stderr.strip()
    else:
        return
        if proc.returncode == 0:
            return

    _LOGGER.error("Can't remove Add-on Data: %s", error_msg)

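Note: the two sides of `remove_data` above show the same operation done with blocking `subprocess.run` (meant to be called from an executor) versus `asyncio.create_subprocess_exec` awaited directly on the event loop. A minimal runnable sketch of the two patterns, assuming nothing beyond the standard library:

import asyncio
import subprocess
from pathlib import Path

def remove_blocking(folder: Path) -> None:
    """Blocking variant: safe only inside an executor thread."""
    subprocess.run(["rm", "-rf", str(folder)], check=True, stdout=subprocess.DEVNULL)

async def remove_async(folder: Path) -> None:
    """Async variant: the subprocess is managed by the event loop itself."""
    proc = await asyncio.create_subprocess_exec(
        "rm", "-rf", str(folder), stdout=asyncio.subprocess.DEVNULL
    )
    await proc.communicate()  # wait for exit without blocking the loop
    if proc.returncode != 0:
        raise RuntimeError(f"rm failed with {proc.returncode}")

# Usage (hypothetical path): asyncio.run(remove_async(Path("/tmp/example-addon-data")))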
@@ -1,5 +1,4 @@
"""Validate add-ons options schema."""

import logging
import re
import secrets
@@ -55,7 +54,7 @@ from ..const import (
    ATTR_KERNEL_MODULES,
    ATTR_LABELS,
    ATTR_LEGACY,
    ATTR_LOCATION,
    ATTR_LOCATON,
    ATTR_MACHINE,
    ATTR_MAP,
    ATTR_NAME,
@@ -79,12 +78,9 @@ from ..const import (
    ATTR_STATE,
    ATTR_STDIN,
    ATTR_SYSTEM,
    ATTR_SYSTEM_MANAGED,
    ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
    ATTR_TIMEOUT,
    ATTR_TMPFS,
    ATTR_TRANSLATIONS,
    ATTR_TYPE,
    ATTR_UART,
    ATTR_UDEV,
    ATTR_URL,
@@ -98,11 +94,11 @@ from ..const import (
    ROLE_ALL,
    ROLE_DEFAULT,
    AddonBoot,
    AddonBootConfig,
    AddonStage,
    AddonStartup,
    AddonState,
)
from ..discovery.validate import valid_discovery_service
from ..docker.const import Capabilities
from ..validate import (
    docker_image,
@@ -113,23 +109,12 @@ from ..validate import (
    uuid_match,
    version_tag,
)
from .const import (
    ATTR_BACKUP,
    ATTR_BREAKING_VERSIONS,
    ATTR_CODENOTARY,
    ATTR_PATH,
    ATTR_READ_ONLY,
    RE_SLUG,
    AddonBackupMode,
    MappingType,
)
from .const import ATTR_BACKUP, ATTR_CODENOTARY, RE_SLUG, AddonBackupMode
from .options import RE_SCHEMA_ELEMENT

_LOGGER: logging.Logger = logging.getLogger(__name__)

RE_VOLUME = re.compile(
    r"^(data|config|ssl|addons|backup|share|media|homeassistant_config|all_addon_configs|addon_config)(?::(rw|ro))?$"
)
RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share|media)(?::(rw|ro))?$")
RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$")


@@ -158,9 +143,6 @@ RE_MACHINE = re.compile(
    r"|raspberrypi3"
    r"|raspberrypi4-64"
    r"|raspberrypi4"
    r"|raspberrypi5-64"
    r"|yellow"
    r"|green"
    r"|tinker"
    r")$"
)
@@ -214,9 +196,9 @@ def _migrate_addon_config(protocol=False):
                name,
            )
            if value == "before":
                config[ATTR_STARTUP] = AddonStartup.SERVICES
                config[ATTR_STARTUP] = AddonStartup.SERVICES.value
            elif value == "after":
                config[ATTR_STARTUP] = AddonStartup.APPLICATION
                config[ATTR_STARTUP] = AddonStartup.APPLICATION.value

        # UART 2021-01-20
        if "auto_uart" in config:
@@ -262,48 +244,6 @@ def _migrate_addon_config(protocol=False):
                name,
            )

        # 2023-11 "map" entries can also be dict to allow path configuration
        volumes = []
        for entry in config.get(ATTR_MAP, []):
            if isinstance(entry, dict):
                volumes.append(entry)
            if isinstance(entry, str):
                result = RE_VOLUME.match(entry)
                if not result:
                    continue
                volumes.append(
                    {
                        ATTR_TYPE: result.group(1),
                        ATTR_READ_ONLY: result.group(2) != "rw",
                    }
                )

        if volumes:
            config[ATTR_MAP] = volumes

        # 2023-10 "config" became "homeassistant" so /config can be used for addon's public config
        if any(volume[ATTR_TYPE] == MappingType.CONFIG for volume in volumes):
            if any(
                volume
                and volume[ATTR_TYPE]
                in {MappingType.ADDON_CONFIG, MappingType.HOMEASSISTANT_CONFIG}
                for volume in volumes
            ):
                _LOGGER.warning(
                    "Add-on config using incompatible map options, '%s' and '%s' are ignored if '%s' is included. Please report this to the maintainer of %s",
                    MappingType.ADDON_CONFIG,
                    MappingType.HOMEASSISTANT_CONFIG,
                    MappingType.CONFIG,
                    name,
                )
            else:
                _LOGGER.debug(
                    "Add-on config using deprecated map option '%s' instead of '%s'. Please report this to the maintainer of %s",
                    MappingType.CONFIG,
                    MappingType.HOMEASSISTANT_CONFIG,
                    name,
                )

        return config

    return _migrate
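Note: the removed block above migrates legacy string `map` entries into the dict form before schema validation. A compact sketch of that conversion, assuming the simplified regex shown earlier (hypothetical helper, not the Supervisor function):

import re

RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share|media)(?::(rw|ro))?$")

def migrate_map(entries: list) -> list[dict]:
    """Convert legacy 'name:rw' strings to {'type', 'read_only'} dicts."""
    volumes: list[dict] = []
    for entry in entries:
        if isinstance(entry, dict):
            volumes.append(entry)  # already migrated
        elif isinstance(entry, str) and (match := RE_VOLUME.match(entry)):
            volumes.append(
                {"type": match.group(1), "read_only": match.group(2) != "rw"}
            )
    return volumes

assert migrate_map(["config:rw", "share"]) == [
    {"type": "config", "read_only": False},
    {"type": "share", "read_only": True},
]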
@@ -322,9 +262,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
        vol.Optional(ATTR_STARTUP, default=AddonStartup.APPLICATION): vol.Coerce(
            AddonStartup
        ),
        vol.Optional(ATTR_BOOT, default=AddonBootConfig.AUTO): vol.Coerce(
            AddonBootConfig
        ),
        vol.Optional(ATTR_BOOT, default=AddonBoot.AUTO): vol.Coerce(AddonBoot),
        vol.Optional(ATTR_INIT, default=True): vol.Boolean(),
        vol.Optional(ATTR_ADVANCED, default=False): vol.Boolean(),
        vol.Optional(ATTR_STAGE, default=AddonStage.STABLE): vol.Coerce(AddonStage),
@@ -354,15 +292,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
        vol.Optional(ATTR_DEVICES): [str],
        vol.Optional(ATTR_UDEV, default=False): vol.Boolean(),
        vol.Optional(ATTR_TMPFS, default=False): vol.Boolean(),
        vol.Optional(ATTR_MAP, default=list): [
            vol.Schema(
                {
                    vol.Required(ATTR_TYPE): vol.Coerce(MappingType),
                    vol.Optional(ATTR_READ_ONLY, default=True): bool,
                    vol.Optional(ATTR_PATH): str,
                }
            )
        ],
        vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
        vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): str},
        vol.Optional(ATTR_PRIVILEGED): [vol.Coerce(Capabilities)],
        vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
@@ -383,7 +313,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
        vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
        vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
        vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
        vol.Optional(ATTR_DISCOVERY): [str],
        vol.Optional(ATTR_DISCOVERY): [valid_discovery_service],
        vol.Optional(ATTR_BACKUP_EXCLUDE): [str],
        vol.Optional(ATTR_BACKUP_PRE): str,
        vol.Optional(ATTR_BACKUP_POST): str,
@@ -414,7 +344,6 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
            vol.Coerce(int), vol.Range(min=10, max=300)
        ),
        vol.Optional(ATTR_JOURNALD, default=False): vol.Boolean(),
        vol.Optional(ATTR_BREAKING_VERSIONS, default=list): [version_tag],
    },
    extra=vol.REMOVE_EXTRA,
)
@@ -473,8 +402,6 @@ SCHEMA_ADDON_USER = vol.Schema(
        vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
        vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
        vol.Optional(ATTR_WATCHDOG, default=False): vol.Boolean(),
        vol.Optional(ATTR_SYSTEM_MANAGED, default=False): vol.Boolean(),
        vol.Optional(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY, default=None): vol.Maybe(str),
    },
    extra=vol.REMOVE_EXTRA,
)
@@ -483,7 +410,7 @@ SCHEMA_ADDON_SYSTEM = vol.All(
    _migrate_addon_config(),
    _SCHEMA_ADDON_CONFIG.extend(
        {
            vol.Required(ATTR_LOCATION): str,
            vol.Required(ATTR_LOCATON): str,
            vol.Required(ATTR_REPOSITORY): str,
            vol.Required(ATTR_TRANSLATIONS, default=dict): {
                str: SCHEMA_ADDON_TRANSLATIONS
@@ -1,23 +1,19 @@
"""Init file for Supervisor RESTful API."""

from dataclasses import dataclass
from functools import partial
import logging
from pathlib import Path
from typing import Any

from aiohttp import hdrs, web
from aiohttp import web

from ..const import SUPERVISOR_DOCKER_NAME, AddonState
from ..const import AddonState
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import APIAddonNotInstalled, HostNotSupportedError
from ..utils.sentry import async_capture_exception
from ..exceptions import APIAddonNotInstalled
from .addons import APIAddons
from .audio import APIAudio
from .auth import APIAuth
from .backups import APIBackups
from .cli import APICli
from .const import CONTENT_TYPE_TEXT
from .discovery import APIDiscovery
from .dns import APICoreDNS
from .docker import APIDocker
@@ -39,7 +35,7 @@ from .security import APISecurity
from .services import APIServices
from .store import APIStore
from .supervisor import APISupervisor
from .utils import api_process, api_process_raw
from .utils import api_process

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -48,14 +44,6 @@ MAX_CLIENT_SIZE: int = 1024**2 * 16
MAX_LINE_SIZE: int = 24570


@dataclass(slots=True, frozen=True)
class StaticResourceConfig:
    """Configuration for a static resource."""

    prefix: str
    path: Path


class RestAPI(CoreSysAttributes):
    """Handle RESTful API for Supervisor."""

@@ -78,17 +66,11 @@ class RestAPI(CoreSysAttributes):
        )

        # service stuff
        self._runner: web.AppRunner = web.AppRunner(self.webapp, shutdown_timeout=5)
        self._runner: web.AppRunner = web.AppRunner(self.webapp)
        self._site: web.TCPSite | None = None

        # share single host API handler for reuse in logging endpoints
        self._api_host: APIHost = APIHost()
        self._api_host.coresys = coresys

    async def load(self) -> None:
        """Register REST API Calls."""
        static_resource_configs: list[StaticResourceConfig] = []

        self._register_addons()
        self._register_audio()
        self._register_auth()
@@ -107,7 +89,7 @@ class RestAPI(CoreSysAttributes):
        self._register_network()
        self._register_observer()
        self._register_os()
        static_resource_configs.extend(self._register_panel())
        self._register_panel()
        self._register_proxy()
        self._register_resolution()
        self._register_root()
@@ -116,54 +98,12 @@ class RestAPI(CoreSysAttributes):
        self._register_store()
        self._register_supervisor()

        if static_resource_configs:

            def process_configs() -> list[web.StaticResource]:
                return [
                    web.StaticResource(config.prefix, config.path)
                    for config in static_resource_configs
                ]

            for resource in await self.sys_run_in_executor(process_configs):
                self.webapp.router.register_resource(resource)

        await self.start()

    def _register_advanced_logs(self, path: str, syslog_identifier: str):
        """Register logs endpoint for a given path, returning logs for single syslog identifier."""

        self.webapp.add_routes(
            [
                web.get(
                    f"{path}/logs",
                    partial(self._api_host.advanced_logs, identifier=syslog_identifier),
                ),
                web.get(
                    f"{path}/logs/follow",
                    partial(
                        self._api_host.advanced_logs,
                        identifier=syslog_identifier,
                        follow=True,
                    ),
                ),
                web.get(
                    f"{path}/logs/boots/{{bootid}}",
                    partial(self._api_host.advanced_logs, identifier=syslog_identifier),
                ),
                web.get(
                    f"{path}/logs/boots/{{bootid}}/follow",
                    partial(
                        self._api_host.advanced_logs,
                        identifier=syslog_identifier,
                        follow=True,
                    ),
                ),
            ]
        )

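Note: `_register_advanced_logs` reuses a single handler across several aiohttp routes via `functools.partial`, varying only keyword arguments. A standalone sketch of the pattern (the handler body is a placeholder, not the Supervisor implementation):

from functools import partial

from aiohttp import web

async def logs_handler(request: web.Request, identifier: str = "", follow: bool = False):
    """Placeholder handler: echo which variant was routed."""
    return web.json_response({"identifier": identifier, "follow": follow})

def register_logs(app: web.Application, path: str, identifier: str) -> None:
    """One handler, several routes, bound via functools.partial."""
    app.add_routes(
        [
            web.get(f"{path}/logs", partial(logs_handler, identifier=identifier)),
            web.get(
                f"{path}/logs/follow",
                partial(logs_handler, identifier=identifier, follow=True),
            ),
        ]
    )

app = web.Application()
register_logs(app, "/dns", "hassio_dns")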
    def _register_host(self) -> None:
        """Register hostcontrol functions."""
        api_host = self._api_host
        api_host = APIHost()
        api_host.coresys = self.coresys

        self.webapp.add_routes(
            [
@@ -237,21 +177,15 @@ class RestAPI(CoreSysAttributes):
            [
                web.get("/os/info", api_os.info),
                web.post("/os/update", api_os.update),
                web.get("/os/config/swap", api_os.config_swap_info),
                web.post("/os/config/swap", api_os.config_swap_options),
                web.post("/os/config/sync", api_os.config_sync),
                web.post("/os/datadisk/move", api_os.migrate_data),
                web.get("/os/datadisk/list", api_os.list_data),
                web.post("/os/datadisk/wipe", api_os.wipe_data),
                web.post("/os/boot-slot", api_os.set_boot_slot),
            ]
        )

        # Boards endpoints
        self.webapp.add_routes(
            [
                web.get("/os/boards/green", api_os.boards_green_info),
                web.post("/os/boards/green", api_os.boards_green_options),
                web.get("/os/boards/yellow", api_os.boards_yellow_info),
                web.post("/os/boards/yellow", api_os.boards_yellow_options),
                web.get("/os/boards/{board}", api_os.boards_other_info),
@@ -281,8 +215,6 @@ class RestAPI(CoreSysAttributes):
                web.get("/jobs/info", api_jobs.info),
                web.post("/jobs/options", api_jobs.options),
                web.post("/jobs/reset", api_jobs.reset),
                web.get("/jobs/{uuid}", api_jobs.job_info),
                web.delete("/jobs/{uuid}", api_jobs.remove_job),
            ]
        )

@@ -321,11 +253,11 @@ class RestAPI(CoreSysAttributes):
            [
                web.get("/multicast/info", api_multicast.info),
                web.get("/multicast/stats", api_multicast.stats),
                web.get("/multicast/logs", api_multicast.logs),
                web.post("/multicast/update", api_multicast.update),
                web.post("/multicast/restart", api_multicast.restart),
            ]
        )
        self._register_advanced_logs("/multicast", "hassio_multicast")

    def _register_hardware(self) -> None:
        """Register hardware functions."""
@@ -345,9 +277,6 @@ class RestAPI(CoreSysAttributes):
        api_root.coresys = self.coresys

        self.webapp.add_routes([web.get("/info", api_root.info)])
        self.webapp.add_routes([web.post("/reload_updates", api_root.reload_updates)])

        # Discouraged
        self.webapp.add_routes([web.post("/refresh_updates", api_root.refresh_updates)])
        self.webapp.add_routes(
            [web.get("/available_updates", api_root.available_updates)]
@@ -401,7 +330,6 @@ class RestAPI(CoreSysAttributes):
                web.post("/auth", api_auth.auth),
                web.post("/auth/reset", api_auth.reset),
                web.delete("/auth/cache", api_auth.cache),
                web.get("/auth/list", api_auth.list_users),
            ]
        )

@@ -415,6 +343,7 @@ class RestAPI(CoreSysAttributes):
                web.get("/supervisor/ping", api_supervisor.ping),
                web.get("/supervisor/info", api_supervisor.info),
                web.get("/supervisor/stats", api_supervisor.stats),
                web.get("/supervisor/logs", api_supervisor.logs),
                web.post("/supervisor/update", api_supervisor.update),
                web.post("/supervisor/reload", api_supervisor.reload),
                web.post("/supervisor/restart", api_supervisor.restart),
@@ -423,39 +352,6 @@ class RestAPI(CoreSysAttributes):
            ]
        )

        async def get_supervisor_logs(*args, **kwargs):
            try:
                return await self._api_host.advanced_logs_handler(
                    *args, identifier=SUPERVISOR_DOCKER_NAME, **kwargs
                )
            except Exception as err:  # pylint: disable=broad-exception-caught
                # Supervisor logs are critical, so catch everything, log the exception
                # and try to return Docker container logs as the fallback
                _LOGGER.exception(
                    "Failed to get supervisor logs using advanced_logs API"
                )
                if not isinstance(err, HostNotSupportedError):
                    # No need to capture HostNotSupportedError to Sentry, the cause
                    # is known and reported to the user using the resolution center.
                    await async_capture_exception(err)
                kwargs.pop("follow", None)  # Follow is not supported for Docker logs
                return await api_supervisor.logs(*args, **kwargs)

        self.webapp.add_routes(
            [
                web.get("/supervisor/logs", get_supervisor_logs),
                web.get(
                    "/supervisor/logs/follow",
                    partial(get_supervisor_logs, follow=True),
                ),
                web.get("/supervisor/logs/boots/{bootid}", get_supervisor_logs),
                web.get(
                    "/supervisor/logs/boots/{bootid}/follow",
                    partial(get_supervisor_logs, follow=True),
                ),
            ]
        )

    def _register_homeassistant(self) -> None:
        """Register Home Assistant functions."""
        api_hass = APIHomeAssistant()
@@ -464,6 +360,7 @@ class RestAPI(CoreSysAttributes):
        self.webapp.add_routes(
            [
                web.get("/core/info", api_hass.info),
                web.get("/core/logs", api_hass.logs),
                web.get("/core/stats", api_hass.stats),
                web.post("/core/options", api_hass.options),
                web.post("/core/update", api_hass.update),
@@ -475,12 +372,11 @@ class RestAPI(CoreSysAttributes):
            ]
        )

        self._register_advanced_logs("/core", "homeassistant")

        # Reroute from legacy
        self.webapp.add_routes(
            [
                web.get("/homeassistant/info", api_hass.info),
                web.get("/homeassistant/logs", api_hass.logs),
                web.get("/homeassistant/stats", api_hass.stats),
                web.post("/homeassistant/options", api_hass.options),
                web.post("/homeassistant/restart", api_hass.restart),
@@ -492,8 +388,6 @@ class RestAPI(CoreSysAttributes):
            ]
        )

        self._register_advanced_logs("/homeassistant", "homeassistant")

    def _register_proxy(self) -> None:
        """Register Home Assistant API Proxy."""
        api_proxy = APIProxy()
@@ -529,45 +423,24 @@ class RestAPI(CoreSysAttributes):

        self.webapp.add_routes(
            [
                web.get("/addons", api_addons.list_addons),
                web.get("/addons", api_addons.list),
                web.post("/addons/{addon}/uninstall", api_addons.uninstall),
                web.post("/addons/{addon}/start", api_addons.start),
                web.post("/addons/{addon}/stop", api_addons.stop),
                web.post("/addons/{addon}/restart", api_addons.restart),
                web.post("/addons/{addon}/options", api_addons.options),
                web.post("/addons/{addon}/sys_options", api_addons.sys_options),
                web.post(
                    "/addons/{addon}/options/validate", api_addons.options_validate
                ),
                web.get("/addons/{addon}/options/config", api_addons.options_config),
                web.post("/addons/{addon}/rebuild", api_addons.rebuild),
                web.get("/addons/{addon}/logs", api_addons.logs),
                web.post("/addons/{addon}/stdin", api_addons.stdin),
                web.post("/addons/{addon}/security", api_addons.security),
                web.get("/addons/{addon}/stats", api_addons.stats),
            ]
        )

        @api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
        async def get_addon_logs(request, *args, **kwargs):
            addon = api_addons.get_addon_for_request(request)
            kwargs["identifier"] = f"addon_{addon.slug}"
            return await self._api_host.advanced_logs(request, *args, **kwargs)

        self.webapp.add_routes(
            [
                web.get("/addons/{addon}/logs", get_addon_logs),
                web.get(
                    "/addons/{addon}/logs/follow",
                    partial(get_addon_logs, follow=True),
                ),
                web.get("/addons/{addon}/logs/boots/{bootid}", get_addon_logs),
                web.get(
                    "/addons/{addon}/logs/boots/{bootid}/follow",
                    partial(get_addon_logs, follow=True),
                ),
            ]
        )

        # Legacy routing to support requests for not installed addons
        api_store = APIStore()
        api_store.coresys = self.coresys
@@ -597,9 +470,7 @@ class RestAPI(CoreSysAttributes):
                web.post("/ingress/session", api_ingress.create_session),
                web.post("/ingress/validate_session", api_ingress.validate_session),
                web.get("/ingress/panels", api_ingress.panels),
                web.route(
                    hdrs.METH_ANY, "/ingress/{token}/{path:.*}", api_ingress.handler
                ),
                web.view("/ingress/{token}/{path:.*}", api_ingress.handler),
            ]
        )

@@ -610,12 +481,10 @@ class RestAPI(CoreSysAttributes):

        self.webapp.add_routes(
            [
                web.get("/backups", api_backups.list_backups),
                web.get("/backups", api_backups.list),
                web.get("/backups/info", api_backups.info),
                web.post("/backups/options", api_backups.options),
                web.post("/backups/reload", api_backups.reload),
                web.post("/backups/freeze", api_backups.freeze),
                web.post("/backups/thaw", api_backups.thaw),
                web.post("/backups/new/full", api_backups.backup_full),
                web.post("/backups/new/partial", api_backups.backup_partial),
                web.post("/backups/new/upload", api_backups.upload),
@@ -637,7 +506,7 @@ class RestAPI(CoreSysAttributes):

        self.webapp.add_routes(
            [
                web.get("/services", api_services.list_services),
                web.get("/services", api_services.list),
                web.get("/services/{service}", api_services.get_service),
                web.post("/services/{service}", api_services.set_service),
                web.delete("/services/{service}", api_services.del_service),
@@ -651,7 +520,7 @@ class RestAPI(CoreSysAttributes):

        self.webapp.add_routes(
            [
                web.get("/discovery", api_discovery.list_discovery),
                web.get("/discovery", api_discovery.list),
                web.get("/discovery/{uuid}", api_discovery.get_discovery),
                web.delete("/discovery/{uuid}", api_discovery.del_discovery),
                web.post("/discovery", api_discovery.set_discovery),
@@ -667,6 +536,7 @@ class RestAPI(CoreSysAttributes):
            [
                web.get("/dns/info", api_dns.info),
                web.get("/dns/stats", api_dns.stats),
                web.get("/dns/logs", api_dns.logs),
                web.post("/dns/update", api_dns.update),
                web.post("/dns/options", api_dns.options),
                web.post("/dns/restart", api_dns.restart),
@@ -674,17 +544,18 @@ class RestAPI(CoreSysAttributes):
            ]
        )

        self._register_advanced_logs("/dns", "hassio_dns")

    def _register_audio(self) -> None:
        """Register Audio functions."""
        api_audio = APIAudio()
        api_audio.coresys = self.coresys
        api_host = APIHost()
        api_host.coresys = self.coresys

        self.webapp.add_routes(
            [
                web.get("/audio/info", api_audio.info),
                web.get("/audio/stats", api_audio.stats),
                web.get("/audio/logs", api_audio.logs),
                web.post("/audio/update", api_audio.update),
                web.post("/audio/restart", api_audio.restart),
                web.post("/audio/reload", api_audio.reload),
@@ -697,8 +568,6 @@ class RestAPI(CoreSysAttributes):
            ]
        )

        self._register_advanced_logs("/audio", "hassio_audio")

    def _register_mounts(self) -> None:
        """Register mounts endpoints."""
        api_mounts = APIMounts()
@@ -725,6 +594,7 @@ class RestAPI(CoreSysAttributes):
                web.get("/store", api_store.store_info),
                web.get("/store/addons", api_store.addons_list),
                web.get("/store/addons/{addon}", api_store.addons_addon_info),
                web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
                web.get("/store/addons/{addon}/icon", api_store.addons_addon_icon),
                web.get("/store/addons/{addon}/logo", api_store.addons_addon_logo),
                web.get(
@@ -746,8 +616,6 @@ class RestAPI(CoreSysAttributes):
                    "/store/addons/{addon}/update/{version}",
                    api_store.addons_addon_update,
                ),
                # Must be below others since it has a wildcard in resource path
                web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
                web.post("/store/reload", api_store.reload),
                web.get("/store/repositories", api_store.repositories_list),
                web.get(
@@ -777,9 +645,10 @@ class RestAPI(CoreSysAttributes):
            ]
        )

    def _register_panel(self) -> list[StaticResourceConfig]:
    def _register_panel(self) -> None:
        """Register panel for Home Assistant."""
        return [StaticResourceConfig("/app", Path(__file__).parent.joinpath("panel"))]
        panel_dir = Path(__file__).parent.joinpath("panel")
        self.webapp.add_routes([web.static("/app", panel_dir)])

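Note: the newer `_register_panel` above only describes the static mount and lets `load()` construct `web.StaticResource` objects inside an executor, since instantiating them touches the filesystem. A reduced sketch of that split, assuming a plain aiohttp app:

import asyncio
from dataclasses import dataclass
from pathlib import Path

from aiohttp import web

@dataclass(slots=True, frozen=True)
class StaticResourceConfig:
    prefix: str
    path: Path

async def mount_static(app: web.Application, configs: list[StaticResourceConfig]) -> None:
    """Build StaticResource objects off-loop, then register them."""
    loop = asyncio.get_running_loop()
    resources = await loop.run_in_executor(
        None, lambda: [web.StaticResource(c.prefix, c.path) for c in configs]
    )
    for resource in resources:
        app.router.register_resource(resource)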
    def _register_docker(self) -> None:
        """Register docker configuration functions."""
@@ -789,7 +658,6 @@ class RestAPI(CoreSysAttributes):
        self.webapp.add_routes(
            [
                web.get("/docker/info", api_docker.info),
                web.post("/docker/options", api_docker.options),
                web.get("/docker/registries", api_docker.registries),
                web.post("/docker/registries", api_docker.create_registry),
                web.delete("/docker/registries/{hostname}", api_docker.remove_registry),
@@ -799,7 +667,9 @@ class RestAPI(CoreSysAttributes):
    async def start(self) -> None:
        """Run RESTful API webserver."""
        await self._runner.setup()
        self._site = web.TCPSite(self._runner, host="0.0.0.0", port=80)
        self._site = web.TCPSite(
            self._runner, host="0.0.0.0", port=80, shutdown_timeout=5
        )

        try:
            await self._site.start()

@@ -1,14 +1,14 @@
"""Init file for Supervisor Home Assistant RESTful API."""

import asyncio
from collections.abc import Awaitable
import logging
from typing import Any, TypedDict
from typing import Any

from aiohttp import web
import voluptuous as vol
from voluptuous.humanize import humanize_error

from ..addons import AnyAddon
from ..addons.addon import Addon
from ..addons.utils import rating_security
from ..const import (
@@ -36,7 +36,6 @@ from ..const import (
ATTR_DNS,
ATTR_DOCKER_API,
ATTR_DOCUMENTATION,
ATTR_FORCE,
ATTR_FULL_ACCESS,
ATTR_GPIO,
ATTR_HASSIO_API,
@@ -63,6 +62,7 @@ from ..const import (
ATTR_MEMORY_LIMIT,
ATTR_MEMORY_PERCENT,
ATTR_MEMORY_USAGE,
ATTR_MESSAGE,
ATTR_NAME,
ATTR_NETWORK,
ATTR_NETWORK_DESCRIPTION,
@@ -71,6 +71,7 @@ from ..const import (
ATTR_OPTIONS,
ATTR_PRIVILEGED,
ATTR_PROTECTED,
ATTR_PWNED,
ATTR_RATING,
ATTR_REPOSITORY,
ATTR_SCHEMA,
@@ -80,14 +81,13 @@ from ..const import (
ATTR_STARTUP,
ATTR_STATE,
ATTR_STDIN,
ATTR_SYSTEM_MANAGED,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
ATTR_TRANSLATIONS,
ATTR_UART,
ATTR_UDEV,
ATTR_UPDATE_AVAILABLE,
ATTR_URL,
ATTR_USB,
ATTR_VALID,
ATTR_VERSION,
ATTR_VERSION_LATEST,
ATTR_VIDEO,
@@ -95,7 +95,6 @@ from ..const import (
ATTR_WEBUI,
REQUEST_FROM,
AddonBoot,
AddonBootConfig,
)
from ..coresys import CoreSysAttributes
from ..docker.stats import DockerStats
@@ -103,13 +102,12 @@ from ..exceptions import (
APIAddonNotInstalled,
APIError,
APIForbidden,
APINotFound,
PwnedError,
PwnedSecret,
)
from ..validate import docker_ports
from .const import ATTR_BOOT_CONFIG, ATTR_REMOVE_CONFIG, ATTR_SIGNED
from .utils import api_process, api_validate, json_loads
from .const import ATTR_SIGNED, CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate, json_loads

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -128,37 +126,16 @@ SCHEMA_OPTIONS = vol.Schema(
}
)

SCHEMA_SYS_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_SYSTEM_MANAGED): vol.Boolean(),
vol.Optional(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY): vol.Maybe(str),
}
)

# pylint: disable=no-value-for-parameter
SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()})

SCHEMA_UNINSTALL = vol.Schema(
{vol.Optional(ATTR_REMOVE_CONFIG, default=False): vol.Boolean()}
)

SCHEMA_REBUILD = vol.Schema({vol.Optional(ATTR_FORCE, default=False): vol.Boolean()})
# pylint: enable=no-value-for-parameter


class OptionsValidateResponse(TypedDict):
"""Response object for options validate."""

message: str
valid: bool
pwned: bool | None


class APIAddons(CoreSysAttributes):
"""Handle RESTful API for add-on functions."""

def get_addon_for_request(self, request: web.Request) -> Addon:
"""Return addon, throw an exception if it doesn't exist."""
addon_slug: str = request.match_info["addon"]
def _extract_addon(self, request: web.Request) -> Addon:
"""Return addon, throw an exception it it doesn't exist."""
addon_slug: str = request.match_info.get("addon")

# Lookup itself
if addon_slug == "self":
@@ -169,14 +146,14 @@ class APIAddons(CoreSysAttributes):

addon = self.sys_addons.get(addon_slug)
if not addon:
raise APINotFound(f"Addon {addon_slug} does not exist")
raise APIError(f"Addon {addon_slug} does not exist")
if not isinstance(addon, Addon) or not addon.is_installed:
raise APIAddonNotInstalled("Addon is not installed")

return addon

@api_process
async def list_addons(self, request: web.Request) -> dict[str, Any]:
async def list(self, request: web.Request) -> dict[str, Any]:
"""Return all add-ons or repositories."""
data_addons = [
{
@@ -197,7 +174,6 @@ class APIAddons(CoreSysAttributes):
ATTR_URL: addon.url,
ATTR_ICON: addon.with_icon,
ATTR_LOGO: addon.with_logo,
ATTR_SYSTEM_MANAGED: addon.system_managed,
}
for addon in self.sys_addons.installed
]
@@ -211,7 +187,7 @@ class APIAddons(CoreSysAttributes):

async def info(self, request: web.Request) -> dict[str, Any]:
"""Return add-on information."""
addon: Addon = self.get_addon_for_request(request)
addon: AnyAddon = self._extract_addon(request)

data = {
ATTR_NAME: addon.name,
@@ -219,14 +195,13 @@ class APIAddons(CoreSysAttributes):
ATTR_HOSTNAME: addon.hostname,
ATTR_DNS: addon.dns,
ATTR_DESCRIPTON: addon.description,
ATTR_LONG_DESCRIPTION: await addon.long_description(),
ATTR_LONG_DESCRIPTION: addon.long_description,
ATTR_ADVANCED: addon.advanced,
ATTR_STAGE: addon.stage,
ATTR_REPOSITORY: addon.repository,
ATTR_VERSION_LATEST: addon.latest_version,
ATTR_PROTECTED: addon.protected,
ATTR_RATING: rating_security(addon),
ATTR_BOOT_CONFIG: addon.boot_config,
ATTR_BOOT: addon.boot,
ATTR_OPTIONS: addon.options,
ATTR_SCHEMA: addon.schema_ui,
@@ -286,8 +261,6 @@ class APIAddons(CoreSysAttributes):
ATTR_WATCHDOG: addon.watchdog,
ATTR_DEVICES: addon.static_devices
+ [device.path for device in addon.devices],
ATTR_SYSTEM_MANAGED: addon.system_managed,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY: addon.system_managed_config_entry,
}

return data
@@ -295,7 +268,7 @@ class APIAddons(CoreSysAttributes):
@api_process
async def options(self, request: web.Request) -> None:
"""Store user options for add-on."""
addon = self.get_addon_for_request(request)
addon = self._extract_addon(request)

# Update secrets for validation
await self.sys_homeassistant.secrets.reload()
@@ -310,10 +283,6 @@ class APIAddons(CoreSysAttributes):
if ATTR_OPTIONS in body:
addon.options = body[ATTR_OPTIONS]
if ATTR_BOOT in body:
if addon.boot_config == AddonBootConfig.MANUAL_ONLY:
raise APIError(
f"Addon {addon.slug} boot option is set to {addon.boot_config} so it cannot be changed"
)
addon.boot = body[ATTR_BOOT]
if ATTR_AUTO_UPDATE in body:
addon.auto_update = body[ATTR_AUTO_UPDATE]
@@ -329,27 +298,13 @@ class APIAddons(CoreSysAttributes):
if ATTR_WATCHDOG in body:
addon.watchdog = body[ATTR_WATCHDOG]

await addon.save_persist()
addon.save_persist()

@api_process
async def sys_options(self, request: web.Request) -> None:
"""Store system options for an add-on."""
addon = self.get_addon_for_request(request)

# Validate/Process Body
body = await api_validate(SCHEMA_SYS_OPTIONS, request)
if ATTR_SYSTEM_MANAGED in body:
addon.system_managed = body[ATTR_SYSTEM_MANAGED]
if ATTR_SYSTEM_MANAGED_CONFIG_ENTRY in body:
addon.system_managed_config_entry = body[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY]

await addon.save_persist()

@api_process
async def options_validate(self, request: web.Request) -> OptionsValidateResponse:
async def options_validate(self, request: web.Request) -> None:
"""Validate user options for add-on."""
addon = self.get_addon_for_request(request)
data = OptionsValidateResponse(message="", valid=True, pwned=False)
addon = self._extract_addon(request)
data = {ATTR_MESSAGE: "", ATTR_VALID: True, ATTR_PWNED: False}

options = await request.json(loads=json_loads) or addon.options

@@ -358,8 +313,8 @@ class APIAddons(CoreSysAttributes):
try:
options_schema.validate(options)
except vol.Invalid as ex:
data["message"] = humanize_error(options, ex)
data["valid"] = False
data[ATTR_MESSAGE] = humanize_error(options, ex)
data[ATTR_VALID] = False

if not self.sys_security.pwned:
return data
@@ -370,27 +325,27 @@ class APIAddons(CoreSysAttributes):
await self.sys_security.verify_secret(secret)
continue
except PwnedSecret:
data["pwned"] = True
data[ATTR_PWNED] = True
except PwnedError:
data["pwned"] = None
data[ATTR_PWNED] = None
break

if self.sys_security.force and data["pwned"] in (None, True):
data["valid"] = False
if data["pwned"] is None:
data["message"] = "Error happening on pwned secrets check!"
if self.sys_security.force and data[ATTR_PWNED] in (None, True):
data[ATTR_VALID] = False
if data[ATTR_PWNED] is None:
data[ATTR_MESSAGE] = "Error happening on pwned secrets check!"
else:
data["message"] = "Add-on uses pwned secrets!"
data[ATTR_MESSAGE] = "Add-on uses pwned secrets!"

return data

@api_process
async def options_config(self, request: web.Request) -> None:
"""Validate user options for add-on."""
slug: str = request.match_info["addon"]
slug: str = request.match_info.get("addon")
if slug != "self":
raise APIForbidden("This can be only read by the Add-on itself!")
addon = self.get_addon_for_request(request)
addon = self._extract_addon(request)

# Lookup/reload secrets
await self.sys_homeassistant.secrets.reload()
@@ -402,19 +357,19 @@ class APIAddons(CoreSysAttributes):
@api_process
async def security(self, request: web.Request) -> None:
"""Store security options for add-on."""
addon = self.get_addon_for_request(request)
addon = self._extract_addon(request)
body: dict[str, Any] = await api_validate(SCHEMA_SECURITY, request)

if ATTR_PROTECTED in body:
_LOGGER.warning("Changing protected flag for %s!", addon.slug)
addon.protected = body[ATTR_PROTECTED]

await addon.save_persist()
addon.save_persist()

@api_process
async def stats(self, request: web.Request) -> dict[str, Any]:
"""Return resource information."""
addon = self.get_addon_for_request(request)
addon = self._extract_addon(request)

stats: DockerStats = await addon.stats()

@@ -430,51 +385,45 @@ class APIAddons(CoreSysAttributes):
}

@api_process
async def uninstall(self, request: web.Request) -> Awaitable[None]:
def uninstall(self, request: web.Request) -> Awaitable[None]:
"""Uninstall add-on."""
addon = self.get_addon_for_request(request)
body: dict[str, Any] = await api_validate(SCHEMA_UNINSTALL, request)
return await asyncio.shield(
self.sys_addons.uninstall(
addon.slug, remove_config=body[ATTR_REMOVE_CONFIG]
)
)
addon = self._extract_addon(request)
return asyncio.shield(addon.uninstall())

@api_process
async def start(self, request: web.Request) -> None:
def start(self, request: web.Request) -> Awaitable[None]:
"""Start add-on."""
addon = self.get_addon_for_request(request)
if start_task := await asyncio.shield(addon.start()):
await start_task
addon = self._extract_addon(request)
return asyncio.shield(addon.start())

@api_process
def stop(self, request: web.Request) -> Awaitable[None]:
"""Stop add-on."""
addon = self.get_addon_for_request(request)
addon = self._extract_addon(request)
return asyncio.shield(addon.stop())

@api_process
async def restart(self, request: web.Request) -> None:
def restart(self, request: web.Request) -> Awaitable[None]:
"""Restart add-on."""
addon: Addon = self.get_addon_for_request(request)
if start_task := await asyncio.shield(addon.restart()):
await start_task
addon: Addon = self._extract_addon(request)
return asyncio.shield(addon.restart())

@api_process
async def rebuild(self, request: web.Request) -> None:
def rebuild(self, request: web.Request) -> Awaitable[None]:
"""Rebuild local build add-on."""
addon = self.get_addon_for_request(request)
body: dict[str, Any] = await api_validate(SCHEMA_REBUILD, request)
addon = self._extract_addon(request)
return asyncio.shield(addon.rebuild())

if start_task := await asyncio.shield(
self.sys_addons.rebuild(addon.slug, force=body[ATTR_FORCE])
):
await start_task
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return logs from add-on."""
addon = self._extract_addon(request)
return addon.logs()

@api_process
async def stdin(self, request: web.Request) -> None:
"""Write to stdin of add-on."""
addon = self.get_addon_for_request(request)
addon = self._extract_addon(request)
if not addon.with_stdin:
raise APIError(f"STDIN not supported the {addon.slug} add-on")
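
The options_validate flow above is plain voluptuous: validate the body against a schema and, on failure, return a human-readable message instead of raising. A runnable sketch with an invented schema (the real schema is built from the add-on's config):

from typing import TypedDict

import voluptuous as vol
from voluptuous.humanize import humanize_error


class ValidateResponse(TypedDict):
    message: str
    valid: bool


# Invented schema; illustrative only.
SCHEMA = vol.Schema({vol.Required("port"): vol.All(int, vol.Range(min=1, max=65535))})


def validate_options(options: dict) -> ValidateResponse:
    data = ValidateResponse(message="", valid=True)
    try:
        SCHEMA(options)
    except vol.Invalid as ex:
        data["message"] = humanize_error(options, ex)
        data["valid"] = False
    return data


print(validate_options({"port": 8080}))  # {'message': '', 'valid': True}
print(validate_options({"port": "x"}))   # valid=False, readable message
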
@@ -1,12 +1,11 @@
"""Init file for Supervisor Audio RESTful API."""

import asyncio
from collections.abc import Awaitable
from dataclasses import asdict
import logging
from typing import Any

from aiohttp import web
import attr
import voluptuous as vol

from ..const import (
@@ -36,7 +35,8 @@ from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..host.sound import StreamType
from ..validate import version_tag
from .utils import api_process, api_validate
from .const import CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -76,11 +76,15 @@ class APIAudio(CoreSysAttributes):
ATTR_UPDATE_AVAILABLE: self.sys_plugins.audio.need_update,
ATTR_HOST: str(self.sys_docker.network.audio),
ATTR_AUDIO: {
ATTR_CARD: [asdict(card) for card in self.sys_host.sound.cards],
ATTR_INPUT: [asdict(stream) for stream in self.sys_host.sound.inputs],
ATTR_OUTPUT: [asdict(stream) for stream in self.sys_host.sound.outputs],
ATTR_CARD: [attr.asdict(card) for card in self.sys_host.sound.cards],
ATTR_INPUT: [
attr.asdict(stream) for stream in self.sys_host.sound.inputs
],
ATTR_OUTPUT: [
attr.asdict(stream) for stream in self.sys_host.sound.outputs
],
ATTR_APPLICATION: [
asdict(stream) for stream in self.sys_host.sound.applications
attr.asdict(stream) for stream in self.sys_host.sound.applications
],
},
}
@@ -111,6 +115,11 @@ class APIAudio(CoreSysAttributes):
raise APIError(f"Version {version} is already in use")
await asyncio.shield(self.sys_plugins.audio.update(version))

@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return Audio Docker logs."""
return self.sys_plugins.audio.logs()

@api_process
def restart(self, request: web.Request) -> Awaitable[None]:
"""Restart Audio plugin."""
@@ -124,7 +133,7 @@ class APIAudio(CoreSysAttributes):
@api_process
async def set_volume(self, request: web.Request) -> None:
"""Set audio volume on stream."""
source: StreamType = StreamType(request.match_info["source"])
source: StreamType = StreamType(request.match_info.get("source"))
application: bool = request.path.endswith("application")
body = await api_validate(SCHEMA_VOLUME, request)

@@ -137,7 +146,7 @@ class APIAudio(CoreSysAttributes):
@api_process
async def set_mute(self, request: web.Request) -> None:
"""Mute audio volume on stream."""
source: StreamType = StreamType(request.match_info["source"])
source: StreamType = StreamType(request.match_info.get("source"))
application: bool = request.path.endswith("application")
body = await api_validate(SCHEMA_MUTE, request)

@@ -150,7 +159,7 @@ class APIAudio(CoreSysAttributes):
@api_process
async def set_default(self, request: web.Request) -> None:
"""Set audio default stream."""
source: StreamType = StreamType(request.match_info["source"])
source: StreamType = StreamType(request.match_info.get("source"))
body = await api_validate(SCHEMA_DEFAULT, request)

await asyncio.shield(self.sys_host.sound.set_default(source, body[ATTR_NAME]))
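
This hunk is the attrs-to-dataclasses migration: attr.asdict on one side, dataclasses.asdict on the other, doing the same serialization. A tiny runnable sketch with an invented card type:

from dataclasses import asdict, dataclass


@dataclass
class AudioCard:  # invented shape, for illustration only
    index: int
    name: str
    driver: str


cards = [AudioCard(0, "Built-in Audio", "snd_hda_intel")]
print([asdict(card) for card in cards])
# [{'index': 0, 'name': 'Built-in Audio', 'driver': 'snd_hda_intel'}]
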
@@ -1,31 +1,18 @@
"""Init file for Supervisor auth/SSO RESTful API."""

import asyncio
from collections.abc import Awaitable
import logging
from typing import Any, cast

from aiohttp import BasicAuth, web
from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE, WWW_AUTHENTICATE
from aiohttp.web import FileField
from aiohttp.web_exceptions import HTTPUnauthorized
from multidict import MultiDictProxy
import voluptuous as vol

from ..addons.addon import Addon
from ..const import ATTR_NAME, ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
from ..const import ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
from ..coresys import CoreSysAttributes
from ..exceptions import APIForbidden
from .const import (
ATTR_GROUP_IDS,
ATTR_IS_ACTIVE,
ATTR_IS_OWNER,
ATTR_LOCAL_ONLY,
ATTR_USERS,
CONTENT_TYPE_JSON,
CONTENT_TYPE_URL,
)
from .utils import api_process, api_validate, json_loads
from .const import CONTENT_TYPE_JSON, CONTENT_TYPE_URL
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -44,7 +31,7 @@ REALM_HEADER: dict[str, str] = {
class APIAuth(CoreSysAttributes):
"""Handle RESTful API for auth functions."""

def _process_basic(self, request: web.Request, addon: Addon) -> Awaitable[bool]:
def _process_basic(self, request: web.Request, addon: Addon) -> bool:
"""Process login request with basic auth.

Return a coroutine.
@@ -53,11 +40,8 @@ class APIAuth(CoreSysAttributes):
return self.sys_auth.check_login(addon, auth.login, auth.password)

def _process_dict(
self,
request: web.Request,
addon: Addon,
data: dict[str, Any] | MultiDictProxy[str | bytes | FileField],
) -> Awaitable[bool]:
self, request: web.Request, addon: Addon, data: dict[str, str]
) -> bool:
"""Process login with dict data.

Return a coroutine.
@@ -65,22 +49,14 @@ class APIAuth(CoreSysAttributes):
username = data.get("username") or data.get("user")
password = data.get("password")

# Test that we did receive strings and not something else, raise if so
try:
_ = username.encode and password.encode # type: ignore
except AttributeError:
raise HTTPUnauthorized(headers=REALM_HEADER) from None

return self.sys_auth.check_login(
addon, cast(str, username), cast(str, password)
)
return self.sys_auth.check_login(addon, username, password)

@api_process
async def auth(self, request: web.Request) -> bool:
"""Process login request."""
addon = request[REQUEST_FROM]

if not isinstance(addon, Addon) or not addon.access_auth_api:
if not addon.access_auth_api:
raise APIForbidden("Can't use Home Assistant auth!")

# BasicAuth
@@ -91,19 +67,14 @@ class APIAuth(CoreSysAttributes):

# Json
if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON:
data = await request.json(loads=json_loads)
if not await self._process_dict(request, addon, data):
raise HTTPUnauthorized()
return True
data = await request.json()
return await self._process_dict(request, addon, data)

# URL encoded
if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_URL:
data = await request.post()
if not await self._process_dict(request, addon, data):
raise HTTPUnauthorized()
return True
return await self._process_dict(request, addon, data)

# Advertise Basic authentication by default
raise HTTPUnauthorized(headers=REALM_HEADER)

@api_process
@@ -117,22 +88,4 @@ class APIAuth(CoreSysAttributes):
@api_process
async def cache(self, request: web.Request) -> None:
"""Process cache reset request."""
await self.sys_auth.reset_data()

@api_process
async def list_users(self, request: web.Request) -> dict[str, list[dict[str, Any]]]:
"""List users on the Home Assistant instance."""
return {
ATTR_USERS: [
{
ATTR_USERNAME: user[ATTR_USERNAME],
ATTR_NAME: user[ATTR_NAME],
ATTR_IS_OWNER: user[ATTR_IS_OWNER],
ATTR_IS_ACTIVE: user[ATTR_IS_ACTIVE],
ATTR_LOCAL_ONLY: user[ATTR_LOCAL_ONLY],
ATTR_GROUP_IDS: user[ATTR_GROUP_IDS],
}
for user in await self.sys_auth.list_users()
if user[ATTR_USERNAME]
]
}
self.sys_auth.reset_data()
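
The Basic-auth branch above relies on aiohttp's BasicAuth helper to parse the Authorization header into login and password. A runnable sketch with dummy credentials:

from aiohttp import BasicAuth

# Dummy credentials, round-tripped through the Authorization header value.
header = BasicAuth("homeassistant", "secret").encode()
auth = BasicAuth.decode(header)
assert (auth.login, auth.password) == ("homeassistant", "secret")
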
@@ -1,24 +1,15 @@
"""Backups RESTful API."""

from __future__ import annotations

import asyncio
from collections.abc import Callable
import errno
from io import IOBase
import logging
from pathlib import Path
import re
from tempfile import TemporaryDirectory
from typing import Any, cast
from typing import Any

from aiohttp import BodyPartReader, web
from aiohttp import web
from aiohttp.hdrs import CONTENT_DISPOSITION
import voluptuous as vol
from voluptuous.humanize import humanize_error

from ..backups.backup import Backup
from ..backups.const import LOCATION_CLOUD_BACKUP, LOCATION_TYPE
from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT, days_until_stale
from ..const import (
ATTR_ADDONS,
@@ -27,117 +18,67 @@ from ..const import (
ATTR_CONTENT,
ATTR_DATE,
ATTR_DAYS_UNTIL_STALE,
ATTR_EXTRA,
ATTR_FILENAME,
ATTR_FOLDERS,
ATTR_HOMEASSISTANT,
ATTR_HOMEASSISTANT_EXCLUDE_DATABASE,
ATTR_JOB_ID,
ATTR_LOCATION,
ATTR_LOCATON,
ATTR_NAME,
ATTR_PASSWORD,
ATTR_PROTECTED,
ATTR_REPOSITORIES,
ATTR_SIZE,
ATTR_SIZE_BYTES,
ATTR_SLUG,
ATTR_SUPERVISOR_VERSION,
ATTR_TIMEOUT,
ATTR_TYPE,
ATTR_VERSION,
REQUEST_FROM,
BusEvent,
CoreState,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APIForbidden, APINotFound
from ..jobs import JobSchedulerOptions, SupervisorJob
from ..exceptions import APIError
from ..mounts.const import MountUsage
from ..resolution.const import UnhealthyReason
from .const import (
ATTR_ADDITIONAL_LOCATIONS,
ATTR_BACKGROUND,
ATTR_LOCATION_ATTRIBUTES,
ATTR_LOCATIONS,
CONTENT_TYPE_TAR,
)
from .const import CONTENT_TYPE_TAR
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

ALL_ADDONS_FLAG = "ALL"

LOCATION_LOCAL = ".local"

RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+")
RE_BACKUP_FILENAME = re.compile(r"^[^\\\/]+\.tar$")

# Backwards compatible
# Remove: 2022.08
_ALL_FOLDERS = ALL_FOLDERS + [FOLDER_HOMEASSISTANT]


def _ensure_list(item: Any) -> list:
"""Ensure value is a list."""
if not isinstance(item, list):
return [item]
return item


def _convert_local_location(item: str | None) -> str | None:
"""Convert local location value."""
if item in {LOCATION_LOCAL, ""}:
return None
return item


# pylint: disable=no-value-for-parameter
SCHEMA_FOLDERS = vol.All([vol.In(_ALL_FOLDERS)], vol.Unique())
SCHEMA_LOCATION = vol.All(vol.Maybe(str), _convert_local_location)
SCHEMA_LOCATION_LIST = vol.All(_ensure_list, [SCHEMA_LOCATION], vol.Unique())

SCHEMA_RESTORE_FULL = vol.Schema(
SCHEMA_RESTORE_PARTIAL = vol.Schema(
{
vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION,
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
}
)

SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend(
{
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
vol.Optional(ATTR_FOLDERS): SCHEMA_FOLDERS,
}
)
SCHEMA_RESTORE_FULL = vol.Schema({vol.Optional(ATTR_PASSWORD): vol.Maybe(str)})

SCHEMA_BACKUP_FULL = vol.Schema(
{
vol.Optional(ATTR_NAME): str,
vol.Optional(ATTR_FILENAME): vol.Match(RE_BACKUP_FILENAME),
vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()),
vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION_LIST,
vol.Optional(ATTR_HOMEASSISTANT_EXCLUDE_DATABASE): vol.Boolean(),
vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
vol.Optional(ATTR_EXTRA): dict,
vol.Optional(ATTR_LOCATON): vol.Maybe(str),
}
)

SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend(
{
vol.Optional(ATTR_ADDONS): vol.Or(
ALL_ADDONS_FLAG, vol.All([str], vol.Unique())
),
vol.Optional(ATTR_FOLDERS): SCHEMA_FOLDERS,
vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
}
)

SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_DAYS_UNTIL_STALE): days_until_stale})
SCHEMA_FREEZE = vol.Schema({vol.Optional(ATTR_TIMEOUT): vol.All(int, vol.Range(min=1))})
SCHEMA_REMOVE = vol.Schema({vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION_LIST})
SCHEMA_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_DAYS_UNTIL_STALE): days_until_stale,
}
)


class APIBackups(CoreSysAttributes):
@@ -147,19 +88,9 @@ class APIBackups(CoreSysAttributes):
"""Return backup, throw an exception if it doesn't exist."""
backup = self.sys_backups.get(request.match_info.get("slug"))
if not backup:
raise APINotFound("Backup does not exist")
raise APIError("Backup does not exist")
return backup

def _make_location_attributes(self, backup: Backup) -> dict[str, dict[str, Any]]:
"""Make location attributes dictionary."""
return {
loc if loc else LOCATION_LOCAL: {
ATTR_PROTECTED: backup.all_locations[loc].protected,
ATTR_SIZE_BYTES: backup.all_locations[loc].size_bytes,
}
for loc in backup.locations
}

def _list_backups(self):
"""Return list of backups."""
return [
@@ -169,11 +100,7 @@ class APIBackups(CoreSysAttributes):
ATTR_DATE: backup.date,
ATTR_TYPE: backup.sys_type,
ATTR_SIZE: backup.size,
ATTR_SIZE_BYTES: backup.size_bytes,
ATTR_LOCATION: backup.location,
ATTR_LOCATIONS: backup.locations,
ATTR_PROTECTED: backup.protected,
ATTR_LOCATION_ATTRIBUTES: self._make_location_attributes(backup),
ATTR_COMPRESSED: backup.compressed,
ATTR_CONTENT: {
ATTR_HOMEASSISTANT: backup.homeassistant_version is not None,
@@ -182,11 +109,10 @@ class APIBackups(CoreSysAttributes):
},
}
for backup in self.sys_backups.list_backups
if backup.location != LOCATION_CLOUD_BACKUP
]

@api_process
async def list_backups(self, request):
async def list(self, request):
"""Return backup list."""
data_backups = self._list_backups()

@@ -212,10 +138,10 @@ class APIBackups(CoreSysAttributes):
if ATTR_DAYS_UNTIL_STALE in body:
self.sys_backups.days_until_stale = body[ATTR_DAYS_UNTIL_STALE]

await self.sys_backups.save_data()
self.sys_backups.save_data()

@api_process
async def reload(self, _):
async def reload(self, request):
"""Reload backup list."""
await asyncio.shield(self.sys_backups.reload())
return True
@@ -242,338 +168,111 @@ class APIBackups(CoreSysAttributes):
ATTR_NAME: backup.name,
ATTR_DATE: backup.date,
ATTR_SIZE: backup.size,
ATTR_SIZE_BYTES: backup.size_bytes,
ATTR_COMPRESSED: backup.compressed,
ATTR_PROTECTED: backup.protected,
ATTR_LOCATION_ATTRIBUTES: self._make_location_attributes(backup),
ATTR_SUPERVISOR_VERSION: backup.supervisor_version,
ATTR_HOMEASSISTANT: backup.homeassistant_version,
ATTR_LOCATION: backup.location,
ATTR_LOCATIONS: backup.locations,
ATTR_ADDONS: data_addons,
ATTR_REPOSITORIES: backup.repositories,
ATTR_FOLDERS: backup.folders,
ATTR_HOMEASSISTANT_EXCLUDE_DATABASE: backup.homeassistant_exclude_database,
ATTR_EXTRA: backup.extra,
}

def _location_to_mount(self, location: str | None) -> LOCATION_TYPE:
"""Convert a single location to a mount if possible."""
if not location or location == LOCATION_CLOUD_BACKUP:
return cast(LOCATION_TYPE, location)
def _location_to_mount(self, body: dict[str, Any]) -> dict[str, Any]:
"""Change location field to mount if necessary."""
if not body.get(ATTR_LOCATON):
return body

mount = self.sys_mounts.get(location)
if mount.usage != MountUsage.BACKUP:
body[ATTR_LOCATON] = self.sys_mounts.get(body[ATTR_LOCATON])
if body[ATTR_LOCATON].usage != MountUsage.BACKUP:
raise APIError(
f"Mount {mount.name} is not used for backups, cannot backup to there"
f"Mount {body[ATTR_LOCATON].name} is not used for backups, cannot backup to there"
)

return mount

def _location_field_to_mount(self, body: dict[str, Any]) -> dict[str, Any]:
"""Change location field to mount if necessary."""
body[ATTR_LOCATION] = self._location_to_mount(body.get(ATTR_LOCATION))
return body

def _validate_cloud_backup_location(
self, request: web.Request, location: list[str | None] | str | None
) -> None:
"""Cloud backup location is only available to Home Assistant."""
if not isinstance(location, list):
location = [location]
if (
LOCATION_CLOUD_BACKUP in location
and request.get(REQUEST_FROM) != self.sys_homeassistant
):
raise APIForbidden(
f"Location {LOCATION_CLOUD_BACKUP} is only available for Home Assistant"
)

async def _background_backup_task(
self, backup_method: Callable, *args, **kwargs
) -> tuple[asyncio.Task, str]:
"""Start backup task in background and return task and job ID."""
event = asyncio.Event()
job, backup_task = cast(
tuple[SupervisorJob, asyncio.Task],
self.sys_jobs.schedule_job(
backup_method, JobSchedulerOptions(), *args, **kwargs
),
)

async def release_on_freeze(new_state: CoreState):
if new_state == CoreState.FREEZE:
event.set()

# Wait for system to get into freeze state before returning
# If the backup fails validation it will raise before getting there
listener = self.sys_bus.register_event(
BusEvent.SUPERVISOR_STATE_CHANGE, release_on_freeze
)
try:
event_task = self.sys_create_task(event.wait())
_, pending = await asyncio.wait(
(backup_task, event_task),
return_when=asyncio.FIRST_COMPLETED,
)
# It seems backup returned early (error or something), make sure to cancel
# the event task to avoid "Task was destroyed but it is pending!" errors.
if event_task in pending:
event_task.cancel()
return (backup_task, job.uuid)
finally:
self.sys_bus.remove_listener(listener)

@api_process
async def backup_full(self, request: web.Request):
async def backup_full(self, request):
"""Create full backup."""
body = await api_validate(SCHEMA_BACKUP_FULL, request)
locations: list[LOCATION_TYPE] | None = None

if ATTR_LOCATION in body:
location_names: list[str | None] = body.pop(ATTR_LOCATION)
self._validate_cloud_backup_location(request, location_names)

locations = [
self._location_to_mount(location) for location in location_names
]
body[ATTR_LOCATION] = locations.pop(0)
if locations:
body[ATTR_ADDITIONAL_LOCATIONS] = locations

background = body.pop(ATTR_BACKGROUND)
backup_task, job_id = await self._background_backup_task(
self.sys_backups.do_backup_full, **body
backup = await asyncio.shield(
self.sys_backups.do_backup_full(**self._location_to_mount(body))
)

if background and not backup_task.done():
return {ATTR_JOB_ID: job_id}

backup: Backup = await backup_task
if backup:
return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug}
raise APIError(
f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details",
job_id=job_id,
)

@api_process
async def backup_partial(self, request: web.Request):
"""Create a partial backup."""
body = await api_validate(SCHEMA_BACKUP_PARTIAL, request)
locations: list[LOCATION_TYPE] | None = None

if ATTR_LOCATION in body:
location_names: list[str | None] = body.pop(ATTR_LOCATION)
self._validate_cloud_backup_location(request, location_names)

locations = [
self._location_to_mount(location) for location in location_names
]
body[ATTR_LOCATION] = locations.pop(0)
if locations:
body[ATTR_ADDITIONAL_LOCATIONS] = locations

if body.get(ATTR_ADDONS) == ALL_ADDONS_FLAG:
body[ATTR_ADDONS] = list(self.sys_addons.local)

background = body.pop(ATTR_BACKGROUND)
backup_task, job_id = await self._background_backup_task(
self.sys_backups.do_backup_partial, **body
)

if background and not backup_task.done():
return {ATTR_JOB_ID: job_id}

backup: Backup = await backup_task
if backup:
return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug}
raise APIError(
f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details",
job_id=job_id,
)

@api_process
async def restore_full(self, request: web.Request):
"""Full restore of a backup."""
backup = self._extract_slug(request)
body = await api_validate(SCHEMA_RESTORE_FULL, request)
self._validate_cloud_backup_location(
request, body.get(ATTR_LOCATION, backup.location)
)
background = body.pop(ATTR_BACKGROUND)
restore_task, job_id = await self._background_backup_task(
self.sys_backups.do_restore_full, backup, **body
)

if background and not restore_task.done() or await restore_task:
return {ATTR_JOB_ID: job_id}
raise APIError(
f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details",
job_id=job_id,
)

@api_process
async def restore_partial(self, request: web.Request):
"""Partial restore a backup."""
backup = self._extract_slug(request)
body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
self._validate_cloud_backup_location(
request, body.get(ATTR_LOCATION, backup.location)
)
background = body.pop(ATTR_BACKGROUND)
restore_task, job_id = await self._background_backup_task(
self.sys_backups.do_restore_partial, backup, **body
)

if background and not restore_task.done() or await restore_task:
return {ATTR_JOB_ID: job_id}
raise APIError(
f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details",
job_id=job_id,
)

@api_process
async def freeze(self, request: web.Request):
"""Initiate manual freeze for external backup."""
body = await api_validate(SCHEMA_FREEZE, request)
await asyncio.shield(self.sys_backups.freeze_all(**body))

@api_process
async def thaw(self, request: web.Request):
"""Begin thaw after manual freeze."""
await self.sys_backups.thaw_all()

@api_process
async def remove(self, request: web.Request):
"""Remove a backup."""
backup = self._extract_slug(request)
body = await api_validate(SCHEMA_REMOVE, request)
locations: list[LOCATION_TYPE] | None = None

if ATTR_LOCATION in body:
self._validate_cloud_backup_location(request, body[ATTR_LOCATION])
locations = [self._location_to_mount(name) for name in body[ATTR_LOCATION]]
else:
self._validate_cloud_backup_location(request, backup.location)

await self.sys_backups.remove(backup, locations=locations)

@api_process
async def download(self, request: web.Request):
"""Download a backup file."""
backup = self._extract_slug(request)
# Query will give us '' for /backups, convert value to None
location = _convert_local_location(
request.query.get(ATTR_LOCATION, backup.location)
)
self._validate_cloud_backup_location(request, location)
if location not in backup.all_locations:
raise APIError(f"Backup {backup.slug} is not in location {location}")

_LOGGER.info("Downloading backup %s", backup.slug)
filename = backup.all_locations[location].path
# If the file is missing, return 404 and trigger reload of location
if not await self.sys_run_in_executor(filename.is_file):
self.sys_create_task(self.sys_backups.reload(location))
return web.Response(status=404)

response = web.FileResponse(filename)
response.content_type = CONTENT_TYPE_TAR

download_filename = filename.name
if download_filename == f"{backup.slug}.tar":
download_filename = f"{RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
response.headers[CONTENT_DISPOSITION] = (
f"attachment; filename={download_filename}"
)
return response

@api_process
async def upload(self, request: web.Request):
"""Upload a backup file."""
location: LOCATION_TYPE = None
locations: list[LOCATION_TYPE] | None = None

if ATTR_LOCATION in request.query:
location_names: list[str] = request.query.getall(ATTR_LOCATION, [])
self._validate_cloud_backup_location(
request, cast(list[str | None], location_names)
)
# Convert empty string to None if necessary
locations = [
self._location_to_mount(location)
if _convert_local_location(location)
else None
for location in location_names
]
location = locations.pop(0)

filename: str | None = None
if ATTR_FILENAME in request.query:
filename = request.query.get(ATTR_FILENAME)
try:
vol.Match(RE_BACKUP_FILENAME)(filename)
except vol.Invalid as ex:
raise APIError(humanize_error(filename, ex)) from None

tmp_path = await self.sys_backups.get_upload_path_for_location(location)
temp_dir: TemporaryDirectory | None = None
backup_file_stream: IOBase | None = None

def open_backup_file() -> Path:
nonlocal temp_dir, backup_file_stream
temp_dir = TemporaryDirectory(dir=tmp_path.as_posix())
tar_file = Path(temp_dir.name, "upload.tar")
backup_file_stream = tar_file.open("wb")
return tar_file

def close_backup_file() -> None:
if backup_file_stream:
# Make sure it got closed, in case of exception. It is safe to
# close the file stream twice.
backup_file_stream.close()
if temp_dir:
temp_dir.cleanup()

try:
reader = await request.multipart()
contents = await reader.next()
if not isinstance(contents, BodyPartReader):
raise APIError("Improperly formatted upload, could not read backup")

tar_file = await self.sys_run_in_executor(open_backup_file)
while chunk := await contents.read_chunk(size=2**16):
await self.sys_run_in_executor(
cast(IOBase, backup_file_stream).write, chunk
)
await self.sys_run_in_executor(cast(IOBase, backup_file_stream).close)

backup = await asyncio.shield(
self.sys_backups.import_backup(
tar_file,
filename,
location=location,
additional_locations=locations,
)
)
except OSError as err:
if err.errno == errno.EBADMSG and location in {
LOCATION_CLOUD_BACKUP,
None,
}:
self.sys_resolution.add_unhealthy_reason(
UnhealthyReason.OSERROR_BAD_MESSAGE
)
_LOGGER.error("Can't write new backup file: %s", err)
return False

except asyncio.CancelledError:
return False

finally:
await self.sys_run_in_executor(close_backup_file)

if backup:
return {ATTR_SLUG: backup.slug}
return False

@api_process
async def backup_partial(self, request):
"""Create a partial backup."""
body = await api_validate(SCHEMA_BACKUP_PARTIAL, request)
backup = await asyncio.shield(
self.sys_backups.do_backup_partial(**self._location_to_mount(body))
)

if backup:
return {ATTR_SLUG: backup.slug}
return False

@api_process
async def restore_full(self, request):
"""Full restore of a backup."""
backup = self._extract_slug(request)
body = await api_validate(SCHEMA_RESTORE_FULL, request)

return await asyncio.shield(self.sys_backups.do_restore_full(backup, **body))

@api_process
async def restore_partial(self, request):
"""Partial restore a backup."""
backup = self._extract_slug(request)
body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)

return await asyncio.shield(self.sys_backups.do_restore_partial(backup, **body))

@api_process
async def remove(self, request):
"""Remove a backup."""
backup = self._extract_slug(request)
return self.sys_backups.remove(backup)

async def download(self, request):
"""Download a backup file."""
backup = self._extract_slug(request)

_LOGGER.info("Downloading backup %s", backup.slug)
response = web.FileResponse(backup.tarfile)
response.content_type = CONTENT_TYPE_TAR
response.headers[
CONTENT_DISPOSITION
] = f"attachment; filename={RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
return response

@api_process
async def upload(self, request):
"""Upload a backup file."""
with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp_dir:
tar_file = Path(temp_dir, "backup.tar")
reader = await request.multipart()
contents = await reader.next()
try:
with tar_file.open("wb") as backup:
while True:
chunk = await contents.read_chunk()
if not chunk:
break
backup.write(chunk)

except OSError as err:
_LOGGER.error("Can't write new backup file: %s", err)
return False

except asyncio.CancelledError:
return False

backup = await asyncio.shield(self.sys_backups.import_backup(tar_file))

if backup:
return {ATTR_SLUG: backup.slug}
return False
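
_background_backup_task above is the core of the background-backup API: schedule the job, race it against a "system reached freeze" event, cancel the leftover waiter, and hand back the still-running task plus its job ID. A self-contained asyncio sketch of the same race, with invented names and a sleep standing in for the backup:

import asyncio


async def schedule_with_early_return() -> asyncio.Task:
    started = asyncio.Event()

    async def long_job() -> str:
        started.set()           # stands in for reaching the freeze state
        await asyncio.sleep(5)  # stands in for the actual backup work
        return "done"

    job_task = asyncio.create_task(long_job())
    event_task = asyncio.create_task(started.wait())
    _, pending = await asyncio.wait(
        (job_task, event_task), return_when=asyncio.FIRST_COMPLETED
    )
    # If the job returned before signalling, cancel the leftover waiter so
    # it is not destroyed while still pending.
    if event_task in pending:
        event_task.cancel()
    return job_task  # caller can await it, or just report a job ID


async def main() -> None:
    task = await schedule_with_early_return()
    print("still running in background:", not task.done())
    print(await task)


asyncio.run(main())
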
@@ -1,5 +1,4 @@
"""Init file for Supervisor HA cli RESTful API."""

import asyncio
import logging
from typing import Any

@@ -1,26 +1,18 @@
"""Const for API."""

from enum import StrEnum

CONTENT_TYPE_BINARY = "application/octet-stream"
CONTENT_TYPE_JSON = "application/json"
CONTENT_TYPE_PNG = "image/png"
CONTENT_TYPE_TAR = "application/tar"
CONTENT_TYPE_TEXT = "text/plain"
CONTENT_TYPE_URL = "application/x-www-form-urlencoded"
CONTENT_TYPE_X_LOG = "text/x-log"

COOKIE_INGRESS = "ingress_session"

ATTR_ADDITIONAL_LOCATIONS = "additional_locations"
ATTR_AGENT_VERSION = "agent_version"
ATTR_APPARMOR_VERSION = "apparmor_version"
ATTR_ATTRIBUTES = "attributes"
ATTR_AVAILABLE_UPDATES = "available_updates"
ATTR_BACKGROUND = "background"
ATTR_BOOT_CONFIG = "boot_config"
ATTR_BOOT_SLOT = "boot_slot"
ATTR_BOOT_SLOTS = "boot_slots"
ATTR_BOOT_TIMESTAMP = "boot_timestamp"
ATTR_BOOTS = "boots"
ATTR_BROADCAST_LLMNR = "broadcast_llmnr"
@@ -31,6 +23,7 @@ ATTR_CONNECTION_BUS = "connection_bus"
ATTR_DATA_DISK = "data_disk"
ATTR_DEVICE = "device"
ATTR_DEV_PATH = "dev_path"
ATTR_DISK_LED = "disk_led"
ATTR_DISKS = "disks"
ATTR_DRIVES = "drives"
ATTR_DT_SYNCHRONIZED = "dt_synchronized"
@@ -38,53 +31,25 @@ ATTR_DT_UTC = "dt_utc"
ATTR_EJECTABLE = "ejectable"
ATTR_FALLBACK = "fallback"
ATTR_FILESYSTEMS = "filesystems"
ATTR_FORCE = "force"
ATTR_GROUP_IDS = "group_ids"
ATTR_HEARTBEAT_LED = "heartbeat_led"
ATTR_IDENTIFIERS = "identifiers"
ATTR_IS_ACTIVE = "is_active"
ATTR_IS_OWNER = "is_owner"
ATTR_JOBS = "jobs"
ATTR_LLMNR = "llmnr"
ATTR_LLMNR_HOSTNAME = "llmnr_hostname"
ATTR_LOCAL_ONLY = "local_only"
ATTR_LOCATION_ATTRIBUTES = "location_attributes"
ATTR_LOCATIONS = "locations"
ATTR_MDNS = "mdns"
ATTR_MODEL = "model"
ATTR_MOUNTS = "mounts"
ATTR_MOUNT_POINTS = "mount_points"
ATTR_PANEL_PATH = "panel_path"
ATTR_POWER_LED = "power_led"
ATTR_REMOVABLE = "removable"
ATTR_REMOVE_CONFIG = "remove_config"
ATTR_REVISION = "revision"
ATTR_SAFE_MODE = "safe_mode"
ATTR_SEAT = "seat"
ATTR_SIGNED = "signed"
ATTR_STARTUP_TIME = "startup_time"
ATTR_STATUS = "status"
ATTR_SUBSYSTEM = "subsystem"
ATTR_SYSFS = "sysfs"
ATTR_SYSTEM_HEALTH_LED = "system_health_led"
ATTR_TIME_DETECTED = "time_detected"
ATTR_UPDATE_TYPE = "update_type"
ATTR_USAGE = "usage"
ATTR_USE_NTP = "use_ntp"
ATTR_USERS = "users"
ATTR_USER_PATH = "user_path"
ATTR_USAGE = "usage"
ATTR_VENDOR = "vendor"
ATTR_VIRTUALIZATION = "virtualization"


class BootSlot(StrEnum):
"""Boot slots used by HAOS."""

A = "A"
B = "B"


class DetectBlockingIO(StrEnum):
"""Enable/Disable detection for blocking I/O in event loop."""

OFF = "off"
ON = "on"
ON_AT_STARTUP = "on-at-startup"
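
BootSlot and DetectBlockingIO use StrEnum (Python 3.11+), whose members compare equal to their string values and serialize as plain strings in API payloads. A small runnable sketch, restating BootSlot purely for illustration:

from enum import StrEnum


class BootSlot(StrEnum):  # restated from the section above
    A = "A"
    B = "B"


assert BootSlot("A") is BootSlot.A  # parse straight from request data
assert BootSlot.B == "B"            # compares equal to the raw string
print(f"booting from slot {BootSlot.A}")  # prints: booting from slot A
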
@@ -1,12 +1,6 @@
"""Init file for Supervisor network RESTful API."""

import logging
from typing import Any

from aiohttp import web
import voluptuous as vol

from ..addons.addon import Addon
from ..const import (
ATTR_ADDON,
ATTR_CONFIG,
@@ -15,19 +9,16 @@ from ..const import (
ATTR_SERVICES,
ATTR_UUID,
REQUEST_FROM,
AddonState,
)
from ..coresys import CoreSysAttributes
from ..discovery import Message
from ..exceptions import APIForbidden, APINotFound
from ..discovery.validate import valid_discovery_service
from ..exceptions import APIError, APIForbidden
from .utils import api_process, api_validate, require_home_assistant

_LOGGER: logging.Logger = logging.getLogger(__name__)

SCHEMA_DISCOVERY = vol.Schema(
{
vol.Required(ATTR_SERVICE): str,
vol.Required(ATTR_CONFIG): dict,
vol.Required(ATTR_SERVICE): valid_discovery_service,
vol.Optional(ATTR_CONFIG): vol.Maybe(dict),
}
)

@@ -35,36 +26,32 @@ SCHEMA_DISCOVERY = vol.Schema(
class APIDiscovery(CoreSysAttributes):
"""Handle RESTful API for discovery functions."""

def _extract_message(self, request: web.Request) -> Message:
def _extract_message(self, request):
"""Extract discovery message from URL."""
message = self.sys_discovery.get(request.match_info["uuid"])
message = self.sys_discovery.get(request.match_info.get("uuid"))
if not message:
raise APINotFound("Discovery message not found")
raise APIError("Discovery message not found")
return message

@api_process
@require_home_assistant
async def list_discovery(self, request: web.Request) -> dict[str, Any]:
"""Show registered and available services."""
async def list(self, request):
"""Show register services."""

# Get available discovery
discovery = [
{
ATTR_ADDON: message.addon,
ATTR_SERVICE: message.service,
ATTR_UUID: message.uuid,
ATTR_CONFIG: message.config,
}
for message in self.sys_discovery.list_messages
if (
discovered := self.sys_addons.get_local_only(
message.addon,
)
discovery = []
for message in self.sys_discovery.list_messages:
discovery.append(
{
ATTR_ADDON: message.addon,
ATTR_SERVICE: message.service,
ATTR_UUID: message.uuid,
ATTR_CONFIG: message.config,
}
)
and discovered.state == AddonState.STARTED
]

# Get available services/add-ons
services: dict[str, list[str]] = {}
services = {}
for addon in self.sys_addons.all:
for name in addon.discovery:
services.setdefault(name, []).append(addon.slug)
@@ -72,31 +59,23 @@ class APIDiscovery(CoreSysAttributes):
return {ATTR_DISCOVERY: discovery, ATTR_SERVICES: services}

@api_process
async def set_discovery(self, request: web.Request) -> dict[str, str]:
async def set_discovery(self, request):
"""Write data into a discovery pipeline."""
body = await api_validate(SCHEMA_DISCOVERY, request)
addon: Addon = request[REQUEST_FROM]
service = body[ATTR_SERVICE]
addon = request[REQUEST_FROM]

# Access?
if body[ATTR_SERVICE] not in addon.discovery:
_LOGGER.error(
"Add-on %s attempted to send discovery for service %s which is not listed in its config. Please report this to the maintainer of the add-on",
addon.name,
service,
)
raise APIForbidden(
"Add-ons must list services they provide via discovery in their config!"
)
raise APIForbidden("Can't use discovery!")

# Process discovery message
message = await self.sys_discovery.send(addon, **body)
message = self.sys_discovery.send(addon, **body)

return {ATTR_UUID: message.uuid}

@api_process
@require_home_assistant
async def get_discovery(self, request: web.Request) -> dict[str, Any]:
async def get_discovery(self, request):
"""Read data into a discovery message."""
message = self._extract_message(request)

@@ -108,7 +87,7 @@ class APIDiscovery(CoreSysAttributes):
}

@api_process
async def del_discovery(self, request: web.Request) -> None:
async def del_discovery(self, request):
"""Delete data into a discovery message."""
message = self._extract_message(request)
addon = request[REQUEST_FROM]
@@ -117,4 +96,5 @@ class APIDiscovery(CoreSysAttributes):
if message.addon != addon.slug:
raise APIForbidden("Can't remove discovery message")

await self.sys_discovery.remove(message)
self.sys_discovery.remove(message)
return True
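
The rewritten list_discovery comprehension filters with an assignment expression: a message survives only if its add-on resolves locally and is started. A generic sketch of that walrus-in-comprehension pattern, with invented stand-in data:

# registry maps an add-on slug to its state; all names here are invented.
registry = {"core_mosquitto": "started", "core_samba": "stopped"}
messages = ["core_mosquitto", "core_samba", "unknown_addon"]

visible = [
    msg
    for msg in messages
    if (state := registry.get(msg)) and state == "started"
]
assert visible == ["core_mosquitto"]
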
@@ -1,5 +1,4 @@
"""Init file for Supervisor DNS RESTful API."""

import asyncio
from collections.abc import Awaitable
import logging
@@ -27,8 +26,8 @@ from ..const import (
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import dns_server_list, version_tag
from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS
from .utils import api_process, api_validate
from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS, CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -78,7 +77,7 @@ class APICoreDNS(CoreSysAttributes):
if restart_required:
self.sys_create_task(self.sys_plugins.dns.restart())

await self.sys_plugins.dns.save_data()
self.sys_plugins.dns.save_data()

@api_process
async def stats(self, request: web.Request) -> dict[str, Any]:
@@ -106,6 +105,11 @@ class APICoreDNS(CoreSysAttributes):
raise APIError(f"Version {version} is already in use")
await asyncio.shield(self.sys_plugins.dns.update(version))

@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return DNS Docker logs."""
return self.sys_plugins.dns.logs()

@api_process
def restart(self, request: web.Request) -> Awaitable[None]:
"""Restart CoreDNS plugin."""
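
api_process_raw, seen in several files above, is a decorator factory: it takes the content type and returns the decorator that wraps the handler. A generic, runnable sketch of that shape (the dict wrapper is illustrative; the real decorator builds an aiohttp response):

from collections.abc import Callable
from functools import wraps


def raw_response(content_type: str) -> Callable:
    """Invented decorator factory mirroring the api_process_raw shape."""

    def decorator(func: Callable) -> Callable:
        @wraps(func)
        def wrapper(*args, **kwargs):
            # The real decorator builds an aiohttp response; a dict stands in.
            return {"content_type": content_type, "body": func(*args, **kwargs)}

        return wrapper

    return decorator


@raw_response("application/octet-stream")
def logs() -> bytes:
    return b"log line"


print(logs())  # {'content_type': 'application/octet-stream', 'body': b'log line'}
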
@@ -1,15 +1,11 @@
"""Init file for Supervisor Home Assistant RESTful API."""

import logging
from typing import Any

from aiohttp import web
import voluptuous as vol

from supervisor.resolution.const import ContextType, IssueType, SuggestionType

from ..const import (
ATTR_ENABLE_IPV6,
ATTR_HOSTNAME,
ATTR_LOGGING,
ATTR_PASSWORD,
@@ -19,7 +15,6 @@ from ..const import (
ATTR_VERSION,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APINotFound
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -33,48 +28,10 @@ SCHEMA_DOCKER_REGISTRY = vol.Schema(
}
)

# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_ENABLE_IPV6): vol.Maybe(vol.Boolean())})


class APIDocker(CoreSysAttributes):
"""Handle RESTful API for Docker configuration."""

@api_process
async def info(self, request: web.Request):
"""Get docker info."""
data_registries = {}
for hostname, registry in self.sys_docker.config.registries.items():
data_registries[hostname] = {
ATTR_USERNAME: registry[ATTR_USERNAME],
}
return {
ATTR_VERSION: self.sys_docker.info.version,
ATTR_ENABLE_IPV6: self.sys_docker.config.enable_ipv6,
ATTR_STORAGE: self.sys_docker.info.storage,
ATTR_LOGGING: self.sys_docker.info.logging,
ATTR_REGISTRIES: data_registries,
}

@api_process
async def options(self, request: web.Request) -> None:
"""Set docker options."""
body = await api_validate(SCHEMA_OPTIONS, request)

if (
ATTR_ENABLE_IPV6 in body
and self.sys_docker.config.enable_ipv6 != body[ATTR_ENABLE_IPV6]
):
self.sys_docker.config.enable_ipv6 = body[ATTR_ENABLE_IPV6]
_LOGGER.info("Host system reboot required to apply new IPv6 configuration")
self.sys_resolution.create_issue(
IssueType.REBOOT_REQUIRED,
ContextType.SYSTEM,
suggestions=[SuggestionType.EXECUTE_REBOOT],
)

await self.sys_docker.config.save_data()

@api_process
async def registries(self, request) -> dict[str, Any]:
"""Return the list of registries."""
@@ -94,14 +51,26 @@ class APIDocker(CoreSysAttributes):
for hostname, registry in body.items():
self.sys_docker.config.registries[hostname] = registry

await self.sys_docker.config.save_data()
self.sys_docker.config.save_data()

@api_process
async def remove_registry(self, request: web.Request):
"""Delete a docker registry."""
hostname = request.match_info.get(ATTR_HOSTNAME)
if hostname not in self.sys_docker.config.registries:
raise APINotFound(f"Hostname {hostname} does not exist in registries")

del self.sys_docker.config.registries[hostname]
await self.sys_docker.config.save_data()
self.sys_docker.config.save_data()

@api_process
async def info(self, request: web.Request):
"""Get docker info."""
data_registries = {}
for hostname, registry in self.sys_docker.config.registries.items():
data_registries[hostname] = {
ATTR_USERNAME: registry[ATTR_USERNAME],
}
return {
ATTR_VERSION: self.sys_docker.info.version,
ATTR_STORAGE: self.sys_docker.info.storage,
ATTR_LOGGING: self.sys_docker.info.logging,
ATTR_REGISTRIES: data_registries,
}

@ -1,5 +1,4 @@
"""Init file for Supervisor hardware RESTful API."""

import logging
from typing import Any

@ -17,7 +16,7 @@ from ..const import (
    ATTR_SYSTEM,
)
from ..coresys import CoreSysAttributes
from ..dbus.udisks2 import UDisks2Manager
from ..dbus.udisks2 import UDisks2
from ..dbus.udisks2.block import UDisks2Block
from ..dbus.udisks2.drive import UDisks2Drive
from ..hardware.data import Device
@ -68,15 +67,12 @@ def filesystem_struct(fs_block: UDisks2Block) -> dict[str, Any]:
        ATTR_NAME: fs_block.id_label,
        ATTR_SYSTEM: fs_block.hint_system,
        ATTR_MOUNT_POINTS: [
            str(mount_point)
            for mount_point in (
                fs_block.filesystem.mount_points if fs_block.filesystem else []
            )
            str(mount_point) for mount_point in fs_block.filesystem.mount_points
        ],
    }


def drive_struct(udisks2: UDisks2Manager, drive: UDisks2Drive) -> dict[str, Any]:
def drive_struct(udisks2: UDisks2, drive: UDisks2Drive) -> dict[str, Any]:
    """Return a dict with information of a disk to be used in the API."""
    return {
        ATTR_VENDOR: drive.vendor,
@ -1,5 +1,4 @@
"""Init file for Supervisor Home Assistant RESTful API."""

import asyncio
from collections.abc import Awaitable
import logging
@ -13,7 +12,6 @@ from ..const import (
    ATTR_AUDIO_INPUT,
    ATTR_AUDIO_OUTPUT,
    ATTR_BACKUP,
    ATTR_BACKUPS_EXCLUDE_DATABASE,
    ATTR_BLK_READ,
    ATTR_BLK_WRITE,
    ATTR_BOOT,
@ -35,10 +33,10 @@ from ..const import (
    ATTR_WATCHDOG,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIDBMigrationInProgress, APIError
from ..exceptions import APIError
from ..validate import docker_image, network_port, version_tag
from .const import ATTR_FORCE, ATTR_SAFE_MODE
from .utils import api_process, api_validate
from .const import CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

@ -53,7 +51,6 @@ SCHEMA_OPTIONS = vol.Schema(
        vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(str),
        vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(str),
        vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(str),
        vol.Optional(ATTR_BACKUPS_EXCLUDE_DATABASE): vol.Boolean(),
    }
)

@ -64,34 +61,10 @@ SCHEMA_UPDATE = vol.Schema(
    }
)

SCHEMA_RESTART = vol.Schema(
    {
        vol.Optional(ATTR_SAFE_MODE, default=False): vol.Boolean(),
        vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
    }
)

SCHEMA_STOP = vol.Schema(
    {
        vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
    }
)


class APIHomeAssistant(CoreSysAttributes):
    """Handle RESTful API for Home Assistant functions."""

    async def _check_offline_migration(self, force: bool = False) -> None:
        """Check and raise if there's an offline DB migration in progress."""
        if (
            not force
            and (state := await self.sys_homeassistant.api.get_api_state())
            and state.offline_db_migration
        ):
            raise APIDBMigrationInProgress(
                "Offline database migration in progress, try again after it has completed"
            )

    @api_process
    async def info(self, request: web.Request) -> dict[str, Any]:
        """Return host information."""
@ -109,7 +82,6 @@ class APIHomeAssistant(CoreSysAttributes):
            ATTR_WATCHDOG: self.sys_homeassistant.watchdog,
            ATTR_AUDIO_INPUT: self.sys_homeassistant.audio_input,
            ATTR_AUDIO_OUTPUT: self.sys_homeassistant.audio_output,
            ATTR_BACKUPS_EXCLUDE_DATABASE: self.sys_homeassistant.backups_exclude_database,
        }

    @api_process
@ -118,10 +90,7 @@ class APIHomeAssistant(CoreSysAttributes):
        body = await api_validate(SCHEMA_OPTIONS, request)

        if ATTR_IMAGE in body:
            self.sys_homeassistant.set_image(body[ATTR_IMAGE])
            self.sys_homeassistant.override_image = (
                self.sys_homeassistant.image != self.sys_homeassistant.default_image
            )
            self.sys_homeassistant.image = body[ATTR_IMAGE]

        if ATTR_BOOT in body:
            self.sys_homeassistant.boot = body[ATTR_BOOT]
@ -144,12 +113,7 @@ class APIHomeAssistant(CoreSysAttributes):
        if ATTR_AUDIO_OUTPUT in body:
            self.sys_homeassistant.audio_output = body[ATTR_AUDIO_OUTPUT]

        if ATTR_BACKUPS_EXCLUDE_DATABASE in body:
            self.sys_homeassistant.backups_exclude_database = body[
                ATTR_BACKUPS_EXCLUDE_DATABASE
            ]

        await self.sys_homeassistant.save_data()
        self.sys_homeassistant.save_data()

    @api_process
    async def stats(self, request: web.Request) -> dict[Any, str]:
@ -173,7 +137,6 @@ class APIHomeAssistant(CoreSysAttributes):
    async def update(self, request: web.Request) -> None:
        """Update Home Assistant."""
        body = await api_validate(SCHEMA_UPDATE, request)
        await self._check_offline_migration()

        await asyncio.shield(
            self.sys_homeassistant.core.update(
@ -183,12 +146,9 @@ class APIHomeAssistant(CoreSysAttributes):
        )

    @api_process
    async def stop(self, request: web.Request) -> Awaitable[None]:
    def stop(self, request: web.Request) -> Awaitable[None]:
        """Stop Home Assistant."""
        body = await api_validate(SCHEMA_STOP, request)
        await self._check_offline_migration(force=body[ATTR_FORCE])

        return await asyncio.shield(self.sys_homeassistant.core.stop())
        return asyncio.shield(self.sys_homeassistant.core.stop())

    @api_process
    def start(self, request: web.Request) -> Awaitable[None]:
@ -196,24 +156,19 @@ class APIHomeAssistant(CoreSysAttributes):
        return asyncio.shield(self.sys_homeassistant.core.start())

    @api_process
    async def restart(self, request: web.Request) -> None:
    def restart(self, request: web.Request) -> Awaitable[None]:
        """Restart Home Assistant."""
        body = await api_validate(SCHEMA_RESTART, request)
        await self._check_offline_migration(force=body[ATTR_FORCE])

        await asyncio.shield(
            self.sys_homeassistant.core.restart(safe_mode=body[ATTR_SAFE_MODE])
        )
        return asyncio.shield(self.sys_homeassistant.core.restart())

    @api_process
    async def rebuild(self, request: web.Request) -> None:
    def rebuild(self, request: web.Request) -> Awaitable[None]:
        """Rebuild Home Assistant."""
        body = await api_validate(SCHEMA_RESTART, request)
        await self._check_offline_migration(force=body[ATTR_FORCE])
        return asyncio.shield(self.sys_homeassistant.core.rebuild())

        await asyncio.shield(
            self.sys_homeassistant.core.rebuild(safe_mode=body[ATTR_SAFE_MODE])
        )
    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request: web.Request) -> Awaitable[bytes]:
        """Return Home Assistant Docker logs."""
        return self.sys_homeassistant.core.logs()

    @api_process
    async def check(self, request: web.Request) -> None:
@ -1,11 +1,9 @@
"""Init file for Supervisor host RESTful API."""

import asyncio
from contextlib import suppress
import logging
from typing import Any

from aiohttp import ClientConnectionResetError, ClientPayloadError, web
from aiohttp import web
from aiohttp.hdrs import ACCEPT, RANGE
import voluptuous as vol
from voluptuous.error import CoerceInvalid
@ -29,16 +27,8 @@ from ..const import (
    ATTR_TIMEZONE,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIDBMigrationInProgress, APIError, HostLogError
from ..host.const import (
    PARAM_BOOT_ID,
    PARAM_FOLLOW,
    PARAM_SYSLOG_IDENTIFIER,
    LogFormat,
    LogFormatter,
)
from ..host.logs import SYSTEMD_JOURNAL_GATEWAYD_LINES_MAX
from ..utils.systemd_journal import journal_logs_reader
from ..exceptions import APIError, HostLogError
from ..host.const import PARAM_BOOT_ID, PARAM_FOLLOW, PARAM_SYSLOG_IDENTIFIER
from .const import (
    ATTR_AGENT_VERSION,
    ATTR_APPARMOR_VERSION,
@ -48,48 +38,26 @@ from .const import (
    ATTR_BROADCAST_MDNS,
    ATTR_DT_SYNCHRONIZED,
    ATTR_DT_UTC,
    ATTR_FORCE,
    ATTR_IDENTIFIERS,
    ATTR_LLMNR_HOSTNAME,
    ATTR_STARTUP_TIME,
    ATTR_USE_NTP,
    ATTR_VIRTUALIZATION,
    CONTENT_TYPE_TEXT,
    CONTENT_TYPE_X_LOG,
)
from .utils import api_process, api_process_raw, api_validate
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

IDENTIFIER = "identifier"
BOOTID = "bootid"
DEFAULT_LINES = 100
DEFAULT_RANGE = 100

SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_HOSTNAME): str})

# pylint: disable=no-value-for-parameter
SCHEMA_SHUTDOWN = vol.Schema(
    {
        vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
    }
)
# pylint: enable=no-value-for-parameter


class APIHost(CoreSysAttributes):
    """Handle RESTful API for host functions."""

    async def _check_ha_offline_migration(self, force: bool) -> None:
        """Check if HA has an offline migration in progress and raise if not forced."""
        if (
            not force
            and (state := await self.sys_homeassistant.api.get_api_state())
            and state.offline_db_migration
        ):
            raise APIDBMigrationInProgress(
                "Home Assistant offline database migration in progress, please wait until complete before shutting down host"
            )

    @api_process
    async def info(self, request):
        """Return host information."""
@ -97,13 +65,12 @@ class APIHost(CoreSysAttributes):
            ATTR_AGENT_VERSION: self.sys_dbus.agent.version,
            ATTR_APPARMOR_VERSION: self.sys_host.apparmor.version,
            ATTR_CHASSIS: self.sys_host.info.chassis,
            ATTR_VIRTUALIZATION: self.sys_host.info.virtualization,
            ATTR_CPE: self.sys_host.info.cpe,
            ATTR_DEPLOYMENT: self.sys_host.info.deployment,
            ATTR_DISK_FREE: await self.sys_host.info.free_space(),
            ATTR_DISK_TOTAL: await self.sys_host.info.total_space(),
            ATTR_DISK_USED: await self.sys_host.info.used_space(),
            ATTR_DISK_LIFE_TIME: await self.sys_host.info.disk_life_time(),
            ATTR_DISK_FREE: self.sys_host.info.free_space,
            ATTR_DISK_TOTAL: self.sys_host.info.total_space,
            ATTR_DISK_USED: self.sys_host.info.used_space,
            ATTR_DISK_LIFE_TIME: self.sys_host.info.disk_life_time,
            ATTR_FEATURES: self.sys_host.features,
            ATTR_HOSTNAME: self.sys_host.info.hostname,
            ATTR_LLMNR_HOSTNAME: self.sys_host.info.llmnr_hostname,
@ -131,20 +98,14 @@ class APIHost(CoreSysAttributes):
        )

    @api_process
    async def reboot(self, request):
    def reboot(self, request):
        """Reboot host."""
        body = await api_validate(SCHEMA_SHUTDOWN, request)
        await self._check_ha_offline_migration(force=body[ATTR_FORCE])

        return await asyncio.shield(self.sys_host.control.reboot())
        return asyncio.shield(self.sys_host.control.reboot())

    @api_process
    async def shutdown(self, request):
    def shutdown(self, request):
        """Poweroff host."""
        body = await api_validate(SCHEMA_SHUTDOWN, request)
        await self._check_ha_offline_migration(force=body[ATTR_FORCE])

        return await asyncio.shield(self.sys_host.control.shutdown())
        return asyncio.shield(self.sys_host.control.shutdown())

    @api_process
    def reload(self, request):
@ -192,100 +153,50 @@ class APIHost(CoreSysAttributes):
            raise APIError() from err
        return possible_offset

    async def advanced_logs_handler(
    @api_process
    async def advanced_logs(
        self, request: web.Request, identifier: str | None = None, follow: bool = False
    ) -> web.StreamResponse:
        """Return systemd-journald logs."""
        log_formatter = LogFormatter.PLAIN
        params: dict[str, Any] = {}
        params = {}
        if identifier:
            params[PARAM_SYSLOG_IDENTIFIER] = identifier
        elif IDENTIFIER in request.match_info:
            params[PARAM_SYSLOG_IDENTIFIER] = request.match_info[IDENTIFIER]
            params[PARAM_SYSLOG_IDENTIFIER] = request.match_info.get(IDENTIFIER)
        else:
            params[PARAM_SYSLOG_IDENTIFIER] = self.sys_host.logs.default_identifiers
            # host logs should be always verbose, no matter what Accept header is used
            log_formatter = LogFormatter.VERBOSE

        if BOOTID in request.match_info:
            params[PARAM_BOOT_ID] = await self._get_boot_id(request.match_info[BOOTID])
            params[PARAM_BOOT_ID] = await self._get_boot_id(
                request.match_info.get(BOOTID)
            )
        if follow:
            params[PARAM_FOLLOW] = ""

        if ACCEPT in request.headers and request.headers[ACCEPT] not in [
            CONTENT_TYPE_TEXT,
            CONTENT_TYPE_X_LOG,
            "*/*",
        ]:
            raise APIError(
                "Invalid content type requested. Only text/plain and text/x-log "
                "supported for now."
                "Invalid content type requested. Only text/plain supported for now."
            )

        if "verbose" in request.query or request.headers[ACCEPT] == CONTENT_TYPE_X_LOG:
            log_formatter = LogFormatter.VERBOSE

        if "lines" in request.query:
            lines = request.query.get("lines", DEFAULT_LINES)
            try:
                lines = int(lines)
            except ValueError:
                # If the user passed a non-integer value, just use the default instead of error.
                lines = DEFAULT_LINES
            finally:
                # We can't use the entries= Range header syntax to refer to the last 1 line,
                # and passing 1 to the calculation below would return the 1st line of the logs
                # instead. Since this is really an edge case that doesn't matter much, we'll just
                # return 2 lines at minimum.
                lines = max(2, lines)
            # entries=cursor[[:num_skip]:num_entries]
            range_header = f"entries=:-{lines - 1}:{SYSTEMD_JOURNAL_GATEWAYD_LINES_MAX if follow else lines}"
        elif RANGE in request.headers:
            range_header = request.headers[RANGE]
        if RANGE in request.headers:
            range_header = request.headers.get(RANGE)
        else:
            range_header = f"entries=:-{DEFAULT_LINES - 1}:{SYSTEMD_JOURNAL_GATEWAYD_LINES_MAX if follow else DEFAULT_LINES}"
            range_header = f"entries=:-{DEFAULT_RANGE}:"

        async with self.sys_host.logs.journald_logs(
            params=params, range_header=range_header, accept=LogFormat.JOURNAL
            params=params, range_header=range_header
        ) as resp:
            try:
                response = web.StreamResponse()
                response.content_type = CONTENT_TYPE_TEXT
                headers_returned = False
                async for cursor, line in journal_logs_reader(resp, log_formatter):
                    try:
                        if not headers_returned:
                            if cursor:
                                response.headers["X-First-Cursor"] = cursor
                            response.headers["X-Accel-Buffering"] = "no"
                            await response.prepare(request)
                            headers_returned = True
                        await response.write(line.encode("utf-8") + b"\n")
                    except ClientConnectionResetError as err:
                        # When client closes the connection while reading busy logs, we
                        # sometimes get this exception. It should be safe to ignore it.
                        _LOGGER.debug(
                            "ClientConnectionResetError raised when returning journal logs: %s",
                            err,
                        )
                        break
                    except ConnectionError as err:
                        _LOGGER.warning(
                            "%s raised when returning journal logs: %s",
                            type(err).__name__,
                            err,
                        )
                        break
            except (ConnectionResetError, ClientPayloadError) as ex:
                # ClientPayloadError is most likely caused by the client closing the connection
                await response.prepare(request)
                async for data in resp.content:
                    await response.write(data)
            except ConnectionResetError as ex:
                raise APIError(
                    "Connection reset when trying to fetch data from systemd-journald."
                ) from ex
            return response

    @api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
    async def advanced_logs(
        self, request: web.Request, identifier: str | None = None, follow: bool = False
    ) -> web.StreamResponse:
        """Return systemd-journald logs. Wrapped as standard API handler."""
        return await self.advanced_logs_handler(request, identifier, follow)
@ -1,5 +1,4 @@
"""Supervisor Add-on ingress service."""

import asyncio
from ipaddress import ip_address
import logging
@ -22,18 +21,11 @@ from ..const import (
    ATTR_ICON,
    ATTR_PANELS,
    ATTR_SESSION,
    ATTR_SESSION_DATA_USER_ID,
    ATTR_TITLE,
    HEADER_REMOTE_USER_DISPLAY_NAME,
    HEADER_REMOTE_USER_ID,
    HEADER_REMOTE_USER_NAME,
    HEADER_TOKEN,
    HEADER_TOKEN_OLD,
    IngressSessionData,
    IngressSessionDataUser,
)
from ..coresys import CoreSysAttributes
from ..exceptions import HomeAssistantAPIError
from .const import COOKIE_INGRESS
from .utils import api_process, api_validate, require_home_assistant

@ -41,49 +33,13 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)

VALIDATE_SESSION_DATA = vol.Schema({ATTR_SESSION: str})

"""Expected optional payload of create session request"""
SCHEMA_INGRESS_CREATE_SESSION_DATA = vol.Schema(
    {
        vol.Optional(ATTR_SESSION_DATA_USER_ID): str,
    }
)


# from https://github.com/aio-libs/aiohttp/blob/8ae650bee4add9f131d49b96a0a150311ea58cd1/aiohttp/helpers.py#L1059C1-L1079C1
def must_be_empty_body(method: str, code: int) -> bool:
    """Check if a request must return an empty body."""
    return (
        status_code_must_be_empty_body(code)
        or method_must_be_empty_body(method)
        or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT)
    )


def method_must_be_empty_body(method: str) -> bool:
    """Check if a method must return an empty body."""
    # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
    # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2
    return method.upper() == hdrs.METH_HEAD


def status_code_must_be_empty_body(code: int) -> bool:
    """Check if a status code must return an empty body."""
    # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
    return code in {204, 304} or 100 <= code < 200


class APIIngress(CoreSysAttributes):
    """Ingress view to handle add-on webui routing."""

    _list_of_users: list[IngressSessionDataUser]

    def __init__(self) -> None:
        """Initialize APIIngress."""
        self._list_of_users = []

    def _extract_addon(self, request: web.Request) -> Addon:
        """Return addon, throw an exception if it doesn't exist."""
        token = request.match_info["token"]
        token = request.match_info.get("token")

        # Find correct add-on
        addon = self.sys_ingress.get(token)
@ -115,24 +71,12 @@ class APIIngress(CoreSysAttributes):
    @require_home_assistant
    async def create_session(self, request: web.Request) -> dict[str, Any]:
        """Create a new session."""
        schema_ingress_config_session_data = await api_validate(
            SCHEMA_INGRESS_CREATE_SESSION_DATA, request
        )
        data: IngressSessionData | None = None

        if ATTR_SESSION_DATA_USER_ID in schema_ingress_config_session_data:
            user = await self._find_user_by_id(
                schema_ingress_config_session_data[ATTR_SESSION_DATA_USER_ID]
            )
            if user:
                data = IngressSessionData(user)

        session = self.sys_ingress.create_session(data)
        session = self.sys_ingress.create_session()
        return {ATTR_SESSION: session}

    @api_process
    @require_home_assistant
    async def validate_session(self, request: web.Request) -> None:
    async def validate_session(self, request: web.Request) -> dict[str, Any]:
        """Validate session and extend how long it's valid for."""
        data = await api_validate(VALIDATE_SESSION_DATA, request)

@ -147,22 +91,21 @@ class APIIngress(CoreSysAttributes):
        """Route data to Supervisor ingress service."""

        # Check Ingress Session
        session = request.cookies.get(COOKIE_INGRESS, "")
        session = request.cookies.get(COOKIE_INGRESS)
        if not self.sys_ingress.validate_session(session):
            _LOGGER.warning("No valid ingress session %s", session)
            raise HTTPUnauthorized()

        # Process requests
        addon = self._extract_addon(request)
        path = request.match_info.get("path", "")
        session_data = self.sys_ingress.get_session_data(session)
        path = request.match_info.get("path")
        try:
            # Websocket
            if _is_websocket(request):
                return await self._handle_websocket(request, addon, path, session_data)
                return await self._handle_websocket(request, addon, path)

            # Request
            return await self._handle_request(request, addon, path, session_data)
            return await self._handle_request(request, addon, path)

        except aiohttp.ClientError as err:
            _LOGGER.error("Ingress error: %s", err)
@ -170,11 +113,7 @@ class APIIngress(CoreSysAttributes):
        raise HTTPBadGateway()

    async def _handle_websocket(
        self,
        request: web.Request,
        addon: Addon,
        path: str,
        session_data: IngressSessionData | None,
        self, request: web.Request, addon: Addon, path: str
    ) -> web.WebSocketResponse:
        """Ingress route for websocket."""
        if hdrs.SEC_WEBSOCKET_PROTOCOL in request.headers:
@ -183,7 +122,7 @@ class APIIngress(CoreSysAttributes):
                for proto in request.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
            ]
        else:
            req_protocols = []
            req_protocols = ()

        ws_server = web.WebSocketResponse(
            protocols=req_protocols, autoclose=False, autoping=False
@ -192,7 +131,7 @@ class APIIngress(CoreSysAttributes):

        # Preparing
        url = self._create_url(addon, path)
        source_header = _init_header(request, addon, session_data)
        source_header = _init_header(request, addon)

        # Support GET query
        if request.query_string:
@ -218,15 +157,11 @@ class APIIngress(CoreSysAttributes):
        return ws_server

    async def _handle_request(
        self,
        request: web.Request,
        addon: Addon,
        path: str,
        session_data: IngressSessionData | None,
        self, request: web.Request, addon: Addon, path: str
    ) -> web.Response | web.StreamResponse:
        """Ingress route for request."""
        url = self._create_url(addon, path)
        source_header = _init_header(request, addon, session_data)
        source_header = _init_header(request, addon)

        # Passing the raw stream breaks requests for some webservers
        # since we just need it for POST requests really, for all other methods
@ -249,18 +184,10 @@ class APIIngress(CoreSysAttributes):
            skip_auto_headers={hdrs.CONTENT_TYPE},
        ) as result:
            headers = _response_header(result)
            # Avoid parsing content_type in simple cases for better performance
            if maybe_content_type := result.headers.get(hdrs.CONTENT_TYPE):
                content_type = (maybe_content_type.partition(";"))[0].strip()
            else:
                content_type = result.content_type

            # Simple request
            if (
                # empty body responses should not be streamed,
                # otherwise aiohttp < 3.9.0 may generate
                # an invalid "0\r\n\r\n" chunk instead of an empty response.
                must_be_empty_body(request.method, result.status)
                or hdrs.CONTENT_LENGTH in result.headers
                hdrs.CONTENT_LENGTH in result.headers
                and int(result.headers.get(hdrs.CONTENT_LENGTH, 0)) < 4_194_000
            ):
                # Return Response
@ -268,18 +195,17 @@ class APIIngress(CoreSysAttributes):
                return web.Response(
                    headers=headers,
                    status=result.status,
                    content_type=content_type,
                    content_type=result.content_type,
                    body=body,
                )

            # Stream response
            response = web.StreamResponse(status=result.status, headers=headers)
            response.content_type = content_type
            response.content_type = result.content_type

            try:
                response.headers["X-Accel-Buffering"] = "no"
                await response.prepare(request)
                async for data, _ in result.content.iter_chunks():
                async for data in result.content.iter_chunked(4096):
                    await response.write(data)

        except (
@ -291,34 +217,10 @@ class APIIngress(CoreSysAttributes):

        return response

    async def _find_user_by_id(self, user_id: str) -> IngressSessionDataUser | None:
        """Find user object by the user's ID."""
        try:
            list_of_users = await self.sys_homeassistant.get_users()
        except (HomeAssistantAPIError, TypeError) as err:
            _LOGGER.error(
                "%s error occurred while requesting list of users: %s", type(err), err
            )
            return None

        if list_of_users is not None:
            self._list_of_users = list_of_users

        return next((user for user in self._list_of_users if user.id == user_id), None)


def _init_header(
    request: web.Request, addon: Addon, session_data: IngressSessionData | None
) -> CIMultiDict[str]:
def _init_header(request: web.Request, addon: str) -> CIMultiDict | dict[str, str]:
    """Create initial header."""
    headers = CIMultiDict[str]()

    if session_data is not None:
        headers[HEADER_REMOTE_USER_ID] = session_data.user.id
        if session_data.user.username is not None:
            headers[HEADER_REMOTE_USER_NAME] = session_data.user.username
        if session_data.user.display_name is not None:
            headers[HEADER_REMOTE_USER_DISPLAY_NAME] = session_data.user.display_name
    headers = {}

    # filter flags
    for name, value in request.headers.items():
@ -332,25 +234,21 @@ def _init_header(
            hdrs.SEC_WEBSOCKET_KEY,
            istr(HEADER_TOKEN),
            istr(HEADER_TOKEN_OLD),
            istr(HEADER_REMOTE_USER_ID),
            istr(HEADER_REMOTE_USER_NAME),
            istr(HEADER_REMOTE_USER_DISPLAY_NAME),
        ):
            continue
        headers.add(name, value)
        headers[name] = value

    # Update X-Forwarded-For
    if request.transport:
        forward_for = request.headers.get(hdrs.X_FORWARDED_FOR)
        connected_ip = ip_address(request.transport.get_extra_info("peername")[0])
        headers[hdrs.X_FORWARDED_FOR] = f"{forward_for}, {connected_ip!s}"
    forward_for = request.headers.get(hdrs.X_FORWARDED_FOR)
    connected_ip = ip_address(request.transport.get_extra_info("peername")[0])
    headers[hdrs.X_FORWARDED_FOR] = f"{forward_for}, {connected_ip!s}"

    return headers


def _response_header(response: aiohttp.ClientResponse) -> CIMultiDict[str]:
def _response_header(response: aiohttp.ClientResponse) -> dict[str, str]:
    """Create response header."""
    headers = CIMultiDict[str]()
    headers = {}

    for name, value in response.headers.items():
        if name in (
@ -360,7 +258,7 @@ def _response_header(response: aiohttp.ClientResponse) -> CIMultiDict[str]:
            hdrs.CONTENT_ENCODING,
        ):
            continue
        headers.add(name, value)
        headers[name] = value

    return headers

@ -1,5 +1,4 @@
"""Init file for Supervisor Jobs RESTful API."""

import logging
from typing import Any

@ -7,10 +6,7 @@ from aiohttp import web
import voluptuous as vol

from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APINotFound, JobNotFound
from ..jobs import SupervisorJob
from ..jobs.const import ATTR_IGNORE_CONDITIONS, JobCondition
from .const import ATTR_JOBS
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)
@ -23,68 +19,11 @@ SCHEMA_OPTIONS = vol.Schema(
class APIJobs(CoreSysAttributes):
    """Handle RESTful API for jobs functions."""

    def _extract_job(self, request: web.Request) -> SupervisorJob:
        """Extract job from request or raise."""
        try:
            return self.sys_jobs.get_job(request.match_info["uuid"])
        except JobNotFound:
            raise APINotFound("Job does not exist") from None

    def _list_jobs(self, start: SupervisorJob | None = None) -> list[dict[str, Any]]:
        """Return current job tree.

        Jobs are added to cache as they are created so by default they are in oldest to newest.
        This is correct ordering for child jobs as it makes logical sense to present those in
        the order they occurred within the parent. For the list as a whole, sort from newest
        to oldest as it's likely any client is most interested in the newer ones.
        """
        # Initially sort oldest to newest so all child lists end up in correct order
        jobs_by_parent: dict[str | None, list[SupervisorJob]] = {}
        for job in sorted(self.sys_jobs.jobs):
            if job.internal:
                continue

            if job.parent_id not in jobs_by_parent:
                jobs_by_parent[job.parent_id] = [job]
            else:
                jobs_by_parent[job.parent_id].append(job)

        # After parent-child organization, sort the root jobs only from newest to oldest
        job_list: list[dict[str, Any]] = []
        queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = (
            [(job_list, start)]
            if start
            else [
                (job_list, job)
                for job in sorted(jobs_by_parent.get(None, []), reverse=True)
            ]
        )

        while queue:
            (current_list, current_job) = queue.pop(0)
            child_jobs: list[dict[str, Any]] = []

            # We remove parent_id and instead use that info to represent jobs as a tree
            job_dict = current_job.as_dict() | {"child_jobs": child_jobs}
            job_dict.pop("parent_id")
            current_list.append(job_dict)

            if current_job.uuid in jobs_by_parent:
                queue.extend(
                    [
                        (child_jobs, job)
                        for job in jobs_by_parent.get(current_job.uuid, [])
                    ]
                )

        return job_list

    @api_process
    async def info(self, request: web.Request) -> dict[str, Any]:
        """Return JobManager information."""
        return {
            ATTR_IGNORE_CONDITIONS: self.sys_jobs.ignore_conditions,
            ATTR_JOBS: self._list_jobs(),
        }

    @api_process
@ -95,27 +34,11 @@ class APIJobs(CoreSysAttributes):
        if ATTR_IGNORE_CONDITIONS in body:
            self.sys_jobs.ignore_conditions = body[ATTR_IGNORE_CONDITIONS]

        await self.sys_jobs.save_data()
        self.sys_jobs.save_data()

        await self.sys_resolution.evaluate.evaluate_system()

    @api_process
    async def reset(self, request: web.Request) -> None:
        """Reset options for JobManager."""
        await self.sys_jobs.reset_data()

    @api_process
    async def job_info(self, request: web.Request) -> dict[str, Any]:
        """Get details of a job by ID."""
        job = self._extract_job(request)
        return self._list_jobs(job)[0]

    @api_process
    async def remove_job(self, request: web.Request) -> None:
        """Remove a completed job."""
        job = self._extract_job(request)

        if not job.done:
            raise APIError(f"Job {job.uuid} is not done!")

        self.sys_jobs.remove_job(job)
        self.sys_jobs.reset_data()
@ -1,17 +1,13 @@
"""Handle security part of this API."""

from collections.abc import Callable
import logging
import re
from typing import Final
from urllib.parse import unquote

from aiohttp.web import Request, Response, middleware
from aiohttp.web import Request, RequestHandler, Response, middleware
from aiohttp.web_exceptions import HTTPBadRequest, HTTPForbidden, HTTPUnauthorized
from awesomeversion import AwesomeVersion

from supervisor.homeassistant.const import LANDINGPAGE

from ...addons.const import RE_SLUG
from ...const import (
    REQUEST_FROM,
@ -20,11 +16,10 @@ from ...const import (
    ROLE_DEFAULT,
    ROLE_HOMEASSISTANT,
    ROLE_MANAGER,
    VALID_API_STATES,
    CoreState,
)
from ...coresys import CoreSys, CoreSysAttributes
from ...utils import version_is_new_enough
from ..utils import api_return_error, extract_supervisor_token
from ..utils import api_return_error, excract_supervisor_token

_LOGGER: logging.Logger = logging.getLogger(__name__)
_CORE_VERSION: Final = AwesomeVersion("2023.3.4")
@ -81,13 +76,6 @@ ADDONS_API_BYPASS: Final = re.compile(
    r")$"
)

# Home Assistant only
CORE_ONLY_PATHS: Final = re.compile(
    r"^(?:"
    r"/addons/" + RE_SLUG + "/sys_options"
    r")$"
)

# Policy role add-on API access
ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
    ROLE_DEFAULT: re.compile(
@ -114,8 +102,6 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
        r"|/addons(?:/" + RE_SLUG + r"/(?!security).+|/reload)?"
        r"|/audio/.+"
        r"|/auth/cache"
        r"|/available_updates"
        r"|/backups.*"
        r"|/cli/.+"
        r"|/core/.+"
        r"|/dns/.+"
@ -125,17 +111,16 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
        r"|/hassos/.+"
        r"|/homeassistant/.+"
        r"|/host/.+"
        r"|/mounts.*"
        r"|/multicast/.+"
        r"|/network/.+"
        r"|/observer/.+"
        r"|/os/(?!datadisk/wipe).+"
        r"|/refresh_updates"
        r"|/os/.+"
        r"|/resolution/.+"
        r"|/security/.+"
        r"|/backups.*"
        r"|/snapshots.*"
        r"|/store.*"
        r"|/supervisor/.+"
        r"|/security/.+"
        r")$"
    ),
    ROLE_ADMIN: re.compile(
@ -180,7 +165,9 @@ class SecurityMiddleware(CoreSysAttributes):
        return unquoted

    @middleware
    async def block_bad_requests(self, request: Request, handler: Callable) -> Response:
    async def block_bad_requests(
        self, request: Request, handler: RequestHandler
    ) -> Response:
        """Process request and block commonly known exploit attempts."""
        if FILTERS.search(self._recursive_unquote(request.path)):
            _LOGGER.warning(
@ -198,20 +185,28 @@ class SecurityMiddleware(CoreSysAttributes):
        return await handler(request)

    @middleware
    async def system_validation(self, request: Request, handler: Callable) -> Response:
    async def system_validation(
        self, request: Request, handler: RequestHandler
    ) -> Response:
        """Check if core is ready to respond."""
        if self.sys_core.state not in VALID_API_STATES:
        if self.sys_core.state not in (
            CoreState.STARTUP,
            CoreState.RUNNING,
            CoreState.FREEZE,
        ):
            return api_return_error(
                message=f"System is not ready with state: {self.sys_core.state}"
                message=f"System is not ready with state: {self.sys_core.state.value}"
            )

        return await handler(request)

    @middleware
    async def token_validation(self, request: Request, handler: Callable) -> Response:
    async def token_validation(
        self, request: Request, handler: RequestHandler
    ) -> Response:
        """Check security access of this layer."""
        request_from: CoreSysAttributes | None = None
        supervisor_token = extract_supervisor_token(request)
        request_from = None
        supervisor_token = excract_supervisor_token(request)

        # Blacklist
        if BLACKLIST.match(request.path):
@ -233,9 +228,6 @@ class SecurityMiddleware(CoreSysAttributes):
        if supervisor_token == self.sys_homeassistant.supervisor_token:
            _LOGGER.debug("%s access from Home Assistant", request.path)
            request_from = self.sys_homeassistant
        elif CORE_ONLY_PATHS.match(request.path):
            _LOGGER.warning("Attempted access to %s from client besides Home Assistant", request.path)
            raise HTTPForbidden()

        # Host
        if supervisor_token == self.sys_plugins.cli.supervisor_token:
@ -279,12 +271,11 @@ class SecurityMiddleware(CoreSysAttributes):
            raise HTTPForbidden()

    @middleware
    async def core_proxy(self, request: Request, handler: Callable) -> Response:
    async def core_proxy(self, request: Request, handler: RequestHandler) -> Response:
        """Validate user from Core API proxy."""
        if (
            request[REQUEST_FROM] != self.sys_homeassistant
            or self.sys_homeassistant.version == LANDINGPAGE
            or version_is_new_enough(self.sys_homeassistant.version, _CORE_VERSION)
            or self.sys_homeassistant.version >= _CORE_VERSION
        ):
            return await handler(request)

@ -1,17 +1,17 @@
"""Init file for supervisor mounts REST API."""

from typing import Any, cast
from typing import Any

from aiohttp import web
import voluptuous as vol

from ..const import ATTR_NAME, ATTR_STATE
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APINotFound
from ..exceptions import APIError
from ..mounts.const import ATTR_DEFAULT_BACKUP_MOUNT, MountUsage
from ..mounts.mount import Mount
from ..mounts.validate import SCHEMA_MOUNT_CONFIG, MountData
from .const import ATTR_MOUNTS, ATTR_USER_PATH
from ..mounts.validate import SCHEMA_MOUNT_CONFIG
from .const import ATTR_MOUNTS
from .utils import api_process, api_validate

SCHEMA_OPTIONS = vol.Schema(
@ -24,13 +24,6 @@ SCHEMA_OPTIONS = vol.Schema(
class APIMounts(CoreSysAttributes):
    """Handle REST API for mounting options."""

    def _extract_mount(self, request: web.Request) -> Mount:
        """Extract mount from request or raise."""
        name = request.match_info["mount"]
        if name not in self.sys_mounts:
            raise APINotFound(f"No mount exists with name {name}")
        return self.sys_mounts.get(name)

    @api_process
    async def info(self, request: web.Request) -> dict[str, Any]:
        """Return MountManager info."""
@ -39,13 +32,7 @@ class APIMounts(CoreSysAttributes):
            if self.sys_mounts.default_backup_mount
            else None,
            ATTR_MOUNTS: [
                mount.to_dict()
                | {
                    ATTR_STATE: mount.state,
                    ATTR_USER_PATH: mount.container_where.as_posix()
                    if mount.container_where
                    else None,
                }
                mount.to_dict() | {ATTR_STATE: mount.state}
                for mount in self.sys_mounts.mounts
            ],
        }
@ -66,15 +53,15 @@ class APIMounts(CoreSysAttributes):
        else:
            self.sys_mounts.default_backup_mount = mount

        await self.sys_mounts.save_data()
        self.sys_mounts.save_data()

    @api_process
    async def create_mount(self, request: web.Request) -> None:
        """Create a new mount in supervisor."""
        body = cast(MountData, await api_validate(SCHEMA_MOUNT_CONFIG, request))
        body = await api_validate(SCHEMA_MOUNT_CONFIG, request)

        if body["name"] in self.sys_mounts:
            raise APIError(f"A mount already exists with name {body['name']}")
        if body[ATTR_NAME] in self.sys_mounts:
            raise APIError(f"A mount already exists with name {body[ATTR_NAME]}")

        mount = Mount.from_dict(self.coresys, body)
        await self.sys_mounts.create_mount(mount)
@ -87,20 +74,19 @@ class APIMounts(CoreSysAttributes):
        if not self.sys_mounts.default_backup_mount:
            self.sys_mounts.default_backup_mount = mount

        await self.sys_mounts.save_data()
        self.sys_mounts.save_data()

    @api_process
    async def update_mount(self, request: web.Request) -> None:
        """Update an existing mount in supervisor."""
        current = self._extract_mount(request)
        name = request.match_info.get("mount")
        name_schema = vol.Schema(
            {vol.Optional(ATTR_NAME, default=current.name): current.name},
            extra=vol.ALLOW_EXTRA,
        )
        body = cast(
            MountData,
            await api_validate(vol.All(name_schema, SCHEMA_MOUNT_CONFIG), request),
            {vol.Optional(ATTR_NAME, default=name): name}, extra=vol.ALLOW_EXTRA
        )
        body = await api_validate(vol.All(name_schema, SCHEMA_MOUNT_CONFIG), request)

        if name not in self.sys_mounts:
            raise APIError(f"No mount exists with name {name}")

        mount = Mount.from_dict(self.coresys, body)
        await self.sys_mounts.create_mount(mount)
@ -113,26 +99,26 @@ class APIMounts(CoreSysAttributes):
        elif self.sys_mounts.default_backup_mount == mount:
            self.sys_mounts.default_backup_mount = None

        await self.sys_mounts.save_data()
        self.sys_mounts.save_data()

    @api_process
    async def delete_mount(self, request: web.Request) -> None:
        """Delete an existing mount in supervisor."""
        current = self._extract_mount(request)
        mount = await self.sys_mounts.remove_mount(current.name)
        name = request.match_info.get("mount")
        mount = await self.sys_mounts.remove_mount(name)

        # If it was a backup mount, reload backups
        if mount.usage == MountUsage.BACKUP:
            self.sys_create_task(self.sys_backups.reload())

        await self.sys_mounts.save_data()
        self.sys_mounts.save_data()

    @api_process
    async def reload_mount(self, request: web.Request) -> None:
        """Reload an existing mount in supervisor."""
        mount = self._extract_mount(request)
        await self.sys_mounts.reload_mount(mount.name)
        name = request.match_info.get("mount")
        await self.sys_mounts.reload_mount(name)

        # If it's a backup mount, reload backups
        if mount.usage == MountUsage.BACKUP:
        if self.sys_mounts.get(name).usage == MountUsage.BACKUP:
            self.sys_create_task(self.sys_backups.reload())
@ -1,5 +1,4 @@
"""Init file for Supervisor Multicast RESTful API."""

import asyncio
from collections.abc import Awaitable
import logging
@ -24,7 +23,8 @@ from ..const import (
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import version_tag
from .utils import api_process, api_validate
from .const import CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

@ -69,6 +69,11 @@ class APIMulticast(CoreSysAttributes):
            raise APIError(f"Version {version} is already in use")
        await asyncio.shield(self.sys_plugins.multicast.update(version))

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request: web.Request) -> Awaitable[bytes]:
        """Return Multicast Docker logs."""
        return self.sys_plugins.multicast.logs()

    @api_process
    def restart(self, request: web.Request) -> Awaitable[None]:
        """Restart Multicast plugin."""
@ -1,16 +1,15 @@
"""REST API for network."""

import asyncio
from collections.abc import Awaitable
from ipaddress import IPv4Address, IPv4Interface, IPv6Address, IPv6Interface
from ipaddress import ip_address, ip_interface
from typing import Any

from aiohttp import web
import attr
import voluptuous as vol

from ..const import (
    ATTR_ACCESSPOINTS,
    ATTR_ADDR_GEN_MODE,
    ATTR_ADDRESS,
    ATTR_AUTH,
    ATTR_CONNECTED,
@ -23,7 +22,6 @@ from ..const import (
    ATTR_ID,
    ATTR_INTERFACE,
    ATTR_INTERFACES,
    ATTR_IP6_PRIVACY,
    ATTR_IPV4,
    ATTR_IPV6,
    ATTR_MAC,
@ -40,43 +38,28 @@ from ..const import (
    ATTR_TYPE,
    ATTR_VLAN,
    ATTR_WIFI,
    DOCKER_IPV4_NETWORK_MASK,
    DOCKER_NETWORK,
    DOCKER_NETWORK_MASK,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APINotFound, HostNetworkNotFound
from ..host.configuration import (
from ..exceptions import APIError, HostNetworkNotFound
from ..host.const import AuthMethod, InterfaceType, WifiMode
from ..host.network import (
    AccessPoint,
    Interface,
    InterfaceAddrGenMode,
    InterfaceIp6Privacy,
    InterfaceMethod,
    Ip6Setting,
    IpConfig,
    IpSetting,
    VlanConfig,
    WifiConfig,
)
from ..host.const import AuthMethod, InterfaceType, WifiMode
from .utils import api_process, api_validate

_SCHEMA_IPV4_CONFIG = vol.Schema(
_SCHEMA_IP_CONFIG = vol.Schema(
    {
        vol.Optional(ATTR_ADDRESS): [vol.Coerce(IPv4Interface)],
        vol.Optional(ATTR_ADDRESS): [vol.Coerce(ip_interface)],
        vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod),
        vol.Optional(ATTR_GATEWAY): vol.Coerce(IPv4Address),
        vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(IPv4Address)],
    }
)

_SCHEMA_IPV6_CONFIG = vol.Schema(
    {
        vol.Optional(ATTR_ADDRESS): [vol.Coerce(IPv6Interface)],
        vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod),
        vol.Optional(ATTR_ADDR_GEN_MODE): vol.Coerce(InterfaceAddrGenMode),
        vol.Optional(ATTR_IP6_PRIVACY): vol.Coerce(InterfaceIp6Privacy),
        vol.Optional(ATTR_GATEWAY): vol.Coerce(IPv6Address),
        vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(IPv6Address)],
        vol.Optional(ATTR_GATEWAY): vol.Coerce(ip_address),
        vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(ip_address)],
    }
)

@ -93,31 +76,18 @@ _SCHEMA_WIFI_CONFIG = vol.Schema(
# pylint: disable=no-value-for-parameter
SCHEMA_UPDATE = vol.Schema(
    {
        vol.Optional(ATTR_IPV4): _SCHEMA_IPV4_CONFIG,
        vol.Optional(ATTR_IPV6): _SCHEMA_IPV6_CONFIG,
        vol.Optional(ATTR_IPV4): _SCHEMA_IP_CONFIG,
        vol.Optional(ATTR_IPV6): _SCHEMA_IP_CONFIG,
        vol.Optional(ATTR_WIFI): _SCHEMA_WIFI_CONFIG,
        vol.Optional(ATTR_ENABLED): vol.Boolean(),
    }
)


def ip4config_struct(config: IpConfig, setting: IpSetting) -> dict[str, Any]:
    """Return a dict with information about IPv4 configuration."""
def ipconfig_struct(config: IpConfig) -> dict[str, Any]:
    """Return a dict with information about ip configuration."""
    return {
        ATTR_METHOD: setting.method,
        ATTR_ADDRESS: [address.with_prefixlen for address in config.address],
        ATTR_NAMESERVERS: [str(address) for address in config.nameservers],
        ATTR_GATEWAY: str(config.gateway) if config.gateway else None,
        ATTR_READY: config.ready,
    }


def ip6config_struct(config: IpConfig, setting: Ip6Setting) -> dict[str, Any]:
    """Return a dict with information about IPv6 configuration."""
    return {
        ATTR_METHOD: setting.method,
        ATTR_ADDR_GEN_MODE: setting.addr_gen_mode,
        ATTR_IP6_PRIVACY: setting.ip6_privacy,
        ATTR_METHOD: config.method,
        ATTR_ADDRESS: [address.with_prefixlen for address in config.address],
        ATTR_NAMESERVERS: [str(address) for address in config.nameservers],
        ATTR_GATEWAY: str(config.gateway) if config.gateway else None,
@ -151,13 +121,8 @@ def interface_struct(interface: Interface) -> dict[str, Any]:
        ATTR_ENABLED: interface.enabled,
        ATTR_CONNECTED: interface.connected,
        ATTR_PRIMARY: interface.primary,
        ATTR_MAC: interface.mac,
        ATTR_IPV4: ip4config_struct(interface.ipv4, interface.ipv4setting)
        if interface.ipv4 and interface.ipv4setting
        else None,
        ATTR_IPV6: ip6config_struct(interface.ipv6, interface.ipv6setting)
        if interface.ipv6 and interface.ipv6setting
        else None,
        ATTR_IPV4: ipconfig_struct(interface.ipv4) if interface.ipv4 else None,
        ATTR_IPV6: ipconfig_struct(interface.ipv6) if interface.ipv6 else None,
        ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None,
        ATTR_VLAN: vlan_struct(interface.vlan) if interface.vlan else None,
    }
@ -191,7 +156,7 @@ class APINetwork(CoreSysAttributes):
        except HostNetworkNotFound:
            pass

        raise APINotFound(f"Interface {name} does not exist") from None
        raise APIError(f"Interface {name} does not exist") from None

    @api_process
    async def info(self, request: web.Request) -> dict[str, Any]:
@ -203,7 +168,7 @@ class APINetwork(CoreSysAttributes):
            ],
            ATTR_DOCKER: {
                ATTR_INTERFACE: DOCKER_NETWORK,
                ATTR_ADDRESS: str(DOCKER_IPV4_NETWORK_MASK),
                ATTR_ADDRESS: str(DOCKER_NETWORK_MASK),
                ATTR_GATEWAY: str(self.sys_docker.network.gateway),
                ATTR_DNS: str(self.sys_docker.network.dns),
            },
@ -214,14 +179,14 @@ class APINetwork(CoreSysAttributes):
    @api_process
    async def interface_info(self, request: web.Request) -> dict[str, Any]:
        """Return network information for an interface."""
        interface = self._get_interface(request.match_info[ATTR_INTERFACE])
        interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))

        return interface_struct(interface)

    @api_process
    async def interface_update(self, request: web.Request) -> None:
        """Update the configuration of an interface."""
        interface = self._get_interface(request.match_info[ATTR_INTERFACE])
        interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))

        # Validate data
        body = await api_validate(SCHEMA_UPDATE, request)
@ -231,32 +196,24 @@ class APINetwork(CoreSysAttributes):
        # Apply config
        for key, config in body.items():
            if key == ATTR_IPV4:
                interface.ipv4setting = IpSetting(
                    method=config.get(ATTR_METHOD, InterfaceMethod.STATIC),
                    address=config.get(ATTR_ADDRESS, []),
                    gateway=config.get(ATTR_GATEWAY),
                    nameservers=config.get(ATTR_NAMESERVERS, []),
                interface.ipv4 = attr.evolve(
                    interface.ipv4
                    or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
                    **config,
                )
            elif key == ATTR_IPV6:
                interface.ipv6setting = Ip6Setting(
                    method=config.get(ATTR_METHOD, InterfaceMethod.STATIC),
                    addr_gen_mode=config.get(
                        ATTR_ADDR_GEN_MODE, InterfaceAddrGenMode.DEFAULT
                    ),
                    ip6_privacy=config.get(
                        ATTR_IP6_PRIVACY, InterfaceIp6Privacy.DEFAULT
                    ),
                    address=config.get(ATTR_ADDRESS, []),
                    gateway=config.get(ATTR_GATEWAY),
                    nameservers=config.get(ATTR_NAMESERVERS, []),
                interface.ipv6 = attr.evolve(
                    interface.ipv6
                    or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
                    **config,
                )
            elif key == ATTR_WIFI:
                interface.wifi = WifiConfig(
                    mode=config.get(ATTR_MODE, WifiMode.INFRASTRUCTURE),
                    ssid=config.get(ATTR_SSID, ""),
                    auth=config.get(ATTR_AUTH, AuthMethod.OPEN),
                    psk=config.get(ATTR_PSK, None),
                    signal=None,
                interface.wifi = attr.evolve(
                    interface.wifi
                    or WifiConfig(
                        WifiMode.INFRASTRUCTURE, "", AuthMethod.OPEN, None, None
                    ),
                    **config,
                )
            elif key == ATTR_ENABLED:
                interface.enabled = config
@ -273,7 +230,7 @@ class APINetwork(CoreSysAttributes):
    @api_process
    async def scan_accesspoints(self, request: web.Request) -> dict[str, Any]:
        """Scan and return a list of available networks."""
        interface = self._get_interface(request.match_info[ATTR_INTERFACE])
        interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))

        # Only wlan is supported
        if interface.type != InterfaceType.WIRELESS:
@ -286,10 +243,8 @@ class APINetwork(CoreSysAttributes):
    @api_process
    async def create_vlan(self, request: web.Request) -> None:
        """Create a new vlan."""
        interface = self._get_interface(request.match_info[ATTR_INTERFACE])
        vlan = int(request.match_info.get(ATTR_VLAN, -1))
        if vlan < 0:
            raise APIError(f"Invalid vlan specified: {vlan}")
        interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))
        vlan = int(request.match_info.get(ATTR_VLAN))

        # Only ethernet is supported
        if interface.type != InterfaceType.ETHERNET:
@ -300,42 +255,34 @@ class APINetwork(CoreSysAttributes):

        vlan_config = VlanConfig(vlan, interface.name)

        ipv4_setting = None
        ipv4_config = None
        if ATTR_IPV4 in body:
            ipv4_setting = IpSetting(
                method=body[ATTR_IPV4].get(ATTR_METHOD, InterfaceMethod.AUTO),
                address=body[ATTR_IPV4].get(ATTR_ADDRESS, []),
                gateway=body[ATTR_IPV4].get(ATTR_GATEWAY, None),
                nameservers=body[ATTR_IPV4].get(ATTR_NAMESERVERS, []),
            ipv4_config = IpConfig(
                body[ATTR_IPV4].get(ATTR_METHOD, InterfaceMethod.AUTO),
                body[ATTR_IPV4].get(ATTR_ADDRESS, []),
                body[ATTR_IPV4].get(ATTR_GATEWAY, None),
                body[ATTR_IPV4].get(ATTR_NAMESERVERS, []),
                None,
            )

        ipv6_setting = None
        ipv6_config = None
        if ATTR_IPV6 in body:
            ipv6_setting = Ip6Setting(
                method=body[ATTR_IPV6].get(ATTR_METHOD, InterfaceMethod.AUTO),
                addr_gen_mode=body[ATTR_IPV6].get(
                    ATTR_ADDR_GEN_MODE, InterfaceAddrGenMode.DEFAULT
                ),
                ip6_privacy=body[ATTR_IPV6].get(
                    ATTR_IP6_PRIVACY, InterfaceIp6Privacy.DEFAULT
                ),
                address=body[ATTR_IPV6].get(ATTR_ADDRESS, []),
                gateway=body[ATTR_IPV6].get(ATTR_GATEWAY, None),
                nameservers=body[ATTR_IPV6].get(ATTR_NAMESERVERS, []),
            ipv6_config = IpConfig(
                body[ATTR_IPV6].get(ATTR_METHOD, InterfaceMethod.AUTO),
                body[ATTR_IPV6].get(ATTR_ADDRESS, []),
                body[ATTR_IPV6].get(ATTR_GATEWAY, None),
                body[ATTR_IPV6].get(ATTR_NAMESERVERS, []),
                None,
            )

        vlan_interface = Interface(
            "",
            "",
            "",
            True,
            True,
            False,
            InterfaceType.VLAN,
            None,
            ipv4_setting,
            None,
            ipv6_setting,
            ipv4_config,
            ipv6_config,
            None,
            vlan_config,
        )
@ -1,5 +1,4 @@
"""Init file for Supervisor Observer RESTful API."""

import asyncio
import logging
from typing import Any
@ -1,59 +1,47 @@
"""Init file for Supervisor HassOS RESTful API."""

import asyncio
from collections.abc import Awaitable
import logging
import re
from typing import Any

from aiohttp import web
import voluptuous as vol

from ..const import (
    ATTR_ACTIVITY_LED,
    ATTR_BOARD,
    ATTR_BOOT,
    ATTR_DEVICES,
    ATTR_DISK_LED,
    ATTR_HEARTBEAT_LED,
    ATTR_ID,
    ATTR_NAME,
    ATTR_POWER_LED,
    ATTR_SERIAL,
    ATTR_SIZE,
    ATTR_STATE,
    ATTR_SWAP_SIZE,
    ATTR_SWAPPINESS,
    ATTR_UPDATE_AVAILABLE,
    ATTR_VERSION,
    ATTR_VERSION_LATEST,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APINotFound, BoardInvalidError
from ..exceptions import BoardInvalidError
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..validate import version_tag
from .const import (
    ATTR_BOOT_SLOT,
    ATTR_BOOT_SLOTS,
    ATTR_DATA_DISK,
    ATTR_DEV_PATH,
    ATTR_DEVICE,
    ATTR_DISK_LED,
    ATTR_DISKS,
    ATTR_HEARTBEAT_LED,
    ATTR_MODEL,
    ATTR_STATUS,
    ATTR_SYSTEM_HEALTH_LED,
    ATTR_POWER_LED,
    ATTR_VENDOR,
    BootSlot,
)
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

# pylint: disable=no-value-for-parameter
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})
SCHEMA_SET_BOOT_SLOT = vol.Schema({vol.Required(ATTR_BOOT_SLOT): vol.Coerce(BootSlot)})
SCHEMA_DISK = vol.Schema({vol.Required(ATTR_DEVICE): str})

# pylint: disable=no-value-for-parameter
SCHEMA_YELLOW_OPTIONS = vol.Schema(
    {
        vol.Optional(ATTR_DISK_LED): vol.Boolean(),
@ -61,23 +49,6 @@ SCHEMA_YELLOW_OPTIONS = vol.Schema(
        vol.Optional(ATTR_POWER_LED): vol.Boolean(),
    }
)
SCHEMA_GREEN_OPTIONS = vol.Schema(
    {
        vol.Optional(ATTR_ACTIVITY_LED): vol.Boolean(),
        vol.Optional(ATTR_POWER_LED): vol.Boolean(),
        vol.Optional(ATTR_SYSTEM_HEALTH_LED): vol.Boolean(),
    }
)

RE_SWAP_SIZE = re.compile(r"^\d+([KMG](i?B)?|B)?$", re.IGNORECASE)

SCHEMA_SWAP_OPTIONS = vol.Schema(
    {
        vol.Optional(ATTR_SWAP_SIZE): vol.Match(RE_SWAP_SIZE),
        vol.Optional(ATTR_SWAPPINESS): vol.All(int, vol.Range(min=0, max=200)),
    }
)
# pylint: enable=no-value-for-parameter


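The swap schema above encodes its constraints in RE_SWAP_SIZE plus a vol.Range. A self-contained check of what passes validation; the literal keys "swap_size" and "swappiness" stand in for ATTR_SWAP_SIZE/ATTR_SWAPPINESS, whose string values are assumed:

import re

import voluptuous as vol

RE_SWAP_SIZE = re.compile(r"^\d+([KMG](i?B)?|B)?$", re.IGNORECASE)
SCHEMA_SWAP_OPTIONS = vol.Schema(
    {
        vol.Optional("swap_size"): vol.Match(RE_SWAP_SIZE),
        vol.Optional("swappiness"): vol.All(int, vol.Range(min=0, max=200)),
    }
)

# Valid: plain bytes, or K/M/G with an optional B/iB suffix, any case.
for size in ("1024", "512M", "1GiB", "2kb"):
    SCHEMA_SWAP_OPTIONS({"swap_size": size})

# Invalid: fractional sizes and out-of-range swappiness raise vol.Invalid.
for bad in ({"swap_size": "1.5G"}, {"swappiness": 300}):
    try:
        SCHEMA_SWAP_OPTIONS(bad)
    except vol.Invalid:
        pass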
class APIOS(CoreSysAttributes):
@ -93,15 +64,6 @@ class APIOS(CoreSysAttributes):
            ATTR_BOARD: self.sys_os.board,
            ATTR_BOOT: self.sys_dbus.rauc.boot_slot,
            ATTR_DATA_DISK: self.sys_os.datadisk.disk_used_id,
            ATTR_BOOT_SLOTS: {
                slot.bootname: {
                    ATTR_STATE: slot.state,
                    ATTR_STATUS: slot.boot_status,
                    ATTR_VERSION: slot.bundle_version,
                }
                for slot in self.sys_os.slots
                if slot.bootname
            },
        }

    @api_process
@ -124,17 +86,6 @@ class APIOS(CoreSysAttributes):

        await asyncio.shield(self.sys_os.datadisk.migrate_disk(body[ATTR_DEVICE]))

    @api_process
    def wipe_data(self, request: web.Request) -> Awaitable[None]:
        """Trigger data disk wipe on Host."""
        return asyncio.shield(self.sys_os.datadisk.wipe_disk())

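Both data-disk operations are wrapped in asyncio.shield, which keeps a destructive job running even if the awaiting request handler is cancelled (for example, the HTTP client disconnects mid-request). A standalone illustration of that behavior; the names are stand-ins, not Supervisor code:

import asyncio


async def wipe_disk() -> None:
    """Stand-in for the real wipe; must not be interrupted once started."""
    await asyncio.sleep(2)
    print("wipe finished")


async def handler() -> None:
    # shield() detaches the inner task from this coroutine's cancellation.
    await asyncio.shield(wipe_disk())


async def main() -> None:
    task = asyncio.create_task(handler())
    await asyncio.sleep(0.1)
    task.cancel()  # simulates the client disconnecting mid-request
    await asyncio.sleep(2.5)  # the shielded wipe still runs to completion


asyncio.run(main())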
    @api_process
    async def set_boot_slot(self, request: web.Request) -> None:
        """Change the active boot slot and reboot into it."""
        body = await api_validate(SCHEMA_SET_BOOT_SLOT, request)
        await asyncio.shield(self.sys_os.set_boot_slot(body[ATTR_BOOT_SLOT]))

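In SCHEMA_SET_BOOT_SLOT, vol.Coerce(BootSlot) converts the incoming string into the enum before the handler runs, so unknown slot names fail validation rather than reaching set_boot_slot. A self-contained sketch; the A/B member values mirror how HAOS names its two system slots, but are assumed here:

from enum import Enum

import voluptuous as vol


class BootSlot(str, Enum):
    """Assumed names of the two HAOS boot slots."""

    A = "A"
    B = "B"


SCHEMA_SET_BOOT_SLOT = vol.Schema({vol.Required("boot_slot"): vol.Coerce(BootSlot)})

assert SCHEMA_SET_BOOT_SLOT({"boot_slot": "B"})["boot_slot"] is BootSlot.B
try:
    SCHEMA_SET_BOOT_SLOT({"boot_slot": "C"})  # not a member
except vol.Invalid:
    pass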
    @api_process
    async def list_data(self, request: web.Request) -> dict[str, Any]:
        """Return possible data targets."""
@ -154,35 +105,6 @@ class APIOS(CoreSysAttributes):
            ],
        }

    @api_process
    async def boards_green_info(self, request: web.Request) -> dict[str, Any]:
        """Get green board settings."""
        return {
            ATTR_ACTIVITY_LED: self.sys_dbus.agent.board.green.activity_led,
            ATTR_POWER_LED: self.sys_dbus.agent.board.green.power_led,
            ATTR_SYSTEM_HEALTH_LED: self.sys_dbus.agent.board.green.user_led,
        }

    @api_process
    async def boards_green_options(self, request: web.Request) -> None:
        """Update green board settings."""
        body = await api_validate(SCHEMA_GREEN_OPTIONS, request)

        if ATTR_ACTIVITY_LED in body:
            await self.sys_dbus.agent.board.green.set_activity_led(
                body[ATTR_ACTIVITY_LED]
            )

        if ATTR_POWER_LED in body:
            await self.sys_dbus.agent.board.green.set_power_led(body[ATTR_POWER_LED])

        if ATTR_SYSTEM_HEALTH_LED in body:
            await self.sys_dbus.agent.board.green.set_user_led(
                body[ATTR_SYSTEM_HEALTH_LED]
            )

        await self.sys_dbus.agent.board.green.save_data()

    @api_process
    async def boards_yellow_info(self, request: web.Request) -> dict[str, Any]:
        """Get yellow board settings."""
@ -198,17 +120,14 @@ class APIOS(CoreSysAttributes):
        body = await api_validate(SCHEMA_YELLOW_OPTIONS, request)

        if ATTR_DISK_LED in body:
            await self.sys_dbus.agent.board.yellow.set_disk_led(body[ATTR_DISK_LED])
            self.sys_dbus.agent.board.yellow.disk_led = body[ATTR_DISK_LED]

        if ATTR_HEARTBEAT_LED in body:
            await self.sys_dbus.agent.board.yellow.set_heartbeat_led(
                body[ATTR_HEARTBEAT_LED]
            )
            self.sys_dbus.agent.board.yellow.heartbeat_led = body[ATTR_HEARTBEAT_LED]

        if ATTR_POWER_LED in body:
            await self.sys_dbus.agent.board.yellow.set_power_led(body[ATTR_POWER_LED])
            self.sys_dbus.agent.board.yellow.power_led = body[ATTR_POWER_LED]

        await self.sys_dbus.agent.board.yellow.save_data()
        self.sys_resolution.create_issue(
            IssueType.REBOOT_REQUIRED,
            ContextType.SYSTEM,
@ -224,53 +143,3 @@ class APIOS(CoreSysAttributes):
        )

        return {}

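Like the VLAN helper sketched earlier, the yellow board options handler is driven over the REST API; the /os/boards/yellow path and the JSON key names below are assumptions read off this handler, and changing the LEDs raises a reboot-required issue as shown above:

import asyncio
import os

import aiohttp


async def set_yellow_leds(disk: bool, heartbeat: bool, power: bool) -> None:
    headers = {"Authorization": f"Bearer {os.environ['SUPERVISOR_TOKEN']}"}
    payload = {"disk_led": disk, "heartbeat_led": heartbeat, "power_led": power}
    async with aiohttp.ClientSession(headers=headers) as session:
        async with session.post(
            "http://supervisor/os/boards/yellow", json=payload
        ) as resp:
            resp.raise_for_status()


asyncio.run(set_yellow_leds(False, True, True))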
    @api_process
    async def config_swap_info(self, request: web.Request) -> dict[str, Any]:
        """Get swap settings."""
        if (
            not self.coresys.os.available
            or not self.coresys.os.version
            or self.coresys.os.version < "15.0"
        ):
            raise APINotFound(
                "Home Assistant OS 15.0 or newer required for swap settings"
            )

        return {
            ATTR_SWAP_SIZE: self.sys_dbus.agent.swap.swap_size,
            ATTR_SWAPPINESS: self.sys_dbus.agent.swap.swappiness,
        }

    @api_process
    async def config_swap_options(self, request: web.Request) -> None:
        """Update swap settings."""
        if (
            not self.coresys.os.available
            or not self.coresys.os.version
            or self.coresys.os.version < "15.0"
        ):
            raise APINotFound(
                "Home Assistant OS 15.0 or newer required for swap settings"
            )

        body = await api_validate(SCHEMA_SWAP_OPTIONS, request)

        reboot_required = False

        if ATTR_SWAP_SIZE in body:
            old_size = self.sys_dbus.agent.swap.swap_size
            await self.sys_dbus.agent.swap.set_swap_size(body[ATTR_SWAP_SIZE])
            reboot_required = reboot_required or old_size != body[ATTR_SWAP_SIZE]

        if ATTR_SWAPPINESS in body:
            old_swappiness = self.sys_dbus.agent.swap.swappiness
            await self.sys_dbus.agent.swap.set_swappiness(body[ATTR_SWAPPINESS])
            reboot_required = reboot_required or old_swappiness != body[ATTR_SWAPPINESS]

        if reboot_required:
            self.sys_resolution.create_issue(
                IssueType.REBOOT_REQUIRED,
                ContextType.SYSTEM,
                suggestions=[SuggestionType.EXECUTE_REBOOT],
            )
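A matching sketch for the swap handler above; the /os/config/swap path is an assumption based on the method names, the size string must match RE_SWAP_SIZE, and changing either value flags a reboot-required issue:

import asyncio
import os

import aiohttp


async def set_swap(size: str, swappiness: int) -> None:
    headers = {"Authorization": f"Bearer {os.environ['SUPERVISOR_TOKEN']}"}
    async with aiohttp.ClientSession(headers=headers) as session:
        async with session.post(
            "http://supervisor/os/config/swap",
            json={"swap_size": size, "swappiness": swappiness},
        ) as resp:
            resp.raise_for_status()


asyncio.run(set_swap("2G", 60))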
@ -1 +1 @@
!function(){function d(d){var e=document.createElement("script");e.src=d,document.body.appendChild(e)}if(/Edge?\/(12[4-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Firefox\/(12[5-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Chrom(ium|e)\/(109|1[1-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|(Maci|X1{2}).+ Version\/(17\.([5-9]|\d{2,})|(1[89]|[2-9]\d|\d{3,})\.\d+)([,.]\d+|)( \(\w+\)|)( Mobile\/\w+|) Safari\/|Chrome.+OPR\/(1{2}\d|1[2-9]\d|[2-9]\d{2}|\d{4,})\.\d+\.\d+|(CPU[ +]OS|iPhone[ +]OS|CPU[ +]iPhone|CPU IPhone OS|CPU iPad OS)[ +]+(15[._]([6-9]|\d{2,})|(1[6-9]|[2-9]\d|\d{3,})[._]\d+)([._]\d+|)|Android:?[ /-](12[4-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})(\.\d+|)(\.\d+|)|Mobile Safari.+OPR\/([89]\d|\d{3,})\.\d+\.\d+|Android.+Firefox\/(12[5-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Android.+Chrom(ium|e)\/(12[4-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|SamsungBrowser\/(2[5-9]|[3-9]\d|\d{3,})\.\d+|Home As{2}istant\/[\d.]+ \(.+; macOS (1[2-9]|[2-9]\d|\d{3,})\.\d+(\.\d+)?\)/.test(navigator.userAgent))try{new Function("import('/api/hassio/app/frontend_latest/entrypoint.35399ae87c70acf8.js')")()}catch(e){d("/api/hassio/app/frontend_es5/entrypoint.476bfed22da63267.js")}else d("/api/hassio/app/frontend_es5/entrypoint.476bfed22da63267.js")}()
!function(){function n(n){var t=document.createElement("script");t.src=n,document.body.appendChild(t)}if(/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent))n("/api/hassio/app/frontend_es5/entrypoint-NoHhvMA3Ku8.js");else try{new Function("import('/api/hassio/app/frontend_latest/entrypoint-G81gb268sps.js')")()}catch(t){n("/api/hassio/app/frontend_es5/entrypoint-NoHhvMA3Ku8.js")}}()
Binary file not shown.
Binary file not shown.
2
supervisor/api/panel/frontend_es5/1036-6IMueKVv3m4.js
Normal file
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1036-6IMueKVv3m4.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
2
supervisor/api/panel/frontend_es5/1047-g7fFLS9eP4I.js
Normal file
@ -0,0 +1,2 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([[1047],{32594:function(e,t,r){r.d(t,{U:function(){return n}});var n=function(e){return e.stopPropagation()}},75054:function(e,t,r){r.r(t),r.d(t,{HaTimeDuration:function(){return f}});var n,a=r(88962),i=r(33368),o=r(71650),d=r(82390),u=r(69205),l=r(70906),s=r(91808),c=r(68144),v=r(79932),f=(r(47289),(0,s.Z)([(0,v.Mo)("ha-selector-duration")],(function(e,t){var r=function(t){(0,u.Z)(n,t);var r=(0,l.Z)(n);function n(){var t;(0,o.Z)(this,n);for(var a=arguments.length,i=new Array(a),u=0;u<a;u++)i[u]=arguments[u];return t=r.call.apply(r,[this].concat(i)),e((0,d.Z)(t)),t}return(0,i.Z)(n)}(t);return{F:r,d:[{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"selector",value:void 0},{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"value",value:void 0},{kind:"field",decorators:[(0,v.Cb)()],key:"label",value:void 0},{kind:"field",decorators:[(0,v.Cb)()],key:"helper",value:void 0},{kind:"field",decorators:[(0,v.Cb)({type:Boolean})],key:"disabled",value:function(){return!1}},{kind:"field",decorators:[(0,v.Cb)({type:Boolean})],key:"required",value:function(){return!0}},{kind:"method",key:"render",value:function(){var e;return(0,c.dy)(n||(n=(0,a.Z)([' <ha-duration-input .label="','" .helper="','" .data="','" .disabled="','" .required="','" ?enableDay="','"></ha-duration-input> '])),this.label,this.helper,this.value,this.disabled,this.required,null===(e=this.selector.duration)||void 0===e?void 0:e.enable_day)}}]}}),c.oi))}}]);
//# sourceMappingURL=1047-g7fFLS9eP4I.js.map
BIN
supervisor/api/panel/frontend_es5/1047-g7fFLS9eP4I.js.gz
Normal file
Binary file not shown.
@ -0,0 +1 @@
{"version":3,"file":"1047-g7fFLS9eP4I.js","mappings":"yKAAO,IAAMA,EAAkB,SAACC,GAAE,OAAKA,EAAGD,iBAAiB,C,qLCQ9CE,G,UAAcC,EAAAA,EAAAA,GAAA,EAD1BC,EAAAA,EAAAA,IAAc,0BAAuB,SAAAC,EAAAC,GAAA,IACzBJ,EAAc,SAAAK,IAAAC,EAAAA,EAAAA,GAAAN,EAAAK,GAAA,IAAAE,GAAAC,EAAAA,EAAAA,GAAAR,GAAA,SAAAA,IAAA,IAAAS,GAAAC,EAAAA,EAAAA,GAAA,KAAAV,GAAA,QAAAW,EAAAC,UAAAC,OAAAC,EAAA,IAAAC,MAAAJ,GAAAK,EAAA,EAAAA,EAAAL,EAAAK,IAAAF,EAAAE,GAAAJ,UAAAI,GAAA,OAAAP,EAAAF,EAAAU,KAAAC,MAAAX,EAAA,OAAAY,OAAAL,IAAAX,GAAAiB,EAAAA,EAAAA,GAAAX,IAAAA,CAAA,QAAAY,EAAAA,EAAAA,GAAArB,EAAA,EAAAI,GAAA,OAAAkB,EAAdtB,EAAcuB,EAAA,EAAAC,KAAA,QAAAC,WAAA,EACxBC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEnDC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WAAmB,IAAAG,EACjB,OAAOC,EAAAA,EAAAA,IAAIC,IAAAA,GAAAC,EAAAA,EAAAA,GAAA,wIAEEC,KAAKC,MACJD,KAAKE,OACPF,KAAKP,MACDO,KAAKG,SACLH,KAAKI,SACkB,QADVR,EACZI,KAAKK,SAASC,gBAAQ,IAAAV,OAAA,EAAtBA,EAAwBW,WAG3C,IAAC,GA1BiCC,EAAAA,I","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20230601.0/src/common/dom/stop_propagation.ts","https://raw.githubusercontent.com/home-assistant/frontend/20230601.0/src/components/ha-selector/ha-selector-duration.ts"],"names":["stopPropagation","ev","HaTimeDuration","_decorate","customElement","_initialize","_LitElement","_LitElement2","_inherits","_super","_createSuper","_this","_classCallCheck","_len","arguments","length","args","Array","_key","call","apply","concat","_assertThisInitialized","_createClass","F","d","kind","decorators","property","attribute","key","value","type","Boolean","_this$selector$durati","html","_templateObject","_taggedTemplateLiteral","this","label","helper","disabled","required","selector","duration","enable_day","LitElement"],"sourceRoot":""}
File diff suppressed because one or more lines are too long
Binary file not shown.
Binary file not shown.
@ -1 +0,0 @@
{"version":3,"file":"1081.91949d686e61cc12.js","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20250401.0/src/components/ha-button-toggle-group.ts","https://raw.githubusercontent.com/home-assistant/frontend/20250401.0/src/components/ha-selector/ha-selector-button-toggle.ts"],"names":["_decorate","customElement","_initialize","_LitElement","F","constructor","args","d","kind","decorators","property","attribute","key","value","type","Boolean","queryAll","html","_t","_","this","buttons","map","button","iconPath","_t2","label","active","_handleClick","_t3","styleMap","width","fullWidth","length","dense","_this$_buttons","_buttons","forEach","async","updateComplete","shadowRoot","querySelector","style","margin","ev","currentTarget","fireEvent","static","css","_t4","LitElement","HaButtonToggleSelector","_this$selector$button","_this$selector$button2","_this$selector$button3","options","selector","button_toggle","option","translationKey","translation_key","localizeValue","localizedLabel","sort","a","b","caseInsensitiveStringCompare","hass","locale","language","toggleButtons","item","_valueChanged","_ev$detail","_this$value","stopPropagation","detail","target","disabled","undefined"],"mappings":"qXAWgCA,EAAAA,EAAAA,GAAA,EAD/BC,EAAAA,EAAAA,IAAc,4BAAyB,SAAAC,EAAAC,GAkIvC,OAAAC,EAlID,cACgCD,EAAoBE,WAAAA,IAAAC,GAAA,SAAAA,GAAAJ,EAAA,QAApBK,EAAA,EAAAC,KAAA,QAAAC,WAAA,EAC7BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,UAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEC,UAAW,aAAcG,KAAMC,WAAUH,IAAA,YAAAC,KAAAA,GAAA,OAClC,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEvBC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,QAAAC,KAAAA,GAAA,OAAgB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEhDO,EAAAA,EAAAA,IAAS,eAAaJ,IAAA,WAAAC,WAAA,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEvB,WACE,OAAOI,EAAAA,EAAAA,IAAIC,IAAAA,EAAAC,CAAA,uBAELC,KAAKC,QAAQC,KAAKC,GAClBA,EAAOC,UACHP,EAAAA,EAAAA,IAAIQ,IAAAA,EAAAN,CAAA,2GACOI,EAAOG,MACRH,EAAOC,SACND,EAAOV,MACNO,KAAKO,SAAWJ,EAAOV,MACxBO,KAAKQ,eAEhBX,EAAAA,EAAAA,IAAIY,IAAAA,EAAAV,CAAA,iHACMW,EAAAA,EAAAA,GAAS,CACfC,MAAOX,KAAKY,UACL,IAAMZ,KAAKC,QAAQY,OAAtB,IACA,YAGGb,KAAKc,MACLX,EAAOV,MACNO,KAAKO,SAAWJ,EAAOV,MACxBO,KAAKQ,aACXL,EAAOG,SAKxB,GAAC,CAAAlB,KAAA,SAAAI,IAAA,UAAAC,MAED,WAAoB,IAAAsB,EAEL,QAAbA,EAAAf,KAAKgB,gBAAQ,IAAAD,GAAbA,EAAeE,SAAQC,gBACff,EAAOgB,eAEXhB,EAAOiB,WAAYC,cAAc,UACjCC,MAAMC,OAAS,GAAG,GAExB,GAAC,CAAAnC,KAAA,SAAAI,IAAA,eAAAC,MAED,SAAqB+B,GACnBxB,KAAKO,OAASiB,EAAGC,cAAchC,OAC/BiC,EAAAA,EAAAA,GAAU1B,KAAM,gBAAiB,CAAEP,MAAOO,KAAKO,QACjD,GAAC,CAAAnB,KAAA,QAAAuC,QAAA,EAAAnC,IAAA,SAAAC,KAAAA,GAAA,OAEemC,EAAAA,EAAAA,IAAGC,IAAAA,EAAA9B,CAAA,u0CAzDoB+B,EAAAA,I,MCD5BC,GAAsBnD,EAAAA,EAAAA,GAAA,EADlCC,EAAAA,EAAAA,IAAc,+BAA4B,SAAAC,EAAAC,GA4F1C,OAAAC,EA5FD,cACmCD,EAAoBE,WAAAA,IAAAC,GAAA,SAAAA,GAAAJ,EAAA,QAApBK,EAAA,EAAAC,KAAA,QAAAC,WAAA,EAChCC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,gBAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAG9BC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEnDC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WAAmB,IAAAuC,EAAAC,EAAAC,EACjB,MAAMC,GACuB,QAA3BH,EAAAhC,KAAKoC,SAASC,qBAAa,IAAAL,GAAS,QAATA
,EAA3BA,EAA6BG,eAAO,IAAAH,OAAA,EAApCA,EAAsC9B,KAAKoC,GACvB,iBAAXA,EACFA,EACA,CAAE7C,MAAO6C,EAAQhC,MAAOgC,OAC1B,GAEDC,EAA4C,QAA9BN,EAAGjC,KAAKoC,SAASC,qBAAa,IAAAJ,OAAA,EAA3BA,EAA6BO,gBAEhDxC,KAAKyC,eAAiBF,GACxBJ,EAAQlB,SAASqB,IACf,MAAMI,EAAiB1C,KAAKyC,cAC1B,GAAGF,aAA0BD,EAAO7C,SAElCiD,IACFJ,EAAOhC,MAAQoC,EACjB,IAI2B,QAA/BR,EAAIlC,KAAKoC,SAASC,qBAAa,IAAAH,GAA3BA,EAA6BS,MAC/BR,EAAQQ,MAAK,CAACC,EAAGC,KACfC,EAAAA,EAAAA,IACEF,EAAEtC,MACFuC,EAAEvC,MACFN,KAAK+C,KAAKC,OAAOC,YAKvB,MAAMC,EAAgCf,EAAQjC,KAAKiD,IAAkB,CACnE7C,MAAO6C,EAAK7C,MACZb,MAAO0D,EAAK1D,UAGd,OAAOI,EAAAA,EAAAA,IAAIC,IAAAA,EAAAC,CAAA,iHACPC,KAAKM,MAEM4C,EACDlD,KAAKP,MACEO,KAAKoD,cAG5B,GAAC,CAAAhE,KAAA,SAAAI,IAAA,gBAAAC,MAED,SAAsB+B,GAAI,IAAA6B,EAAAC,EACxB9B,EAAG+B,kBAEH,MAAM9D,GAAiB,QAAT4D,EAAA7B,EAAGgC,cAAM,IAAAH,OAAA,EAATA,EAAW5D,QAAS+B,EAAGiC,OAAOhE,MACxCO,KAAK0D,eAAsBC,IAAVlE,GAAuBA,KAAqB,QAAhB6D,EAAMtD,KAAKP,aAAK,IAAA6D,EAAAA,EAAI,MAGrE5B,EAAAA,EAAAA,GAAU1B,KAAM,gBAAiB,CAC/BP,MAAOA,GAEX,GAAC,CAAAL,KAAA,QAAAuC,QAAA,EAAAnC,IAAA,SAAAC,KAAAA,GAAA,OAEemC,EAAAA,EAAAA,IAAGvB,IAAAA,EAAAN,CAAA,wLA5EuB+B,EAAAA,G"}
2
supervisor/api/panel/frontend_es5/1116-YCx9f7hKX80.js
Normal file
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1116-YCx9f7hKX80.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
2
supervisor/api/panel/frontend_es5/1193-AhESuEdTugg.js
Normal file
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1193-AhESuEdTugg.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
@ -1,2 +0,0 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([["12"],{5739:function(e,a,t){t.a(e,(async function(e,i){try{t.r(a),t.d(a,{HaNavigationSelector:()=>c});var d=t(73577),r=(t(71695),t(47021),t(57243)),n=t(50778),l=t(36522),o=t(63297),s=e([o]);o=(s.then?(await s)():s)[0];let u,h=e=>e,c=(0,d.Z)([(0,n.Mo)("ha-selector-navigation")],(function(e,a){return{F:class extends a{constructor(...a){super(...a),e(this)}},d:[{kind:"field",decorators:[(0,n.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,n.Cb)({attribute:!1})],key:"selector",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"value",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"label",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"helper",value:void 0},{kind:"field",decorators:[(0,n.Cb)({type:Boolean,reflect:!0})],key:"disabled",value(){return!1}},{kind:"field",decorators:[(0,n.Cb)({type:Boolean})],key:"required",value(){return!0}},{kind:"method",key:"render",value:function(){return(0,r.dy)(u||(u=h` <ha-navigation-picker .hass="${0}" .label="${0}" .value="${0}" .required="${0}" .disabled="${0}" .helper="${0}" @value-changed="${0}"></ha-navigation-picker> `),this.hass,this.label,this.value,this.required,this.disabled,this.helper,this._valueChanged)}},{kind:"method",key:"_valueChanged",value:function(e){(0,l.B)(this,"value-changed",{value:e.detail.value})}}]}}),r.oi);i()}catch(u){i(u)}}))}}]);
//# sourceMappingURL=12.ffa1bdc0a98802fa.js.map
Binary file not shown.
Binary file not shown.
@ -1 +0,0 @@
{"version":3,"file":"12.ffa1bdc0a98802fa.js","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20250401.0/src/components/ha-selector/ha-selector-navigation.ts"],"names":["HaNavigationSelector","_decorate","customElement","_initialize","_LitElement","F","constructor","args","d","kind","decorators","property","attribute","key","value","type","Boolean","reflect","html","_t","_","this","hass","label","required","disabled","helper","_valueChanged","ev","fireEvent","detail","LitElement"],"mappings":"mVAQaA,GAAoBC,EAAAA,EAAAA,GAAA,EADhCC,EAAAA,EAAAA,IAAc,4BAAyB,SAAAC,EAAAC,GAiCvC,OAAAC,EAjCD,cACiCD,EAAoBE,WAAAA,IAAAC,GAAA,SAAAA,GAAAJ,EAAA,QAApBK,EAAA,EAAAC,KAAA,QAAAC,WAAA,EAC9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,QAASC,SAAS,KAAOJ,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAElEC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WACE,OAAOI,EAAAA,EAAAA,IAAIC,IAAAA,EAAAC,CAAA,mKAECC,KAAKC,KACJD,KAAKE,MACLF,KAAKP,MACFO,KAAKG,SACLH,KAAKI,SACPJ,KAAKK,OACEL,KAAKM,cAG5B,GAAC,CAAAlB,KAAA,SAAAI,IAAA,gBAAAC,MAED,SAAsBc,IACpBC,EAAAA,EAAAA,GAAUR,KAAM,gBAAiB,CAAEP,MAAOc,EAAGE,OAAOhB,OACtD,IAAC,GA/BuCiB,EAAAA,I"}
@ -1,2 +0,0 @@
(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([["1236"],{4121:function(){Intl.PluralRules&&"function"==typeof Intl.PluralRules.__addLocaleData&&Intl.PluralRules.__addLocaleData({data:{categories:{cardinal:["one","other"],ordinal:["one","two","few","other"]},fn:function(e,n){var t=String(e).split("."),a=!t[1],l=Number(t[0])==e,o=l&&t[0].slice(-1),r=l&&t[0].slice(-2);return n?1==o&&11!=r?"one":2==o&&12!=r?"two":3==o&&13!=r?"few":"other":1==e&&a?"one":"other"}},locale:"en"})}}]);
//# sourceMappingURL=1236.64ca65d0ea4d76d4.js.map
Binary file not shown.
Binary file not shown.
@ -1 +0,0 @@
{"version":3,"file":"1236.64ca65d0ea4d76d4.js","sources":["/unknown/node_modules/@formatjs/intl-pluralrules/locale-data/en.js"],"names":["Intl","PluralRules","__addLocaleData","n","ord","s","String","split","v0","t0","Number","n10","slice","n100"],"mappings":"wHAEIA,KAAKC,aAA2D,mBAArCD,KAAKC,YAAYC,iBAC9CF,KAAKC,YAAYC,gBAAgB,CAAC,KAAO,CAAC,WAAa,CAAC,SAAW,CAAC,MAAM,SAAS,QAAU,CAAC,MAAM,MAAM,MAAM,UAAU,GAAK,SAASC,EAAGC,GAC3I,IAAIC,EAAIC,OAAOH,GAAGI,MAAM,KAAMC,GAAMH,EAAE,GAAII,EAAKC,OAAOL,EAAE,KAAOF,EAAGQ,EAAMF,GAAMJ,EAAE,GAAGO,OAAO,GAAIC,EAAOJ,GAAMJ,EAAE,GAAGO,OAAO,GACvH,OAAIR,EAAmB,GAAPO,GAAoB,IAARE,EAAa,MAC9B,GAAPF,GAAoB,IAARE,EAAa,MAClB,GAAPF,GAAoB,IAARE,EAAa,MACzB,QACQ,GAALV,GAAUK,EAAK,MAAQ,OAChC,GAAG,OAAS,M"}
3
supervisor/api/panel/frontend_es5/1246-xNkZ7MzqHIg.js
Normal file
File diff suppressed because one or more lines are too long
Some files were not shown because too many files have changed in this diff