Mirror of https://github.com/home-assistant/core.git, synced 2025-09-19 18:09:48 +00:00

Compare commits: target_web ... cloud-tts- (1 commit)

Commit: 40c71fbaa9
@@ -8,9 +8,6 @@
      "PYTHONASYNCIODEBUG": "1"
    },
    "features": {
-     // Node feature required for Claude Code until fixed https://github.com/anthropics/devcontainer-features/issues/28
-     "ghcr.io/devcontainers/features/node:1": {},
-     "ghcr.io/anthropics/devcontainer-features/claude-code:1.0": {},
      "ghcr.io/devcontainers/features/github-cli:1": {}
    },
    // Port 5683 udp is used by Shelly integration
@@ -14,8 +14,7 @@ tests
# Other virtualization methods
venv
.venv
.vagrant

# Temporary files
-**/__pycache__
**/__pycache__
.github/ISSUE_TEMPLATE/bug_report.yml (vendored, 5 changes)
@@ -1,14 +1,15 @@
 name: Report an issue with Home Assistant Core
 description: Report an issue with Home Assistant Core.
 type: Bug
 body:
   - type: markdown
     attributes:
       value: |
         This issue form is for reporting bugs only!

-        If you have a feature or enhancement request, please [request them here instead][fr].
+        If you have a feature or enhancement request, please use the [feature request][fr] section of our [Community Forum][fr].

-        [fr]: https://github.com/orgs/home-assistant/discussions
+        [fr]: https://community.home-assistant.io/c/feature-requests
   - type: textarea
     validations:
       required: true
.github/ISSUE_TEMPLATE/config.yml (vendored, 4 changes)
@@ -10,8 +10,8 @@ contact_links:
     url: https://www.home-assistant.io/help
     about: We use GitHub for tracking bugs, check our website for resources on getting help.
   - name: Feature Request
-    url: https://github.com/orgs/home-assistant/discussions
-    about: Please use this link to request new features or enhancements to existing features.
+    url: https://community.home-assistant.io/c/feature-requests
+    about: Please use our Community Forum for making feature requests.
   - name: I'm unsure where to go
     url: https://www.home-assistant.io/join-chat
     about: If you are unsure where to go, then joining our chat is recommended; Just ask!
.github/ISSUE_TEMPLATE/task.yml (vendored, 53 changes)
@@ -1,53 +0,0 @@
-name: Task
-description: For staff only - Create a task
-type: Task
-body:
-  - type: markdown
-    attributes:
-      value: |
-        ## ⚠️ RESTRICTED ACCESS
-
-        **This form is restricted to Open Home Foundation staff, authorized contributors, and integration code owners only.**
-
-        If you are a community member wanting to contribute, please:
-        - For bug reports: Use the [bug report form](https://github.com/home-assistant/core/issues/new?template=bug_report.yml)
-        - For feature requests: Submit to [Feature Requests](https://github.com/orgs/home-assistant/discussions)
-
-        ---
-
-        ### For authorized contributors
-
-        Use this form to create tasks for development work, improvements, or other actionable items that need to be tracked.
-  - type: textarea
-    id: description
-    attributes:
-      label: Description
-      description: |
-        Provide a clear and detailed description of the task that needs to be accomplished.
-
-        Be specific about what needs to be done, why it's important, and any constraints or requirements.
-      placeholder: |
-        Describe the task, including:
-        - What needs to be done
-        - Why this task is needed
-        - Expected outcome
-        - Any constraints or requirements
-    validations:
-      required: true
-  - type: textarea
-    id: additional_context
-    attributes:
-      label: Additional context
-      description: |
-        Any additional information, links, research, or context that would be helpful.
-
-        Include links to related issues, research, prototypes, roadmap opportunities etc.
-      placeholder: |
-        - Roadmap opportunity: [link]
-        - Epic: [link]
-        - Feature request: [link]
-        - Technical design documents: [link]
-        - Prototype/mockup: [link]
-        - Dependencies: [links]
-    validations:
-      required: false
.github/copilot-instructions.md (vendored, 1254 changes)
File diff suppressed because it is too large
.github/dependabot.yml (vendored, 3 changes)
@@ -6,6 +6,3 @@ updates:
      interval: daily
      time: "06:00"
    open-pull-requests-limit: 10
-   labels:
-     - dependency
-     - github_actions
.github/workflows/builder.yml (vendored, 50 changes)
@@ -27,12 +27,12 @@ jobs:
      publish: ${{ steps.version.outputs.publish }}
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2
        with:
          fetch-depth: 0

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@v5.6.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}

@@ -90,11 +90,11 @@
        arch: ${{ fromJson(needs.init.outputs.architectures) }}
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2

      - name: Download nightly wheels of frontend
        if: needs.init.outputs.channel == 'dev'
-       uses: dawidd6/action-download-artifact@v11
+       uses: dawidd6/action-download-artifact@v9
        with:
          github_token: ${{secrets.GITHUB_TOKEN}}
          repo: home-assistant/frontend

@@ -105,10 +105,10 @@

      - name: Download nightly wheels of intents
        if: needs.init.outputs.channel == 'dev'
-       uses: dawidd6/action-download-artifact@v11
+       uses: dawidd6/action-download-artifact@v9
        with:
          github_token: ${{secrets.GITHUB_TOKEN}}
-         repo: OHF-Voice/intents-package
+         repo: home-assistant/intents-package
          branch: main
          workflow: nightly.yaml
          workflow_conclusion: success

@@ -116,7 +116,7 @@

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        if: needs.init.outputs.channel == 'dev'
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@v5.6.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}

@@ -175,7 +175,7 @@ jobs:
          sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt

      - name: Download translations
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@v4.3.0
        with:
          name: translations
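The sed line above comments a package out of requirements_all.txt in place before wheels are built; as a standalone step it amounts to roughly the following sketch (the step name is illustrative):

      - name: Comment out pykrakenapi in requirements
        run: |
          # rewrites "pykrakenapi" to "# pykrakenapi", disabling that requirement line in place
          sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt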
@@ -190,7 +190,7 @@
          echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE

      - name: Login to GitHub Container Registry
-       uses: docker/login-action@v3.5.0
+       uses: docker/login-action@v3.4.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}

@@ -242,7 +242,7 @@
          - green
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2

      - name: Set build additional args
        run: |

@@ -256,7 +256,7 @@
          fi

      - name: Login to GitHub Container Registry
-       uses: docker/login-action@v3.5.0
+       uses: docker/login-action@v3.4.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}

@@ -279,7 +279,7 @@
    runs-on: ubuntu-latest
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2

      - name: Initialize git
        uses: home-assistant/actions/helpers/git-init@master

@@ -321,23 +321,23 @@
        registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2

      - name: Install Cosign
-       uses: sigstore/cosign-installer@v3.9.2
+       uses: sigstore/cosign-installer@v3.8.2
        with:
          cosign-release: "v2.2.3"

      - name: Login to DockerHub
        if: matrix.registry == 'docker.io/homeassistant'
-       uses: docker/login-action@v3.5.0
+       uses: docker/login-action@v3.4.0
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to GitHub Container Registry
        if: matrix.registry == 'ghcr.io/home-assistant'
-       uses: docker/login-action@v3.5.0
+       uses: docker/login-action@v3.4.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}

@@ -454,15 +454,15 @@
    if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
    steps:
      - name: Checkout the repository
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@v5.6.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}

      - name: Download translations
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@v4.3.0
        with:
          name: translations

@@ -480,7 +480,7 @@
          python -m build

      - name: Upload package to PyPI
-       uses: pypa/gh-action-pypi-publish@v1.13.0
+       uses: pypa/gh-action-pypi-publish@v1.12.4
        with:
          skip-existing: true

@@ -499,17 +499,17 @@
      HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
    steps:
      - name: Checkout repository
-       uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

      - name: Login to GitHub Container Registry
-       uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+       uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build Docker image
-       uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
+       uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
        with:
          context: . # So action will not pull the repository again
          file: ./script/hassfest/docker/Dockerfile

@@ -522,7 +522,7 @@
      - name: Push Docker image
        if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
        id: push
-       uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
+       uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
        with:
          context: . # So action will not pull the repository again
          file: ./script/hassfest/docker/Dockerfile

@@ -531,7 +531,7 @@

      - name: Generate artifact attestation
        if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
-       uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0
+       uses: actions/attest-build-provenance@db473fddc028af60658334401dc6fa3ffd8669fd # v2.3.0
        with:
          subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
          subject-digest: ${{ steps.push.outputs.digest }}
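Note the two pinning styles in this file: most steps pin actions by tag (for example actions/checkout@v5.0.0), while the hassfest job pins by full commit SHA with the tag kept in a trailing comment, which stays immutable even if the tag later moves. A sketch of the SHA-pinned form, using values taken from the diff above:

      - name: Checkout repository
        # The 40-character SHA is what actually runs; the comment only records
        # which tag it corresponded to when the pin was taken.
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0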
.github/workflows/ci.yaml (vendored, 235 changes)
@@ -37,10 +37,10 @@ on:
        type: boolean

env:
- CACHE_VERSION: 7
+ CACHE_VERSION: 12
  UV_CACHE_VERSION: 1
- MYPY_CACHE_VERSION: 1
- HA_SHORT_VERSION: "2025.10"
+ MYPY_CACHE_VERSION: 9
+ HA_SHORT_VERSION: "2025.6"
  DEFAULT_PYTHON: "3.13"
  ALL_PYTHON_VERSIONS: "['3.13']"
  # 10.3 is the oldest supported version
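Bumping CACHE_VERSION, UV_CACHE_VERSION, or MYPY_CACHE_VERSION invalidates the corresponding caches because the values are folded into the generated cache keys; a schematic sketch of that wiring, reusing the step name and id from the hunk below (the echoed key format is illustrative, not the exact script):

      - name: Generate partial Python venv restore key
        id: generate_python_cache_key
        run: >-
          echo "key=venv-${{ env.CACHE_VERSION }}-${{ env.HA_SHORT_VERSION }}" >> $GITHUB_OUTPUT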
@@ -94,7 +94,7 @@ jobs:
    runs-on: ubuntu-24.04
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2
      - name: Generate partial Python venv restore key
        id: generate_python_cache_key
        run: |

@@ -246,20 +246,20 @@ jobs:
      - info
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@v5.6.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache@v4.2.4
+       uses: actions/cache@v4.2.3
        with:
          path: venv
          key: >-
-           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-venv-${{
+           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{
            needs.info.outputs.pre-commit_cache_key }}
      - name: Create Python virtual environment
        if: steps.cache-venv.outputs.cache-hit != 'true'
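Two cache actions alternate throughout this workflow: actions/cache both restores and saves (used in the job that builds the venv), while actions/cache/restore is read-only and, combined with fail-on-cache-miss, makes downstream jobs fail fast instead of silently rebuilding. A condensed sketch with an illustrative key:

      # Producer job: restores when present, saves a new entry on job success.
      - uses: actions/cache@v4.2.4
        with:
          path: venv
          key: ${{ runner.os }}-${{ runner.arch }}-venv-example  # illustrative key
      # Consumer jobs: restore only; a miss fails the job rather than rebuilding.
      - uses: actions/cache/restore@v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
          key: ${{ runner.os }}-${{ runner.arch }}-venv-example  # must match the producer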
@@ -271,12 +271,12 @@
          uv pip install "$(cat requirements_test.txt | grep pre-commit)"
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-       uses: actions/cache@v4.2.4
+       uses: actions/cache@v4.2.3
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          lookup-only: true
          key: >-
-           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
+           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.pre-commit_cache_key }}
      - name: Install pre-commit dependencies
        if: steps.cache-precommit.outputs.cache-hit != 'true'

@@ -292,30 +292,30 @@
      - pre-commit
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@v5.6.0
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@v4.2.3
        with:
          path: venv
          fail-on-cache-miss: true
          key: >-
-           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-venv-${{
+           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{
            needs.info.outputs.pre-commit_cache_key }}
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@v4.2.3
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          fail-on-cache-miss: true
          key: >-
-           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
+           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.pre-commit_cache_key }}
      - name: Run ruff-format
        run: |

@@ -332,35 +332,35 @@
      - pre-commit
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@v5.6.0
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@v4.2.3
        with:
          path: venv
          fail-on-cache-miss: true
          key: >-
-           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-venv-${{
+           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{
            needs.info.outputs.pre-commit_cache_key }}
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@v4.2.3
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          fail-on-cache-miss: true
          key: >-
-           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
+           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.pre-commit_cache_key }}
      - name: Run ruff
        run: |
          . venv/bin/activate
-         pre-commit run --hook-stage manual ruff-check --all-files --show-diff-on-failure
+         pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
        env:
          RUFF_OUTPUT_FORMAT: github

@@ -372,30 +372,30 @@
      - pre-commit
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@v5.6.0
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@v4.2.3
        with:
          path: venv
          fail-on-cache-miss: true
          key: >-
-           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-venv-${{
+           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{
            needs.info.outputs.pre-commit_cache_key }}
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@v4.2.3
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          fail-on-cache-miss: true
          key: >-
-           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
+           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.pre-commit_cache_key }}

      - name: Register yamllint problem matcher

@@ -462,7 +462,7 @@
      - script/hassfest/docker/Dockerfile
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2
      - name: Register hadolint problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/hadolint.json"

@@ -481,10 +481,10 @@
        python-version: ${{ fromJSON(needs.info.outputs.python_versions) }}
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@v5.6.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true

@@ -497,27 +497,26 @@
          env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache@v4.2.4
+       uses: actions/cache@v4.2.3
        with:
          path: venv
          key: >-
-           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
+           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.python_cache_key }}
      - name: Restore uv wheel cache
        if: steps.cache-venv.outputs.cache-hit != 'true'
-       uses: actions/cache@v4.2.4
+       uses: actions/cache@v4.2.3
        with:
          path: ${{ env.UV_CACHE_DIR }}
          key: >-
-           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
+           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
            steps.generate-uv-key.outputs.key }}
          restore-keys: |
-           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-uv-${{
+           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-uv-${{
            env.UV_CACHE_VERSION }}-${{ steps.generate-uv-key.outputs.version }}-${{
            env.HA_SHORT_VERSION }}-
      - name: Install additional OS dependencies
        if: steps.cache-venv.outputs.cache-hit != 'true'
        timeout-minutes: 5
        run: |
-         sudo rm /etc/apt/sources.list.d/microsoft-prod.list
          sudo apt-get update

@@ -579,28 +578,27 @@
      - base
    steps:
      - name: Install additional OS dependencies
        timeout-minutes: 5
        run: |
-         sudo rm /etc/apt/sources.list.d/microsoft-prod.list
          sudo apt-get update
          sudo apt-get -y install \
            libturbojpeg
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@v5.6.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@v4.2.3
        with:
          path: venv
          fail-on-cache-miss: true
          key: >-
-           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
+           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.python_cache_key }}
      - name: Run hassfest
        run: |

@@ -619,21 +617,21 @@
      - base
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@v5.6.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@v4.2.3
        with:
          path: venv
          fail-on-cache-miss: true
          key: >-
-           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
+           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.python_cache_key }}
      - name: Run gen_requirements_all.py
        run: |

@@ -653,9 +651,9 @@
        && github.event_name == 'pull_request'
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2
      - name: Dependency review
-       uses: actions/dependency-review-action@v4.7.3
+       uses: actions/dependency-review-action@v4.6.0
        with:
          license-check: false # We use our own license audit checks

@@ -676,21 +674,21 @@
        python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@v5.6.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@v4.2.3
        with:
          path: venv
          fail-on-cache-miss: true
          key: >-
-           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
+           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.python_cache_key }}
      - name: Extract license data
        run: |

@@ -719,21 +717,21 @@
      - base
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@v5.6.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@v4.2.3
        with:
          path: venv
          fail-on-cache-miss: true
          key: >-
-           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
+           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.python_cache_key }}
      - name: Register pylint problem matcher
        run: |

@@ -766,21 +764,21 @@
      - base
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@v5.6.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@v4.2.3
        with:
          path: venv
          fail-on-cache-miss: true
          key: >-
-           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
+           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.python_cache_key }}
      - name: Register pylint problem matcher
        run: |

@@ -811,10 +809,10 @@
      - base
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@v5.6.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true

@@ -827,22 +825,22 @@
          env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@v4.2.3
        with:
          path: venv
          fail-on-cache-miss: true
          key: >-
-           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
+           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.python_cache_key }}
      - name: Restore mypy cache
-       uses: actions/cache@v4.2.4
+       uses: actions/cache@v4.2.3
        with:
          path: .mypy_cache
          key: >-
-           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
+           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
            steps.generate-mypy-key.outputs.key }}
          restore-keys: |
-           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-mypy-${{
+           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-mypy-${{
            env.MYPY_CACHE_VERSION }}-${{ steps.generate-mypy-key.outputs.version }}-${{
            env.HA_SHORT_VERSION }}-
      - name: Register mypy problem matcher
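The mypy cache above relies on restore-keys as a prefix fallback: when no entry matches the exact key, the newest cache whose key starts with the listed prefix is restored instead, so a partial cache still speeds up the run. Simplified sketch (key values shortened for illustration):

      - name: Restore mypy cache
        uses: actions/cache@v4.2.4
        with:
          path: .mypy_cache
          key: ${{ runner.os }}-${{ runner.arch }}-mypy-exact  # exact match tried first
          restore-keys: |
            ${{ runner.os }}-${{ runner.arch }}-mypy-  # prefix fallback on a miss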
@@ -879,7 +877,6 @@
    name: Split tests for full run
    steps:
      - name: Install additional OS dependencies
        timeout-minutes: 5
        run: |
-         sudo rm /etc/apt/sources.list.d/microsoft-prod.list
          sudo apt-get update

@@ -889,21 +886,21 @@
            libturbojpeg \
            libgammu-dev
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@v5.6.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@v4.2.3
        with:
          path: venv
          fail-on-cache-miss: true
          key: >-
-           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
+           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.python_cache_key }}
      - name: Run split_tests.py
        run: |

@@ -940,7 +937,6 @@
      Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
    steps:
      - name: Install additional OS dependencies
        timeout-minutes: 5
        run: |
-         sudo rm /etc/apt/sources.list.d/microsoft-prod.list
          sudo apt-get update

@@ -948,24 +944,22 @@
            bluez \
            ffmpeg \
            libturbojpeg \
-           libgammu-dev \
-           libxml2-utils
+           libgammu-dev
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@v5.6.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@v4.2.3
        with:
          path: venv
          fail-on-cache-miss: true
-         key: >-
-           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
+         key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.python_cache_key }}
      - name: Register Python problem matcher
        run: |
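The key: >- form that recurs here is a YAML folded block scalar: the two indented lines are joined with a single space into one key string, so the ${{ ... }} expression can span both source lines. The older branch side writes the same key as a plain scalar instead; both parse to a single line. Minimal illustration:

          key: >-
            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.python_cache_key }}
          # folds to one line ending in "-${{ needs.info.outputs.python_cache_key }}"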
@@ -974,7 +968,7 @@
        run: |
          echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
      - name: Download pytest_buckets
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@v4.3.0
        with:
          name: pytest_buckets
      - name: Compile English translations

@@ -1025,12 +1019,6 @@
          name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
          path: coverage.xml
          overwrite: true
-     - name: Beautify test results
-       # For easier identification of parsing errors
-       if: needs.info.outputs.skip_coverage != 'true'
-       run: |
-         xmllint --format "junit.xml" > "junit.xml-tmp"
-         mv "junit.xml-tmp" "junit.xml"
      - name: Upload test results artifact
        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
        uses: actions/upload-artifact@v4.6.2
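The removed Beautify step also explains why libxml2-utils disappears from the apt installs elsewhere in this diff: that package provides the xmllint binary the step depends on. The step, reassembled from the removed lines:

      - name: Beautify test results
        # For easier identification of parsing errors
        if: needs.info.outputs.skip_coverage != 'true'
        run: |
          # xmllint (from libxml2-utils) re-indents the JUnit XML; write to a
          # temporary file first, then replace the original.
          xmllint --format "junit.xml" > "junit.xml-tmp"
          mv "junit.xml-tmp" "junit.xml"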
@@ -1074,7 +1062,6 @@
      Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }}
    steps:
      - name: Install additional OS dependencies
        timeout-minutes: 5
        run: |
-         sudo rm /etc/apt/sources.list.d/microsoft-prod.list
          sudo apt-get update

@@ -1082,24 +1069,22 @@
            bluez \
            ffmpeg \
            libturbojpeg \
-           libmariadb-dev-compat \
-           libxml2-utils
+           libmariadb-dev-compat
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@v5.6.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@v4.2.3
        with:
          path: venv
          fail-on-cache-miss: true
-         key: >-
-           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
+         key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.python_cache_key }}
      - name: Register Python problem matcher
        run: |

@@ -1167,12 +1152,6 @@
            steps.pytest-partial.outputs.mariadb }}
          path: coverage.xml
          overwrite: true
-     - name: Beautify test results
-       # For easier identification of parsing errors
-       if: needs.info.outputs.skip_coverage != 'true'
-       run: |
-         xmllint --format "junit.xml" > "junit.xml-tmp"
-         mv "junit.xml-tmp" "junit.xml"
      - name: Upload test results artifact
        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
        uses: actions/upload-artifact@v4.6.2

@@ -1215,34 +1194,31 @@
      Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }}
    steps:
      - name: Install additional OS dependencies
        timeout-minutes: 5
        run: |
-         sudo rm /etc/apt/sources.list.d/microsoft-prod.list
          sudo apt-get update
          sudo apt-get -y install \
            bluez \
            ffmpeg \
-           libturbojpeg \
-           libxml2-utils
+           libturbojpeg
          sudo /usr/share/postgresql-common/pgdg/apt.postgresql.org.sh -y
          sudo apt-get -y install \
            postgresql-server-dev-14
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@v5.6.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@v4.2.3
        with:
          path: venv
          fail-on-cache-miss: true
-         key: >-
-           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
+         key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.python_cache_key }}
      - name: Register Python problem matcher
        run: |

@@ -1311,12 +1287,6 @@
            steps.pytest-partial.outputs.postgresql }}
          path: coverage.xml
          overwrite: true
-     - name: Beautify test results
-       # For easier identification of parsing errors
-       if: needs.info.outputs.skip_coverage != 'true'
-       run: |
-         xmllint --format "junit.xml" > "junit.xml-tmp"
-         mv "junit.xml-tmp" "junit.xml"
      - name: Upload test results artifact
        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
        uses: actions/upload-artifact@v4.6.2

@@ -1340,14 +1310,14 @@
    timeout-minutes: 10
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2
      - name: Download all coverage artifacts
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@v4.3.0
        with:
          pattern: coverage-*
      - name: Upload coverage to Codecov
        if: needs.info.outputs.test_full_suite == 'true'
-       uses: codecov/codecov-action@v5.5.1
+       uses: codecov/codecov-action@v5.4.2
        with:
          fail_ci_if_error: true
          flags: full-suite

@@ -1377,7 +1347,6 @@
      Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
    steps:
      - name: Install additional OS dependencies
        timeout-minutes: 5
        run: |
-         sudo rm /etc/apt/sources.list.d/microsoft-prod.list
          sudo apt-get update

@@ -1385,24 +1354,22 @@
            bluez \
            ffmpeg \
            libturbojpeg \
-           libgammu-dev \
-           libxml2-utils
+           libgammu-dev
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ matrix.python-version }}
        id: python
-       uses: actions/setup-python@v6.0.0
+       uses: actions/setup-python@v5.6.0
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@v4.2.4
+       uses: actions/cache/restore@v4.2.3
        with:
          path: venv
          fail-on-cache-miss: true
-         key: >-
-           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
+         key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.python_cache_key }}
      - name: Register Python problem matcher
        run: |

@@ -1465,12 +1432,6 @@
          name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
          path: coverage.xml
          overwrite: true
-     - name: Beautify test results
-       # For easier identification of parsing errors
-       if: needs.info.outputs.skip_coverage != 'true'
-       run: |
-         xmllint --format "junit.xml" > "junit.xml-tmp"
-         mv "junit.xml-tmp" "junit.xml"
      - name: Upload test results artifact
        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
        uses: actions/upload-artifact@v4.6.2

@@ -1491,14 +1452,14 @@
    timeout-minutes: 10
    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2
      - name: Download all coverage artifacts
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@v4.3.0
        with:
          pattern: coverage-*
      - name: Upload coverage to Codecov
        if: needs.info.outputs.test_full_suite == 'false'
-       uses: codecov/codecov-action@v5.5.1
+       uses: codecov/codecov-action@v5.4.2
        with:
          fail_ci_if_error: true
          token: ${{ secrets.CODECOV_TOKEN }}

@@ -1518,7 +1479,7 @@
    timeout-minutes: 10
    steps:
      - name: Download all coverage artifacts
-       uses: actions/download-artifact@v5.0.0
+       uses: actions/download-artifact@v4.3.0
        with:
          pattern: test-results-*
      - name: Upload test results to Codecov
.github/workflows/codeql.yml (vendored, 6 changes)
@@ -21,14 +21,14 @@ jobs:

    steps:
      - name: Check out code from GitHub
-       uses: actions/checkout@v5.0.0
+       uses: actions/checkout@v4.2.2

      - name: Initialize CodeQL
-       uses: github/codeql-action/init@v3.30.1
+       uses: github/codeql-action/init@v3.28.17
        with:
          languages: python

      - name: Perform CodeQL Analysis
-       uses: github/codeql-action/analyze@v3.30.1
+       uses: github/codeql-action/analyze@v3.28.17
        with:
          category: "/language:python"
.github/workflows/detect-duplicate-issues.yml (vendored, 385 changes)
@@ -1,385 +0,0 @@
-name: Auto-detect duplicate issues
-
-# yamllint disable-line rule:truthy
-on:
-  issues:
-    types: [labeled]
-
-permissions:
-  issues: write
-  models: read
-
-jobs:
-  detect-duplicates:
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Check if integration label was added and extract details
-        id: extract
-        uses: actions/github-script@v8
-        with:
-          script: |
-            // Debug: Log the event payload
-            console.log('Event name:', context.eventName);
-            console.log('Event action:', context.payload.action);
-            console.log('Event payload keys:', Object.keys(context.payload));
-
-            // Check the specific label that was added
-            const addedLabel = context.payload.label;
-            if (!addedLabel) {
-              console.log('No label found in labeled event payload');
-              core.setOutput('should_continue', 'false');
-              return;
-            }
-
-            console.log(`Label added: ${addedLabel.name}`);
-
-            if (!addedLabel.name.startsWith('integration:')) {
-              console.log('Added label is not an integration label, skipping duplicate detection');
-              core.setOutput('should_continue', 'false');
-              return;
-            }
-
-            console.log(`Integration label added: ${addedLabel.name}`);
-
-            let currentIssue;
-            let integrationLabels = [];
-
-            try {
-              const issue = await github.rest.issues.get({
-                owner: context.repo.owner,
-                repo: context.repo.repo,
-                issue_number: context.payload.issue.number
-              });
-
-              currentIssue = issue.data;
-
-              // Check if potential-duplicate label already exists
-              const hasPotentialDuplicateLabel = currentIssue.labels
-                .some(label => label.name === 'potential-duplicate');
-
-              if (hasPotentialDuplicateLabel) {
-                console.log('Issue already has potential-duplicate label, skipping duplicate detection');
-                core.setOutput('should_continue', 'false');
-                return;
-              }
-
-              integrationLabels = currentIssue.labels
-                .filter(label => label.name.startsWith('integration:'))
-                .map(label => label.name);
-            } catch (error) {
-              core.error(`Failed to fetch issue #${context.payload.issue.number}:`, error.message);
-              core.setOutput('should_continue', 'false');
-              return;
-            }
-
-            // Check if we've already posted a duplicate detection comment recently
-            let comments;
-            try {
-              comments = await github.rest.issues.listComments({
-                owner: context.repo.owner,
-                repo: context.repo.repo,
-                issue_number: context.payload.issue.number,
-                per_page: 10
-              });
-            } catch (error) {
-              core.error('Failed to fetch comments:', error.message);
-              // Continue anyway, worst case we might post a duplicate comment
-              comments = { data: [] };
-            }
-
-            // Check if we've already posted a duplicate detection comment
-            const recentDuplicateComment = comments.data.find(comment =>
-              comment.user && comment.user.login === 'github-actions[bot]' &&
-              comment.body.includes('<!-- workflow: detect-duplicate-issues -->')
-            );
-
-            if (recentDuplicateComment) {
-              console.log('Already posted duplicate detection comment, skipping');
-              core.setOutput('should_continue', 'false');
-              return;
-            }
-
-            core.setOutput('should_continue', 'true');
-            core.setOutput('current_number', currentIssue.number);
-            core.setOutput('current_title', currentIssue.title);
-            core.setOutput('current_body', currentIssue.body);
-            core.setOutput('current_url', currentIssue.html_url);
-            core.setOutput('integration_labels', JSON.stringify(integrationLabels));
-
-            console.log(`Current issue: #${currentIssue.number}`);
-            console.log(`Integration labels: ${integrationLabels.join(', ')}`);
-
-      - name: Fetch similar issues
-        id: fetch_similar
-        if: steps.extract.outputs.should_continue == 'true'
-        uses: actions/github-script@v8
-        env:
-          INTEGRATION_LABELS: ${{ steps.extract.outputs.integration_labels }}
-          CURRENT_NUMBER: ${{ steps.extract.outputs.current_number }}
-        with:
-          script: |
-            const integrationLabels = JSON.parse(process.env.INTEGRATION_LABELS);
-            const currentNumber = parseInt(process.env.CURRENT_NUMBER);
-
-            if (integrationLabels.length === 0) {
-              console.log('No integration labels found, skipping duplicate detection');
-              core.setOutput('has_similar', 'false');
-              return;
-            }
-
-            // Use GitHub search API to find issues with matching integration labels
-            console.log(`Searching for issues with integration labels: ${integrationLabels.join(', ')}`);
-
-            // Build search query for issues with any of the current integration labels
-            const labelQueries = integrationLabels.map(label => `label:"${label}"`);
-
-            // Calculate date 6 months ago
-            const sixMonthsAgo = new Date();
-            sixMonthsAgo.setMonth(sixMonthsAgo.getMonth() - 6);
-            const dateFilter = `created:>=${sixMonthsAgo.toISOString().split('T')[0]}`;
-
-            let searchQuery;
-
-            if (labelQueries.length === 1) {
-              searchQuery = `repo:${context.repo.owner}/${context.repo.repo} is:issue ${labelQueries[0]} ${dateFilter}`;
-            } else {
-              searchQuery = `repo:${context.repo.owner}/${context.repo.repo} is:issue (${labelQueries.join(' OR ')}) ${dateFilter}`;
-            }
-
-            console.log(`Search query: ${searchQuery}`);
-
-            let result;
-            try {
-              result = await github.rest.search.issuesAndPullRequests({
-                q: searchQuery,
-                per_page: 15,
-                sort: 'updated',
-                order: 'desc'
-              });
-            } catch (error) {
-              core.error('Failed to search for similar issues:', error.message);
-              if (error.status === 403 && error.message.includes('rate limit')) {
-                core.error('GitHub API rate limit exceeded');
-              }
-              core.setOutput('has_similar', 'false');
-              return;
-            }
-
-            // Filter out the current issue, pull requests, and newer issues (higher numbers)
-            const similarIssues = result.data.items
-              .filter(item =>
-                item.number !== currentNumber &&
-                !item.pull_request &&
-                item.number < currentNumber // Only include older issues (lower numbers)
-              )
-              .map(item => ({
-                number: item.number,
-                title: item.title,
-                body: item.body,
-                url: item.html_url,
-                state: item.state,
-                createdAt: item.created_at,
-                updatedAt: item.updated_at,
-                comments: item.comments,
-                labels: item.labels.map(l => l.name)
-              }));
-
-            console.log(`Found ${similarIssues.length} issues with matching integration labels`);
-            console.log('Raw similar issues:', JSON.stringify(similarIssues.slice(0, 3), null, 2));
-
-            if (similarIssues.length === 0) {
-              console.log('No similar issues found, setting has_similar to false');
-              core.setOutput('has_similar', 'false');
-              return;
-            }
-
-            console.log('Similar issues found, setting has_similar to true');
-            core.setOutput('has_similar', 'true');
-
-            // Clean the issue data to prevent JSON parsing issues
-            const cleanedIssues = similarIssues.slice(0, 15).map(item => {
-              // Handle body with improved truncation and null handling
-              let cleanBody = '';
-              if (item.body && typeof item.body === 'string') {
-                // Remove control characters
-                const cleaned = item.body.replace(/[\u0000-\u001F\u007F-\u009F]/g, '');
-                // Truncate to 1000 characters and add ellipsis if needed
-                cleanBody = cleaned.length > 1000
-                  ? cleaned.substring(0, 1000) + '...'
-                  : cleaned;
-              }
-
-              return {
-                number: item.number,
-                title: item.title.replace(/[\u0000-\u001F\u007F-\u009F]/g, ''), // Remove control characters
-                body: cleanBody,
-                url: item.url,
-                state: item.state,
-                createdAt: item.createdAt,
-                updatedAt: item.updatedAt,
-                comments: item.comments,
-                labels: item.labels
-              };
-            });
-
-            console.log(`Cleaned issues count: ${cleanedIssues.length}`);
-            console.log('First cleaned issue:', JSON.stringify(cleanedIssues[0], null, 2));
-
-            core.setOutput('similar_issues', JSON.stringify(cleanedIssues));
-
-      - name: Detect duplicates using AI
-        id: ai_detection
-        if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
-        uses: actions/ai-inference@v2.0.1
-        with:
-          model: openai/gpt-4o
-          system-prompt: |
-            You are a Home Assistant issue duplicate detector. Your task is to identify TRUE DUPLICATES - issues that report the EXACT SAME problem, not just similar or related issues.
-
-            CRITICAL: An issue is ONLY a duplicate if:
-            - It describes the SAME problem with the SAME root cause
-            - Issues about the same integration but different problems are NOT duplicates
-            - Issues with similar symptoms but different causes are NOT duplicates
-
-            Important considerations:
-            - Open issues are more relevant than closed ones for duplicate detection
-            - Recently updated issues may indicate ongoing work or discussion
-            - Issues with more comments are generally more relevant and active
-            - Older closed issues might be resolved differently than newer approaches
-            - Consider the time between issues - very old issues may have different contexts
-
-            Rules:
-            1. ONLY mark as duplicate if the issues describe IDENTICAL problems
-            2. Look for issues that report the same problem or request the same functionality
-            3. Different error messages = NOT a duplicate (even if same integration)
-            4. For CLOSED issues, only mark as duplicate if they describe the EXACT same problem
-            5. For OPEN issues, use a lower threshold (90%+ similarity)
-            6. Prioritize issues with higher comment counts as they indicate more activity/relevance
-            7. When in doubt, do NOT mark as duplicate
-            8. Return ONLY a JSON array of issue numbers that are duplicates
-            9. If no duplicates are found, return an empty array: []
-            10. Maximum 5 potential duplicates, prioritize open issues with comments
-            11. Consider the age of issues - prefer recent duplicates over very old ones
-
-            Example response format:
-            [1234, 5678, 9012]
-
-          prompt: |
-            Current issue (just created):
-            Title: ${{ steps.extract.outputs.current_title }}
-            Body: ${{ steps.extract.outputs.current_body }}
-
-            Other issues to compare against (each includes state, creation date, last update, and comment count):
-            ${{ steps.fetch_similar.outputs.similar_issues }}
-
-            Analyze these issues and identify which ones describe IDENTICAL problems and thus are duplicates of the current issue. When sorting them, consider their state (open/closed), how recently they were updated, and their comment count (higher = more relevant).
-
-          max-tokens: 100
-
-      - name: Post duplicate detection results
-        id: post_results
-        if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
-        uses: actions/github-script@v8
-        env:
-          AI_RESPONSE: ${{ steps.ai_detection.outputs.response }}
-          SIMILAR_ISSUES: ${{ steps.fetch_similar.outputs.similar_issues }}
-        with:
-          script: |
-            const aiResponse = process.env.AI_RESPONSE;
-
-            console.log('Raw AI response:', JSON.stringify(aiResponse));
-
-            let duplicateNumbers = [];
-            try {
-              // Clean the response of any potential control characters
-              const cleanResponse = aiResponse.trim().replace(/[\u0000-\u001F\u007F-\u009F]/g, '');
-              console.log('Cleaned AI response:', cleanResponse);
-
-              duplicateNumbers = JSON.parse(cleanResponse);
-
-              // Ensure it's an array and contains only numbers
-              if (!Array.isArray(duplicateNumbers)) {
-                console.log('AI response is not an array, trying to extract numbers');
-                const numberMatches = cleanResponse.match(/\d+/g);
-                duplicateNumbers = numberMatches ? numberMatches.map(n => parseInt(n)) : [];
-              }
-
-              // Filter to only valid numbers
-              duplicateNumbers = duplicateNumbers.filter(n => typeof n === 'number' && !isNaN(n));
-
-            } catch (error) {
-              console.log('Failed to parse AI response as JSON:', error.message);
-              console.log('Raw response:', aiResponse);
-
-              // Fallback: try to extract numbers from the response
-              const numberMatches = aiResponse.match(/\d+/g);
-              duplicateNumbers = numberMatches ? numberMatches.map(n => parseInt(n)) : [];
-              console.log('Extracted numbers as fallback:', duplicateNumbers);
-            }
-
-            if (!Array.isArray(duplicateNumbers) || duplicateNumbers.length === 0) {
-              console.log('No duplicates detected by AI');
-              return;
-            }
-
-            console.log(`AI detected ${duplicateNumbers.length} potential duplicates: ${duplicateNumbers.join(', ')}`);
-
-            // Get details of detected duplicates
-            const similarIssues = JSON.parse(process.env.SIMILAR_ISSUES);
-            const duplicates = similarIssues.filter(issue => duplicateNumbers.includes(issue.number));
-
-            if (duplicates.length === 0) {
-              console.log('No matching issues found for detected numbers');
-              return;
-            }
-
-            // Create comment with duplicate detection results
-            const duplicateLinks = duplicates.map(issue => `- [#${issue.number}: ${issue.title}](${issue.url})`).join('\n');
-
-            const commentBody = [
-              '<!-- workflow: detect-duplicate-issues -->',
-              '### 🔍 **Potential duplicate detection**',
-              '',
-              'I\'ve analyzed similar issues and found the following potential duplicates:',
-              '',
-              duplicateLinks,
-              '',
-              '**What to do next:**',
-              '1. Please review these issues to see if they match your issue',
-              '2. If you find an existing issue that covers your problem:',
-              '   - Consider closing this issue',
-              '   - Add your findings or 👍 on the existing issue instead',
-              '3. If your issue is different or adds new aspects, please clarify how it differs',
-              '',
-              'This helps keep our issues organized and ensures similar issues are consolidated for better visibility.',
-              '',
-              '*This message was generated automatically by our duplicate detection system.*'
-            ].join('\n');
-
-            try {
-              await github.rest.issues.createComment({
-                owner: context.repo.owner,
-                repo: context.repo.repo,
-                issue_number: context.payload.issue.number,
-                body: commentBody
-              });
-
-              console.log(`Posted duplicate detection comment with ${duplicates.length} potential duplicates`);
-
-              // Add the potential-duplicate label
-              await github.rest.issues.addLabels({
-                owner: context.repo.owner,
-                repo: context.repo.repo,
-                issue_number: context.payload.issue.number,
-                labels: ['potential-duplicate']
-              });
-
-              console.log('Added potential-duplicate label to the issue');
-            } catch (error) {
-              core.error('Failed to post duplicate detection comment or add label:', error.message);
-              if (error.status === 403) {
-                core.error('Permission denied or rate limit exceeded');
-              }
-              // Don't throw - we've done the analysis, just couldn't post the result
-            }
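Both removed workflows share the same actions/github-script pattern: one step publishes values with core.setOutput, and later steps gate on them through steps.<id>.outputs.<name>. A minimal sketch of that wiring, with illustrative step ids:

      - id: extract
        uses: actions/github-script@v8
        with:
          script: |
            // Anything set here becomes a step output readable by later steps.
            core.setOutput('should_continue', 'true');
      - name: Runs only when the first step said to continue
        if: steps.extract.outputs.should_continue == 'true'
        run: echo "continuing"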
.github/workflows/detect-non-english-issues.yml (vendored, 193 changes)
@@ -1,193 +0,0 @@
name: Auto-detect non-English issues

# yamllint disable-line rule:truthy
on:
  issues:
    types: [opened]

permissions:
  issues: write
  models: read

jobs:
  detect-language:
    runs-on: ubuntu-latest

    steps:
      - name: Check issue language
        id: detect_language
        uses: actions/github-script@v8
        env:
          ISSUE_NUMBER: ${{ github.event.issue.number }}
          ISSUE_TITLE: ${{ github.event.issue.title }}
          ISSUE_BODY: ${{ github.event.issue.body }}
          ISSUE_USER_TYPE: ${{ github.event.issue.user.type }}
        with:
          script: |
            // Get the issue details from environment variables
            const issueNumber = process.env.ISSUE_NUMBER;
            const issueTitle = process.env.ISSUE_TITLE || '';
            const issueBody = process.env.ISSUE_BODY || '';
            const userType = process.env.ISSUE_USER_TYPE;

            // Skip language detection for bot users
            if (userType === 'Bot') {
              console.log('Skipping language detection for bot user');
              core.setOutput('should_continue', 'false');
              return;
            }

            console.log(`Checking language for issue #${issueNumber}`);
            console.log(`Title: ${issueTitle}`);

            // Combine title and body for language detection
            const fullText = `${issueTitle}\n\n${issueBody}`;

            // Check if the text is too short to reliably detect language
            if (fullText.trim().length < 20) {
              console.log('Text too short for reliable language detection');
              core.setOutput('should_continue', 'false'); // Skip processing for very short text
              return;
            }

            core.setOutput('issue_number', issueNumber);
            core.setOutput('issue_text', fullText);
            core.setOutput('should_continue', 'true');

      - name: Detect language using AI
        id: ai_language_detection
        if: steps.detect_language.outputs.should_continue == 'true'
        uses: actions/ai-inference@v2.0.1
        with:
          model: openai/gpt-4o-mini
          system-prompt: |
            You are a language detection system. Your task is to determine if the provided text is written in English or another language.

            Rules:
            1. Analyze the text and determine the primary language of the USER'S DESCRIPTION only
            2. IGNORE markdown headers (lines starting with #, ##, ###, etc.) as these are from issue templates, not user input
            3. IGNORE all code blocks (text between ``` or ` markers) as they may contain system-generated error messages in other languages
            4. IGNORE error messages, logs, and system output even if not in code blocks - these often appear in the user's system language
            5. Consider technical terms, code snippets, URLs, and file paths as neutral (they don't indicate non-English)
            6. Focus ONLY on the actual sentences and descriptions written by the user explaining their issue
            7. If the user's explanation/description is in English but includes non-English error messages or logs, consider it ENGLISH
            8. Return ONLY a JSON object with two fields:
               - "is_english": boolean (true if the user's description is primarily in English, false otherwise)
               - "detected_language": string (the name of the detected language, e.g., "English", "Spanish", "Chinese", etc.)
            9. Be lenient - if the user's explanation is in English with non-English system output, it's still English
            10. Common programming terms, error messages, and technical jargon should not be considered as non-English
            11. If you cannot reliably determine the language, set detected_language to "undefined"

            Example response:
            {"is_english": false, "detected_language": "Spanish"}

          prompt: |
            Please analyze the following issue text and determine if it is written in English:

            ${{ steps.detect_language.outputs.issue_text }}

          max-tokens: 50

      - name: Process non-English issues
        if: steps.detect_language.outputs.should_continue == 'true'
        uses: actions/github-script@v8
        env:
          AI_RESPONSE: ${{ steps.ai_language_detection.outputs.response }}
          ISSUE_NUMBER: ${{ steps.detect_language.outputs.issue_number }}
        with:
          script: |
            const issueNumber = parseInt(process.env.ISSUE_NUMBER);
            const aiResponse = process.env.AI_RESPONSE;

            console.log('AI language detection response:', aiResponse);

            let languageResult;
            try {
              languageResult = JSON.parse(aiResponse.trim());

              // Validate the response structure
              if (!languageResult || typeof languageResult.is_english !== 'boolean') {
                throw new Error('Invalid response structure');
              }
            } catch (error) {
              core.error(`Failed to parse AI response: ${error.message}`);
              console.log('Raw AI response:', aiResponse);

              // Log more details for debugging
              core.warning('Defaulting to English due to parsing error');

              // Default to English if we can't parse the response
              return;
            }

            if (languageResult.is_english) {
              console.log('Issue is in English, no action needed');
              return;
            }

            // If language is undefined or not detected, skip processing
            if (!languageResult.detected_language || languageResult.detected_language === 'undefined') {
              console.log('Language could not be determined, skipping processing');
              return;
            }

            console.log(`Issue detected as non-English: ${languageResult.detected_language}`);

            // Post comment explaining the language requirement
            const commentBody = [
              '<!-- workflow: detect-non-english-issues -->',
              '### 🌐 Non-English issue detected',
              '',
              `This issue appears to be written in **${languageResult.detected_language}** rather than English.`,
              '',
              'The Home Assistant project uses English as the primary language for issues to ensure that everyone in our international community can participate and help resolve issues. This allows any of our thousands of contributors to jump in and provide assistance.',
              '',
              '**What to do:**',
              '1. Re-create the issue using the English language',
              '2. If you need help with translation, consider using:',
              '   - Translation tools like Google Translate',
              '   - AI assistants like ChatGPT or Claude',
              '',
              'This helps our community provide the best possible support and ensures your issue gets the attention it deserves from our global contributor base.',
              '',
              'Thank you for your understanding! 🙏'
            ].join('\n');

            try {
              // Add comment
              await github.rest.issues.createComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: issueNumber,
                body: commentBody
              });

              console.log('Posted language requirement comment');

              // Add non-english label
              await github.rest.issues.addLabels({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: issueNumber,
                labels: ['non-english']
              });

              console.log('Added non-english label');

              // Close the issue
              await github.rest.issues.update({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: issueNumber,
                state: 'closed',
                state_reason: 'not_planned'
              });

              console.log('Closed the issue');

            } catch (error) {
              core.error(`Failed to process non-English issue: ${error.message}`);
              if (error.status === 403) {
                core.error('Permission denied or rate limit exceeded');
              }
            }
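Note that the parse step in this workflow fails closed: anything that is not valid JSON with a boolean `is_english` field is treated as English, so a malformed model response can never close an issue. A standalone sketch of the same validate-then-branch logic, with invented sample responses:

```js
// Mirrors the validation in the workflow step above: unparseable or malformed
// output defaults to English, and an undetermined language is skipped too.
function classify(aiResponse) {
  let result;
  try {
    result = JSON.parse(aiResponse.trim());
    if (!result || typeof result.is_english !== 'boolean') {
      throw new Error('Invalid response structure');
    }
  } catch {
    return 'default-english'; // parsing failed, take no action
  }
  if (result.is_english) return 'english';
  if (!result.detected_language || result.detected_language === 'undefined') {
    return 'skip'; // language could not be determined
  }
  return `non-english:${result.detected_language}`;
}

console.log(classify('{"is_english": false, "detected_language": "Spanish"}')); // non-english:Spanish
console.log(classify('not json at all'));                                       // default-english
console.log(classify('{"is_english": true, "detected_language": "English"}'));  // english
```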
84
.github/workflows/restrict-task-creation.yml
vendored
@@ -1,84 +0,0 @@
name: Restrict task creation

# yamllint disable-line rule:truthy
on:
  issues:
    types: [opened]

jobs:
  check-authorization:
    runs-on: ubuntu-latest
    # Only run if this is a Task issue type (from the issue form)
    if: github.event.issue.type.name == 'Task'
    steps:
      - name: Check if user is authorized
        uses: actions/github-script@v8
        with:
          script: |
            const issueAuthor = context.payload.issue.user.login;

            // First check if user is an organization member
            try {
              await github.rest.orgs.checkMembershipForUser({
                org: 'home-assistant',
                username: issueAuthor
              });
              console.log(`✅ ${issueAuthor} is an organization member`);
              return; // Authorized, no need to check further
            } catch (error) {
              console.log(`ℹ️ ${issueAuthor} is not an organization member, checking codeowners...`);
            }

            // If not an org member, check if they're a codeowner
            try {
              // Fetch CODEOWNERS file from the repository
              const { data: codeownersFile } = await github.rest.repos.getContent({
                owner: context.repo.owner,
                repo: context.repo.repo,
                path: 'CODEOWNERS',
                ref: 'dev'
              });

              // Decode the content (it's base64 encoded)
              const codeownersContent = Buffer.from(codeownersFile.content, 'base64').toString('utf-8');

              // Check if the issue author is mentioned in CODEOWNERS
              // GitHub usernames in CODEOWNERS are prefixed with @
              if (codeownersContent.includes(`@${issueAuthor}`)) {
                console.log(`✅ ${issueAuthor} is an integration code owner`);
                return; // Authorized
              }
            } catch (error) {
              console.error('Error checking CODEOWNERS:', error);
            }

            // If we reach here, user is not authorized
            console.log(`❌ ${issueAuthor} is not authorized to create Task issues`);

            // Close the issue with a comment
            await github.rest.issues.createComment({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
              body: `Hi @${issueAuthor}, thank you for your contribution!\n\n` +
                    `Task issues are restricted to Open Home Foundation staff, authorized contributors, and integration code owners.\n\n` +
                    `If you would like to:\n` +
                    `- Report a bug: Please use the [bug report form](https://github.com/home-assistant/core/issues/new?template=bug_report.yml)\n` +
                    `- Request a feature: Please submit to [Feature Requests](https://github.com/orgs/home-assistant/discussions)\n\n` +
                    `If you believe you should have access to create Task issues, please contact the maintainers.`
            });

            await github.rest.issues.update({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
              state: 'closed'
            });

            // Add a label to indicate this was auto-closed
            await github.rest.issues.addLabels({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
              labels: ['auto-closed']
            });
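One caveat in the CODEOWNERS check above: `includes()` is a plain substring test, so an author whose handle is a prefix of a listed code owner's handle (say a hypothetical `marcel` against the real entry `@marcelveldt`) would pass. A hedged sketch of a stricter match follows; this is an illustration, not what the workflow ships:

```js
// Hypothetical stricter variant: require the matched handle to end where the
// username ends. GitHub usernames may contain letters, digits, and hyphens,
// so reject any match followed by another username character.
function isCodeOwner(codeownersContent, username) {
  // Escape regex metacharacters before embedding the username in a pattern.
  const escaped = username.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  return new RegExp(`@${escaped}(?![A-Za-z0-9-])`, 'i').test(codeownersContent);
}

console.log(isCodeOwner('/homeassistant/components/hue/ @marcelveldt', 'marcel'));      // false
console.log(isCodeOwner('/homeassistant/components/hue/ @marcelveldt', 'marcelveldt')); // true
```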
6
.github/workflows/stale.yml
vendored
@@ -17,7 +17,7 @@ jobs:
# - No PRs marked as no-stale
# - No issues (-1)
- name: 60 days stale PRs policy
uses: actions/stale@v10.0.0
uses: actions/stale@v9.1.0
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 60
@@ -57,7 +57,7 @@ jobs:
# - No issues marked as no-stale or help-wanted
# - No PRs (-1)
- name: 90 days stale issues
uses: actions/stale@v10.0.0
uses: actions/stale@v9.1.0
with:
repo-token: ${{ steps.token.outputs.token }}
days-before-stale: 90
@@ -87,7 +87,7 @@ jobs:
# - No Issues marked as no-stale or help-wanted
# - No PRs (-1)
- name: Needs more information stale issues policy
uses: actions/stale@v10.0.0
uses: actions/stale@v9.1.0
with:
repo-token: ${{ steps.token.outputs.token }}
only-labels: "needs-more-information"
4
.github/workflows/translations.yml
vendored
@@ -19,10 +19,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v6.0.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

26
.github/workflows/wheels.yml
vendored
@@ -32,11 +32,11 @@ jobs:
architectures: ${{ steps.info.outputs.architectures }}
steps:
- name: Checkout the repository
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v6.0.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@@ -135,20 +135,20 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2

- name: Download env_file
uses: actions/download-artifact@v5.0.0
uses: actions/download-artifact@v4.3.0
with:
name: env_file

- name: Download build_constraints
uses: actions/download-artifact@v5.0.0
uses: actions/download-artifact@v4.3.0
with:
name: build_constraints

- name: Download requirements_diff
uses: actions/download-artifact@v5.0.0
uses: actions/download-artifact@v4.3.0
with:
name: requirements_diff

@@ -159,7 +159,7 @@
sed -i "/uv/d" requirements_diff.txt

- name: Build wheels
uses: home-assistant/wheels@2025.07.0
uses: home-assistant/wheels@2025.03.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
@@ -184,25 +184,25 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v5.0.0
uses: actions/checkout@v4.2.2

- name: Download env_file
uses: actions/download-artifact@v5.0.0
uses: actions/download-artifact@v4.3.0
with:
name: env_file

- name: Download build_constraints
uses: actions/download-artifact@v5.0.0
uses: actions/download-artifact@v4.3.0
with:
name: build_constraints

- name: Download requirements_diff
uses: actions/download-artifact@v5.0.0
uses: actions/download-artifact@v4.3.0
with:
name: requirements_diff

- name: Download requirements_all_wheels
uses: actions/download-artifact@v5.0.0
uses: actions/download-artifact@v4.3.0
with:
name: requirements_all_wheels

@@ -219,7 +219,7 @@
sed -i "/uv/d" requirements_diff.txt

- name: Build wheels
uses: home-assistant/wheels@2025.07.0
uses: home-assistant/wheels@2025.03.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
6
.gitignore
vendored
@@ -137,8 +137,4 @@ tmp_cache
.ropeproject

# Will be created from script/split_tests.py
pytest_buckets.txt

# AI tooling
.claude

pytest_buckets.txt
@@ -1,8 +1,8 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.12.1
rev: v0.11.0
hooks:
- id: ruff-check
- id: ruff
args:
- --fix
- id: ruff-format
@@ -18,7 +18,7 @@ repos:
exclude_types: [csv, json, html]
exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v6.0.0
rev: v5.0.0
hooks:
- id: check-executables-have-shebangs
stages: [manual]
@@ -30,7 +30,7 @@ repos:
- --branch=master
- --branch=rc
- repo: https://github.com/adrienverge/yamllint.git
rev: v1.37.1
rev: v1.35.1
hooks:
- id: yamllint
- repo: https://github.com/pre-commit/mirrors-prettier
@@ -53,7 +53,6 @@ homeassistant.components.air_quality.*
homeassistant.components.airgradient.*
homeassistant.components.airly.*
homeassistant.components.airnow.*
homeassistant.components.airos.*
homeassistant.components.airq.*
homeassistant.components.airthings.*
homeassistant.components.airthings_ble.*
@@ -66,9 +65,7 @@ homeassistant.components.aladdin_connect.*
homeassistant.components.alarm_control_panel.*
homeassistant.components.alert.*
homeassistant.components.alexa.*
homeassistant.components.alexa_devices.*
homeassistant.components.alpha_vantage.*
homeassistant.components.altruist.*
homeassistant.components.amazon_polly.*
homeassistant.components.amberelectric.*
homeassistant.components.ambient_network.*
@@ -273,7 +270,6 @@ homeassistant.components.image_processing.*
homeassistant.components.image_upload.*
homeassistant.components.imap.*
homeassistant.components.imgw_pib.*
homeassistant.components.immich.*
homeassistant.components.incomfort.*
homeassistant.components.input_button.*
homeassistant.components.input_select.*
@@ -307,10 +303,10 @@ homeassistant.components.ld2410_ble.*
homeassistant.components.led_ble.*
homeassistant.components.lektrico.*
homeassistant.components.letpot.*
homeassistant.components.libre_hardware_monitor.*
homeassistant.components.lidarr.*
homeassistant.components.lifx.*
homeassistant.components.light.*
homeassistant.components.linear_garage_door.*
homeassistant.components.linkplay.*
homeassistant.components.litejet.*
homeassistant.components.litterrobot.*
@@ -378,13 +374,10 @@ homeassistant.components.onedrive.*
homeassistant.components.onewire.*
homeassistant.components.onkyo.*
homeassistant.components.open_meteo.*
homeassistant.components.open_router.*
homeassistant.components.openai_conversation.*
homeassistant.components.openexchangerates.*
homeassistant.components.opensky.*
homeassistant.components.openuv.*
homeassistant.components.opnsense.*
homeassistant.components.opower.*
homeassistant.components.oralb.*
homeassistant.components.otbr.*
homeassistant.components.overkiz.*
@@ -392,7 +385,6 @@ homeassistant.components.overseerr.*
homeassistant.components.p1_monitor.*
homeassistant.components.pandora.*
homeassistant.components.panel_custom.*
homeassistant.components.paperless_ngx.*
homeassistant.components.peblar.*
homeassistant.components.peco.*
homeassistant.components.pegel_online.*
@@ -442,6 +434,7 @@ homeassistant.components.roku.*
homeassistant.components.romy.*
homeassistant.components.rpi_power.*
homeassistant.components.rss_feed_template.*
homeassistant.components.rtsp_to_webrtc.*
homeassistant.components.russound_rio.*
homeassistant.components.ruuvi_gateway.*
homeassistant.components.ruuvitag_ble.*
@@ -460,7 +453,6 @@ homeassistant.components.sensorpush_cloud.*
homeassistant.components.sensoterra.*
homeassistant.components.senz.*
homeassistant.components.sfr_box.*
homeassistant.components.sftp_storage.*
homeassistant.components.shell_command.*
homeassistant.components.shelly.*
homeassistant.components.shopping_list.*
@@ -469,7 +461,6 @@ homeassistant.components.simplisafe.*
homeassistant.components.siren.*
homeassistant.components.skybell.*
homeassistant.components.slack.*
homeassistant.components.sleep_as_android.*
homeassistant.components.sleepiq.*
homeassistant.components.smhi.*
homeassistant.components.smlight.*
@@ -505,12 +496,10 @@ homeassistant.components.tag.*
homeassistant.components.tailscale.*
homeassistant.components.tailwind.*
homeassistant.components.tami4.*
homeassistant.components.tankerkoenig.*
homeassistant.components.tautulli.*
homeassistant.components.tcp.*
homeassistant.components.technove.*
homeassistant.components.tedee.*
homeassistant.components.telegram_bot.*
homeassistant.components.text.*
homeassistant.components.thethingsnetwork.*
homeassistant.components.threshold.*
@@ -541,7 +530,6 @@ homeassistant.components.unifiprotect.*
homeassistant.components.upcloud.*
homeassistant.components.update.*
homeassistant.components.uptime.*
homeassistant.components.uptime_kuma.*
homeassistant.components.uptimerobot.*
homeassistant.components.usb.*
homeassistant.components.uvc.*
@@ -551,7 +539,6 @@ homeassistant.components.valve.*
homeassistant.components.velbus.*
homeassistant.components.vlc_telnet.*
homeassistant.components.vodafone_station.*
homeassistant.components.volvo.*
homeassistant.components.wake_on_lan.*
homeassistant.components.wake_word.*
homeassistant.components.wallbox.*
2
.vscode/tasks.json
vendored
@@ -45,7 +45,7 @@
{
"label": "Ruff",
"type": "shell",
"command": "pre-commit run ruff-check --all-files",
"command": "pre-commit run ruff --all-files",
"group": {
"kind": "test",
"isDefault": true
172
CODEOWNERS
generated
@@ -46,8 +46,8 @@ build.json @home-assistant/supervisor
/tests/components/accuweather/ @bieniu
/homeassistant/components/acmeda/ @atmurray
/tests/components/acmeda/ @atmurray
/homeassistant/components/adax/ @danielhiversen @lazytarget
/tests/components/adax/ @danielhiversen @lazytarget
/homeassistant/components/adax/ @danielhiversen
/tests/components/adax/ @danielhiversen
/homeassistant/components/adguard/ @frenck
/tests/components/adguard/ @frenck
/homeassistant/components/ads/ @mrpasztoradam
@@ -57,8 +57,6 @@ build.json @home-assistant/supervisor
/tests/components/aemet/ @Noltari
/homeassistant/components/agent_dvr/ @ispysoftware
/tests/components/agent_dvr/ @ispysoftware
/homeassistant/components/ai_task/ @home-assistant/core
/tests/components/ai_task/ @home-assistant/core
/homeassistant/components/air_quality/ @home-assistant/core
/tests/components/air_quality/ @home-assistant/core
/homeassistant/components/airgradient/ @airgradienthq @joostlek
@@ -67,8 +65,6 @@ build.json @home-assistant/supervisor
/tests/components/airly/ @bieniu
/homeassistant/components/airnow/ @asymworks
/tests/components/airnow/ @asymworks
/homeassistant/components/airos/ @CoMPaTech
/tests/components/airos/ @CoMPaTech
/homeassistant/components/airq/ @Sibgatulin @dl2080
/tests/components/airq/ @Sibgatulin @dl2080
/homeassistant/components/airthings/ @danielhiversen @LaStrada
@@ -87,18 +83,12 @@ build.json @home-assistant/supervisor
/tests/components/airzone/ @Noltari
/homeassistant/components/airzone_cloud/ @Noltari
/tests/components/airzone_cloud/ @Noltari
/homeassistant/components/aladdin_connect/ @swcloudgenie
/tests/components/aladdin_connect/ @swcloudgenie
/homeassistant/components/alarm_control_panel/ @home-assistant/core
/tests/components/alarm_control_panel/ @home-assistant/core
/homeassistant/components/alert/ @home-assistant/core @frenck
/tests/components/alert/ @home-assistant/core @frenck
/homeassistant/components/alexa/ @home-assistant/cloud @ochlocracy @jbouwh
/tests/components/alexa/ @home-assistant/cloud @ochlocracy @jbouwh
/homeassistant/components/alexa_devices/ @chemelli74
/tests/components/alexa_devices/ @chemelli74
/homeassistant/components/altruist/ @airalab @LoSk-p
/tests/components/altruist/ @airalab @LoSk-p
/homeassistant/components/amazon_polly/ @jschlyter
/homeassistant/components/amberelectric/ @madpilot
/tests/components/amberelectric/ @madpilot
@@ -154,12 +144,12 @@ build.json @home-assistant/supervisor
/tests/components/arve/ @ikalnyi
/homeassistant/components/aseko_pool_live/ @milanmeu
/tests/components/aseko_pool_live/ @milanmeu
/homeassistant/components/assist_pipeline/ @synesthesiam @arturpragacz
/tests/components/assist_pipeline/ @synesthesiam @arturpragacz
/homeassistant/components/assist_satellite/ @home-assistant/core @synesthesiam @arturpragacz
/tests/components/assist_satellite/ @home-assistant/core @synesthesiam @arturpragacz
/homeassistant/components/asuswrt/ @kennedyshead @ollo69 @Vaskivskyi
/tests/components/asuswrt/ @kennedyshead @ollo69 @Vaskivskyi
/homeassistant/components/assist_pipeline/ @balloob @synesthesiam
/tests/components/assist_pipeline/ @balloob @synesthesiam
/homeassistant/components/assist_satellite/ @home-assistant/core @synesthesiam
/tests/components/assist_satellite/ @home-assistant/core @synesthesiam
/homeassistant/components/asuswrt/ @kennedyshead @ollo69
/tests/components/asuswrt/ @kennedyshead @ollo69
/homeassistant/components/atag/ @MatsNL
/tests/components/atag/ @MatsNL
/homeassistant/components/aten_pe/ @mtdcr
@@ -212,8 +202,8 @@ build.json @home-assistant/supervisor
/tests/components/blebox/ @bbx-a @swistakm
/homeassistant/components/blink/ @fronzbot @mkmer
/tests/components/blink/ @fronzbot @mkmer
/homeassistant/components/blue_current/ @gleeuwen @NickKoepr @jtodorova23
/tests/components/blue_current/ @gleeuwen @NickKoepr @jtodorova23
/homeassistant/components/blue_current/ @Floris272 @gleeuwen
/tests/components/blue_current/ @Floris272 @gleeuwen
/homeassistant/components/bluemaestro/ @bdraco
/tests/components/bluemaestro/ @bdraco
/homeassistant/components/blueprint/ @home-assistant/core
@@ -298,8 +288,8 @@ build.json @home-assistant/supervisor
/tests/components/configurator/ @home-assistant/core
/homeassistant/components/control4/ @lawtancool
/tests/components/control4/ @lawtancool
/homeassistant/components/conversation/ @home-assistant/core @synesthesiam @arturpragacz
/tests/components/conversation/ @home-assistant/core @synesthesiam @arturpragacz
/homeassistant/components/conversation/ @home-assistant/core @synesthesiam
/tests/components/conversation/ @home-assistant/core @synesthesiam
/homeassistant/components/cookidoo/ @miaucl
/tests/components/cookidoo/ @miaucl
/homeassistant/components/coolmaster/ @OnFreund
@@ -313,7 +303,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/crownstone/ @Crownstone @RicArch97
/tests/components/crownstone/ @Crownstone @RicArch97
/homeassistant/components/cups/ @fabaff
/tests/components/cups/ @fabaff
/homeassistant/components/daikin/ @fredrike
/tests/components/daikin/ @fredrike
/homeassistant/components/date/ @home-assistant/core
@@ -335,8 +324,8 @@ build.json @home-assistant/supervisor
/tests/components/demo/ @home-assistant/core
/homeassistant/components/denonavr/ @ol-iver @starkillerOG
/tests/components/denonavr/ @ol-iver @starkillerOG
/homeassistant/components/derivative/ @afaucogney @karwosts
/tests/components/derivative/ @afaucogney @karwosts
/homeassistant/components/derivative/ @afaucogney
/tests/components/derivative/ @afaucogney
/homeassistant/components/devialet/ @fwestenberg
/tests/components/devialet/ @fwestenberg
/homeassistant/components/device_automation/ @home-assistant/core
@@ -424,8 +413,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/emby/ @mezz64
/homeassistant/components/emoncms/ @borpin @alexandrecuer
/tests/components/emoncms/ @borpin @alexandrecuer
/homeassistant/components/emoncms_history/ @alexandrecuer
/tests/components/emoncms_history/ @alexandrecuer
/homeassistant/components/emonitor/ @bdraco
/tests/components/emonitor/ @bdraco
/homeassistant/components/emulated_hue/ @bdraco @Tho85
@@ -442,8 +429,8 @@ build.json @home-assistant/supervisor
/tests/components/enigma2/ @autinerd
/homeassistant/components/enocean/ @bdurrer
/tests/components/enocean/ @bdurrer
/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
/tests/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @joostlek @catsmanac
/tests/components/enphase_envoy/ @bdraco @cgarwood @joostlek @catsmanac
/homeassistant/components/entur_public_transport/ @hfurubotten
/homeassistant/components/environment_canada/ @gwww @michaeldavie
/tests/components/environment_canada/ @gwww @michaeldavie
@@ -458,16 +445,18 @@ build.json @home-assistant/supervisor
/tests/components/eq3btsmart/ @eulemitkeule @dbuezas
/homeassistant/components/escea/ @lazdavila
/tests/components/escea/ @lazdavila
/homeassistant/components/esphome/ @jesserockz @kbx81 @bdraco
/tests/components/esphome/ @jesserockz @kbx81 @bdraco
/homeassistant/components/esphome/ @OttoWinter @jesserockz @kbx81 @bdraco
/tests/components/esphome/ @OttoWinter @jesserockz @kbx81 @bdraco
/homeassistant/components/eufylife_ble/ @bdr99
/tests/components/eufylife_ble/ @bdr99
/homeassistant/components/event/ @home-assistant/core
/tests/components/event/ @home-assistant/core
/homeassistant/components/evil_genius_labs/ @balloob
/tests/components/evil_genius_labs/ @balloob
/homeassistant/components/evohome/ @zxdavb
/tests/components/evohome/ @zxdavb
/homeassistant/components/ezviz/ @RenierM26
/tests/components/ezviz/ @RenierM26
/homeassistant/components/ezviz/ @RenierM26 @baqs
/tests/components/ezviz/ @RenierM26 @baqs
/homeassistant/components/faa_delays/ @ntilley905
/tests/components/faa_delays/ @ntilley905
/homeassistant/components/fan/ @home-assistant/core
@@ -513,8 +502,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/forked_daapd/ @uvjustin
/tests/components/forked_daapd/ @uvjustin
/homeassistant/components/fortios/ @kimfrellsen
/homeassistant/components/foscam/ @Foscam-wangzhengyu
/tests/components/foscam/ @Foscam-wangzhengyu
/homeassistant/components/foscam/ @krmarien
/tests/components/foscam/ @krmarien
/homeassistant/components/freebox/ @hacf-fr @Quentame
/tests/components/freebox/ @hacf-fr @Quentame
/homeassistant/components/freedompro/ @stefano055415
@@ -648,8 +637,6 @@ build.json @home-assistant/supervisor
/tests/components/homeassistant/ @home-assistant/core
/homeassistant/components/homeassistant_alerts/ @home-assistant/core
/tests/components/homeassistant_alerts/ @home-assistant/core
/homeassistant/components/homeassistant_connect_zbt2/ @home-assistant/core
/tests/components/homeassistant_connect_zbt2/ @home-assistant/core
/homeassistant/components/homeassistant_green/ @home-assistant/core
/tests/components/homeassistant_green/ @home-assistant/core
/homeassistant/components/homeassistant_hardware/ @home-assistant/core
@@ -678,8 +665,8 @@ build.json @home-assistant/supervisor
/tests/components/http/ @home-assistant/core
/homeassistant/components/huawei_lte/ @scop @fphammerle
/tests/components/huawei_lte/ @scop @fphammerle
/homeassistant/components/hue/ @marcelveldt
/tests/components/hue/ @marcelveldt
/homeassistant/components/hue/ @balloob @marcelveldt
/tests/components/hue/ @balloob @marcelveldt
/homeassistant/components/huisbaasje/ @dennisschroer
/tests/components/huisbaasje/ @dennisschroer
/homeassistant/components/humidifier/ @home-assistant/core @Shulyaka
@@ -690,8 +677,8 @@ build.json @home-assistant/supervisor
/tests/components/husqvarna_automower/ @Thomas55555
/homeassistant/components/husqvarna_automower_ble/ @alistair23
/tests/components/husqvarna_automower_ble/ @alistair23
/homeassistant/components/huum/ @frwickst @vincentwolsink
/tests/components/huum/ @frwickst @vincentwolsink
/homeassistant/components/huum/ @frwickst
/tests/components/huum/ @frwickst
/homeassistant/components/hvv_departures/ @vigonotion
/tests/components/hvv_departures/ @vigonotion
/homeassistant/components/hydrawise/ @dknowles2 @thomaskistler @ptcryan
@@ -723,8 +710,6 @@ build.json @home-assistant/supervisor
/tests/components/imeon_inverter/ @Imeon-Energy
/homeassistant/components/imgw_pib/ @bieniu
/tests/components/imgw_pib/ @bieniu
/homeassistant/components/immich/ @mib1185
/tests/components/immich/ @mib1185
/homeassistant/components/improv_ble/ @emontnemery
/tests/components/improv_ble/ @emontnemery
/homeassistant/components/incomfort/ @jbouwh
@@ -751,8 +736,8 @@ build.json @home-assistant/supervisor
/tests/components/integration/ @dgomes
/homeassistant/components/intellifire/ @jeeftor
/tests/components/intellifire/ @jeeftor
/homeassistant/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
/tests/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
/homeassistant/components/intent/ @home-assistant/core @synesthesiam
/tests/components/intent/ @home-assistant/core @synesthesiam
/homeassistant/components/intesishome/ @jnimmo
/homeassistant/components/iometer/ @MaestroOnICe
/tests/components/iometer/ @MaestroOnICe
@@ -794,6 +779,8 @@ build.json @home-assistant/supervisor
/tests/components/jellyfin/ @RunC0deRun @ctalkington
/homeassistant/components/jewish_calendar/ @tsvi
/tests/components/jewish_calendar/ @tsvi
/homeassistant/components/juicenet/ @jesserockz
/tests/components/juicenet/ @jesserockz
/homeassistant/components/justnimbus/ @kvanzuijlen
/tests/components/justnimbus/ @kvanzuijlen
/homeassistant/components/jvc_projector/ @SteveEasley @msavazzi
@@ -860,14 +847,14 @@ build.json @home-assistant/supervisor
/tests/components/lg_netcast/ @Drafteed @splinter98
/homeassistant/components/lg_thinq/ @LG-ThinQ-Integration
/tests/components/lg_thinq/ @LG-ThinQ-Integration
/homeassistant/components/libre_hardware_monitor/ @Sab44
/tests/components/libre_hardware_monitor/ @Sab44
/homeassistant/components/lidarr/ @tkdrob
/tests/components/lidarr/ @tkdrob
/homeassistant/components/lifx/ @Djelibeybi
/tests/components/lifx/ @Djelibeybi
/homeassistant/components/light/ @home-assistant/core
/tests/components/light/ @home-assistant/core
/homeassistant/components/linear_garage_door/ @IceBotYT
/tests/components/linear_garage_door/ @IceBotYT
/homeassistant/components/linkplay/ @Velleman
/tests/components/linkplay/ @Velleman
/homeassistant/components/linux_battery/ @fabaff
@@ -1108,8 +1095,8 @@ build.json @home-assistant/supervisor
/tests/components/onvif/ @hunterjm @jterrace
/homeassistant/components/open_meteo/ @frenck
/tests/components/open_meteo/ @frenck
/homeassistant/components/open_router/ @joostlek
/tests/components/open_router/ @joostlek
/homeassistant/components/openai_conversation/ @balloob
/tests/components/openai_conversation/ @balloob
/homeassistant/components/openerz/ @misialq
/tests/components/openerz/ @misialq
/homeassistant/components/openexchangerates/ @MartinHjelmare
@@ -1124,8 +1111,8 @@ build.json @home-assistant/supervisor
/tests/components/opentherm_gw/ @mvn23
/homeassistant/components/openuv/ @bachya
/tests/components/openuv/ @bachya
/homeassistant/components/openweathermap/ @fabaff @freekode @nzapponi @wittypluck
/tests/components/openweathermap/ @fabaff @freekode @nzapponi @wittypluck
/homeassistant/components/openweathermap/ @fabaff @freekode @nzapponi
/tests/components/openweathermap/ @fabaff @freekode @nzapponi
/homeassistant/components/opnsense/ @mtreinish
/tests/components/opnsense/ @mtreinish
/homeassistant/components/opower/ @tronikos
@@ -1151,8 +1138,6 @@ build.json @home-assistant/supervisor
/tests/components/palazzetti/ @dotvav
/homeassistant/components/panel_custom/ @home-assistant/frontend
/tests/components/panel_custom/ @home-assistant/frontend
/homeassistant/components/paperless_ngx/ @fvgarrel
/tests/components/paperless_ngx/ @fvgarrel
/homeassistant/components/peblar/ @frenck
/tests/components/peblar/ @frenck
/homeassistant/components/peco/ @IceBotYT
@@ -1175,8 +1160,6 @@ build.json @home-assistant/supervisor
/tests/components/ping/ @jpbede
/homeassistant/components/plaato/ @JohNan
/tests/components/plaato/ @JohNan
/homeassistant/components/playstation_network/ @jackjpowell @tr4nt0r
/tests/components/playstation_network/ @jackjpowell @tr4nt0r
/homeassistant/components/plex/ @jjlawren
/tests/components/plex/ @jjlawren
/homeassistant/components/plugwise/ @CoMPaTech @bouwew
@@ -1185,8 +1168,6 @@ build.json @home-assistant/supervisor
/tests/components/plum_lightpad/ @ColinHarrington @prystupa
/homeassistant/components/point/ @fredrike
/tests/components/point/ @fredrike
/homeassistant/components/pooldose/ @lmaertin
/tests/components/pooldose/ @lmaertin
/homeassistant/components/poolsense/ @haemishkyd
/tests/components/poolsense/ @haemishkyd
/homeassistant/components/powerfox/ @klaasnicolaas
@@ -1195,8 +1176,6 @@ build.json @home-assistant/supervisor
/tests/components/powerwall/ @bdraco @jrester @daniel-simpson
/homeassistant/components/private_ble_device/ @Jc2k
/tests/components/private_ble_device/ @Jc2k
/homeassistant/components/probe_plus/ @pantherale0
/tests/components/probe_plus/ @pantherale0
/homeassistant/components/profiler/ @bdraco
/tests/components/profiler/ @bdraco
/homeassistant/components/progettihwsw/ @ardaseremet
@@ -1208,6 +1187,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/proximity/ @mib1185
/tests/components/proximity/ @mib1185
/homeassistant/components/proxmoxve/ @jhollowe @Corbeno
/homeassistant/components/prusalink/ @balloob
/tests/components/prusalink/ @balloob
/homeassistant/components/ps4/ @ktnrg45
/tests/components/ps4/ @ktnrg45
/homeassistant/components/pterodactyl/ @elmurato
@@ -1241,7 +1222,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/qnap_qsw/ @Noltari
/tests/components/qnap_qsw/ @Noltari
/homeassistant/components/quantum_gateway/ @cisasteelersfan
/tests/components/quantum_gateway/ @cisasteelersfan
/homeassistant/components/qvr_pro/ @oblogic7
/homeassistant/components/qwikswitch/ @kellerza
/tests/components/qwikswitch/ @kellerza
@@ -1284,8 +1264,8 @@ build.json @home-assistant/supervisor
/tests/components/rehlko/ @bdraco @peterager
/homeassistant/components/remote/ @home-assistant/core
/tests/components/remote/ @home-assistant/core
/homeassistant/components/remote_calendar/ @Thomas55555 @allenporter
/tests/components/remote_calendar/ @Thomas55555 @allenporter
/homeassistant/components/remote_calendar/ @Thomas55555
/tests/components/remote_calendar/ @Thomas55555
/homeassistant/components/renault/ @epenet
/tests/components/renault/ @epenet
/homeassistant/components/renson/ @jimmyd-be
@@ -1301,8 +1281,8 @@ build.json @home-assistant/supervisor
/tests/components/rflink/ @javicalle
/homeassistant/components/rfxtrx/ @danielhiversen @elupus @RobBie1221
/tests/components/rfxtrx/ @danielhiversen @elupus @RobBie1221
/homeassistant/components/rhasspy/ @synesthesiam
/tests/components/rhasspy/ @synesthesiam
/homeassistant/components/rhasspy/ @balloob @synesthesiam
/tests/components/rhasspy/ @balloob @synesthesiam
/homeassistant/components/ridwell/ @bachya
/tests/components/ridwell/ @bachya
/homeassistant/components/ring/ @sdb9696
@@ -1327,6 +1307,8 @@ build.json @home-assistant/supervisor
/tests/components/rpi_power/ @shenxn @swetoast
/homeassistant/components/rss_feed_template/ @home-assistant/core
/tests/components/rss_feed_template/ @home-assistant/core
/homeassistant/components/rtsp_to_webrtc/ @allenporter
/tests/components/rtsp_to_webrtc/ @allenporter
/homeassistant/components/ruckus_unleashed/ @lanrat @ms264556 @gabe565
/tests/components/ruckus_unleashed/ @lanrat @ms264556 @gabe565
/homeassistant/components/russound_rio/ @noahhusby
@@ -1390,14 +1372,12 @@ build.json @home-assistant/supervisor
/tests/components/seventeentrack/ @shaiu
/homeassistant/components/sfr_box/ @epenet
/tests/components/sfr_box/ @epenet
/homeassistant/components/sftp_storage/ @maretodoric
/tests/components/sftp_storage/ @maretodoric
/homeassistant/components/sharkiq/ @JeffResc @funkybunch
/tests/components/sharkiq/ @JeffResc @funkybunch
/homeassistant/components/shell_command/ @home-assistant/core
/tests/components/shell_command/ @home-assistant/core
/homeassistant/components/shelly/ @bieniu @thecode @chemelli74 @bdraco
/tests/components/shelly/ @bieniu @thecode @chemelli74 @bdraco
/homeassistant/components/shelly/ @balloob @bieniu @thecode @chemelli74 @bdraco
/tests/components/shelly/ @balloob @bieniu @thecode @chemelli74 @bdraco
/homeassistant/components/shodan/ @fabaff
/homeassistant/components/sia/ @eavanvalkenburg
/tests/components/sia/ @eavanvalkenburg
@@ -1421,8 +1401,6 @@ build.json @home-assistant/supervisor
/tests/components/skybell/ @tkdrob
/homeassistant/components/slack/ @tkdrob @fletcherau
/tests/components/slack/ @tkdrob @fletcherau
/homeassistant/components/sleep_as_android/ @tr4nt0r
/tests/components/sleep_as_android/ @tr4nt0r
/homeassistant/components/sleepiq/ @mfugate1 @kbickar
/tests/components/sleepiq/ @mfugate1 @kbickar
/homeassistant/components/slide/ @ualex73
@@ -1434,8 +1412,6 @@ build.json @home-assistant/supervisor
/tests/components/sma/ @kellerza @rklomp @erwindouna
/homeassistant/components/smappee/ @bsmappee
/tests/components/smappee/ @bsmappee
/homeassistant/components/smarla/ @explicatis @rlint-explicatis
/tests/components/smarla/ @explicatis @rlint-explicatis
/homeassistant/components/smart_meter_texas/ @grahamwetzler
/tests/components/smart_meter_texas/ @grahamwetzler
/homeassistant/components/smartthings/ @joostlek
@@ -1510,8 +1486,8 @@ build.json @home-assistant/supervisor
/tests/components/subaru/ @G-Two
/homeassistant/components/suez_water/ @ooii @jb101010-2
/tests/components/suez_water/ @ooii @jb101010-2
/homeassistant/components/sun/ @home-assistant/core
/tests/components/sun/ @home-assistant/core
/homeassistant/components/sun/ @Swamp-Ig
/tests/components/sun/ @Swamp-Ig
/homeassistant/components/supla/ @mwegrzynek
/homeassistant/components/surepetcare/ @benleb @danielhiversen
/tests/components/surepetcare/ @benleb @danielhiversen
@@ -1524,8 +1500,8 @@ build.json @home-assistant/supervisor
/tests/components/switch_as_x/ @home-assistant/core
/homeassistant/components/switchbee/ @jafar-atili
/tests/components/switchbee/ @jafar-atili
/homeassistant/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski @zerzhang
/tests/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski @zerzhang
/homeassistant/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski
/tests/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski
/homeassistant/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
/tests/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
/homeassistant/components/switcher_kis/ @thecode @YogevBokobza
@@ -1544,8 +1520,8 @@ build.json @home-assistant/supervisor
/tests/components/systemmonitor/ @gjohansson-ST
/homeassistant/components/tado/ @erwindouna
/tests/components/tado/ @erwindouna
/homeassistant/components/tag/ @home-assistant/core
/tests/components/tag/ @home-assistant/core
/homeassistant/components/tag/ @balloob @dmulcahey
/tests/components/tag/ @balloob @dmulcahey
/homeassistant/components/tailscale/ @frenck
/tests/components/tailscale/ @frenck
/homeassistant/components/tailwind/ @frenck
@@ -1563,12 +1539,10 @@ build.json @home-assistant/supervisor
/tests/components/technove/ @Moustachauve
/homeassistant/components/tedee/ @patrickhilker @zweckj
/tests/components/tedee/ @patrickhilker @zweckj
/homeassistant/components/telegram_bot/ @hanwg
/tests/components/telegram_bot/ @hanwg
/homeassistant/components/tellduslive/ @fredrike
/tests/components/tellduslive/ @fredrike
/homeassistant/components/template/ @Petro31 @home-assistant/core
/tests/components/template/ @Petro31 @home-assistant/core
/homeassistant/components/template/ @Petro31 @PhracturedBlue @home-assistant/core
/tests/components/template/ @Petro31 @PhracturedBlue @home-assistant/core
/homeassistant/components/tesla_fleet/ @Bre77
/tests/components/tesla_fleet/ @Bre77
/homeassistant/components/tesla_wall_connector/ @einarhauks
@@ -1594,8 +1568,6 @@ build.json @home-assistant/supervisor
/tests/components/tile/ @bachya
/homeassistant/components/tilt_ble/ @apt-itude
/tests/components/tilt_ble/ @apt-itude
/homeassistant/components/tilt_pi/ @michaelheyman
/tests/components/tilt_pi/ @michaelheyman
/homeassistant/components/time/ @home-assistant/core
/tests/components/time/ @home-assistant/core
/homeassistant/components/time_date/ @fabaff
@@ -1605,8 +1577,6 @@ build.json @home-assistant/supervisor
/tests/components/todo/ @home-assistant/core
/homeassistant/components/todoist/ @boralyl
/tests/components/todoist/ @boralyl
/homeassistant/components/togrill/ @elupus
/tests/components/togrill/ @elupus
/homeassistant/components/tolo/ @MatthiasLohr
/tests/components/tolo/ @MatthiasLohr
/homeassistant/components/tomorrowio/ @raman325 @lymanepp
@@ -1621,6 +1591,8 @@ build.json @home-assistant/supervisor
/tests/components/tplink_omada/ @MarkGodwin
/homeassistant/components/traccar/ @ludeeus
/tests/components/traccar/ @ludeeus
/homeassistant/components/traccar_server/ @ludeeus
/tests/components/traccar_server/ @ludeeus
/homeassistant/components/trace/ @home-assistant/core
/tests/components/trace/ @home-assistant/core
/homeassistant/components/tractive/ @Danielhiversen @zhulik @bieniu
@@ -1668,8 +1640,6 @@ build.json @home-assistant/supervisor
/tests/components/upnp/ @StevenLooman
/homeassistant/components/uptime/ @frenck
/tests/components/uptime/ @frenck
/homeassistant/components/uptime_kuma/ @tr4nt0r
/tests/components/uptime_kuma/ @tr4nt0r
/homeassistant/components/uptimerobot/ @ludeeus @chemelli74
/tests/components/uptimerobot/ @ludeeus @chemelli74
/homeassistant/components/usb/ @bdraco
@@ -1686,19 +1656,17 @@ build.json @home-assistant/supervisor
/tests/components/vallox/ @andre-richter @slovdahl @viiru- @yozik04
/homeassistant/components/valve/ @home-assistant/core
/tests/components/valve/ @home-assistant/core
/homeassistant/components/vegehub/ @ghowevege
/tests/components/vegehub/ @ghowevege
/homeassistant/components/velbus/ @Cereal2nd @brefra
/tests/components/velbus/ @Cereal2nd @brefra
/homeassistant/components/velux/ @Julius2342 @DeerMaximum @pawlizio @wollew
/tests/components/velux/ @Julius2342 @DeerMaximum @pawlizio @wollew
/homeassistant/components/velux/ @Julius2342 @DeerMaximum @pawlizio
/tests/components/velux/ @Julius2342 @DeerMaximum @pawlizio
/homeassistant/components/venstar/ @garbled1 @jhollowe
/tests/components/venstar/ @garbled1 @jhollowe
/homeassistant/components/versasense/ @imstevenxyz
/homeassistant/components/version/ @ludeeus
/tests/components/version/ @ludeeus
/homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven
/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven
/homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak
/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak
/homeassistant/components/vicare/ @CFenner
/tests/components/vicare/ @CFenner
/homeassistant/components/vilfo/ @ManneW
@@ -1710,14 +1678,14 @@ build.json @home-assistant/supervisor
/tests/components/vlc_telnet/ @rodripf @MartinHjelmare
/homeassistant/components/vodafone_station/ @paoloantinori @chemelli74
/tests/components/vodafone_station/ @paoloantinori @chemelli74
/homeassistant/components/voip/ @synesthesiam @jaminh
/tests/components/voip/ @synesthesiam @jaminh
/homeassistant/components/voip/ @balloob @synesthesiam @jaminh
/tests/components/voip/ @balloob @synesthesiam @jaminh
/homeassistant/components/volumio/ @OnFreund
/tests/components/volumio/ @OnFreund
/homeassistant/components/volvo/ @thomasddn
/tests/components/volvo/ @thomasddn
/homeassistant/components/volvooncall/ @molobrakos
/tests/components/volvooncall/ @molobrakos
/homeassistant/components/vulcan/ @Antoni-Czaplicki
/tests/components/vulcan/ @Antoni-Czaplicki
/homeassistant/components/wake_on_lan/ @ntilley905
/tests/components/wake_on_lan/ @ntilley905
/homeassistant/components/wake_word/ @home-assistant/core @synesthesiam
@@ -1768,8 +1736,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/wirelesstag/ @sergeymaysak
/homeassistant/components/withings/ @joostlek
/tests/components/withings/ @joostlek
/homeassistant/components/wiz/ @sbidy @arturpragacz
/tests/components/wiz/ @sbidy @arturpragacz
/homeassistant/components/wiz/ @sbidy
/tests/components/wiz/ @sbidy
/homeassistant/components/wled/ @frenck
/tests/components/wled/ @frenck
/homeassistant/components/wmspro/ @mback2k
@@ -1782,8 +1750,8 @@ build.json @home-assistant/supervisor
/tests/components/worldclock/ @fabaff
/homeassistant/components/ws66i/ @ssaenger
/tests/components/ws66i/ @ssaenger
/homeassistant/components/wyoming/ @synesthesiam
/tests/components/wyoming/ @synesthesiam
/homeassistant/components/wyoming/ @balloob @synesthesiam
/tests/components/wyoming/ @balloob @synesthesiam
/homeassistant/components/xbox/ @hunterjm
/tests/components/xbox/ @hunterjm
/homeassistant/components/xiaomi_aqara/ @danielhiversen @syssi
@@ -14,8 +14,5 @@ Still interested? Then you should take a peek at the [developer documentation](h

## Feature suggestions

If you want to suggest a new feature for Home Assistant (e.g. new integrations), please [start a discussion](https://github.com/orgs/home-assistant/discussions) on GitHub.

## Issue Tracker

If you want to report an issue, please [create an issue](https://github.com/home-assistant/core/issues) on GitHub.
If you want to suggest a new feature for Home Assistant (e.g., new integrations), please open a thread in our [Community Forum: Feature Requests](https://community.home-assistant.io/c/feature-requests).
We use [GitHub for tracking issues](https://github.com/home-assistant/core/issues), not for tracking feature requests.
2
Dockerfile
generated
@@ -31,7 +31,7 @@ RUN \
&& go2rtc --version

# Install uv
RUN pip3 install uv==0.8.9
RUN pip3 install uv==0.7.1

WORKDIR /usr/src

@@ -1,9 +1,18 @@
FROM mcr.microsoft.com/vscode/devcontainers/base:debian
FROM mcr.microsoft.com/devcontainers/python:1-3.13

SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# Uninstall pre-installed formatting and linting tools
# They would conflict with our pinned versions
RUN \
apt-get update \
pipx uninstall pydocstyle \
&& pipx uninstall pycodestyle \
&& pipx uninstall mypy \
&& pipx uninstall pylint

RUN \
curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \
&& apt-get update \
&& DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
# Additional library needed by some tests and accordingly by VScode Tests Discovery
bluez \
@@ -23,18 +32,21 @@ RUN \
libxml2 \
git \
cmake \
autoconf \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*

# Add go2rtc binary
COPY --from=ghcr.io/alexxit/go2rtc:latest /usr/local/bin/go2rtc /bin/go2rtc

# Install uv
RUN pip3 install uv

WORKDIR /usr/src

COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv

RUN uv python install 3.13.2
# Setup hass-release
RUN git clone --depth 1 https://github.com/home-assistant/hass-release \
&& uv pip install --system -e hass-release/ \
&& chown -R vscode /usr/src/hass-release/data

USER vscode
ENV VIRTUAL_ENV="/home/vscode/.local/ha-venv"
@@ -43,10 +55,6 @@ ENV PATH="$VIRTUAL_ENV/bin:$PATH"

WORKDIR /tmp

# Setup hass-release
RUN git clone --depth 1 https://github.com/home-assistant/hass-release ~/hass-release \
&& uv pip install -e ~/hass-release/

# Install Python dependencies from requirements
COPY requirements.txt ./
COPY homeassistant/package_constraints.txt homeassistant/package_constraints.txt
@@ -57,4 +65,4 @@ RUN uv pip install -r requirements_test.txt
WORKDIR /workspaces

# Set the default shell to bash instead of sh
ENV SHELL=/bin/bash
ENV SHELL /bin/bash
10
build.yaml
@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.0
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.0
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.0
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.0
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.0
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.05.0
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.05.0
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.05.0
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.05.0
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.05.0
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io
@@ -38,7 +38,8 @@ def validate_python() -> None:

def ensure_config_path(config_dir: str) -> None:
    """Validate the configuration directory."""
    from . import config as config_util  # noqa: PLC0415
    # pylint: disable-next=import-outside-toplevel
    from . import config as config_util

    lib_dir = os.path.join(config_dir, "deps")

@@ -79,7 +80,8 @@ def ensure_config_path(config_dir: str) -> None:

def get_arguments() -> argparse.Namespace:
    """Get parsed passed in arguments."""
    from . import config as config_util  # noqa: PLC0415
    # pylint: disable-next=import-outside-toplevel
    from . import config as config_util

    parser = argparse.ArgumentParser(
        description="Home Assistant: Observe, Control, Automate.",
@@ -175,7 +177,8 @@ def main() -> int:
    validate_os()

    if args.script is not None:
        from . import scripts  # noqa: PLC0415
        # pylint: disable-next=import-outside-toplevel
        from . import scripts

        return scripts.run(args.script)

@@ -185,44 +188,39 @@ def main() -> int:

    ensure_config_path(config_dir)

    from . import config, runner  # noqa: PLC0415
    # pylint: disable-next=import-outside-toplevel
    from . import config, runner

    # Ensure only one instance runs per config directory
    with runner.ensure_single_execution(config_dir) as single_execution_lock:
        # Check if another instance is already running
        if single_execution_lock.exit_code is not None:
            return single_execution_lock.exit_code
        safe_mode = config.safe_mode_enabled(config_dir)

    safe_mode = config.safe_mode_enabled(config_dir)
        runtime_conf = runner.RuntimeConfig(
            config_dir=config_dir,
            verbose=args.verbose,
            log_rotate_days=args.log_rotate_days,
            log_file=args.log_file,
            log_no_color=args.log_no_color,
            skip_pip=args.skip_pip,
            skip_pip_packages=args.skip_pip_packages,
            recovery_mode=args.recovery_mode,
            debug=args.debug,
            open_ui=args.open_ui,
            safe_mode=safe_mode,
        )

    runtime_conf = runner.RuntimeConfig(
        config_dir=config_dir,
        verbose=args.verbose,
        log_rotate_days=args.log_rotate_days,
        log_file=args.log_file,
        log_no_color=args.log_no_color,
        skip_pip=args.skip_pip,
        skip_pip_packages=args.skip_pip_packages,
        recovery_mode=args.recovery_mode,
        debug=args.debug,
        open_ui=args.open_ui,
        safe_mode=safe_mode,
    )
        fault_file_name = os.path.join(config_dir, FAULT_LOG_FILENAME)
        with open(fault_file_name, mode="a", encoding="utf8") as fault_file:
            faulthandler.enable(fault_file)
            exit_code = runner.run(runtime_conf)
            faulthandler.disable()

    fault_file_name = os.path.join(config_dir, FAULT_LOG_FILENAME)
    with open(fault_file_name, mode="a", encoding="utf8") as fault_file:
        faulthandler.enable(fault_file)
        exit_code = runner.run(runtime_conf)
        faulthandler.disable()
        # It's possible for the fault file to disappear, so suppress obvious errors
        with suppress(FileNotFoundError):
            if os.path.getsize(fault_file_name) == 0:
                os.remove(fault_file_name)

    # It's possible for the fault file to disappear, so suppress obvious errors
    with suppress(FileNotFoundError):
        if os.path.getsize(fault_file_name) == 0:
            os.remove(fault_file_name)
        check_threads()

    check_threads()

        return exit_code
    return exit_code


if __name__ == "__main__":
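The hunk above also removes the newer single-instance guard around startup. As a side note, a minimal sketch of what a `runner.ensure_single_execution` helper can provide, assuming a POSIX file lock; the class name, lock-file path, and fcntl-based locking here are illustrative stand-ins, not the actual runner internals:

from __future__ import annotations

import fcntl
import os
from collections.abc import Iterator
from contextlib import contextmanager
from dataclasses import dataclass


@dataclass
class SingleExecution:
    exit_code: int | None = None  # set when another instance already runs


@contextmanager
def ensure_single_execution(config_dir: str) -> Iterator[SingleExecution]:
    """Hold an exclusive lock on a file in the config directory."""
    lock_path = os.path.join(config_dir, ".ha_run.lock")
    lock = SingleExecution()
    with open(lock_path, "a", encoding="utf8") as lock_file:
        try:
            # Non-blocking: fail immediately if another process holds the lock.
            fcntl.flock(lock_file, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except OSError:
            lock.exit_code = 1  # caller returns right away, as main() does above
        yield lock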
@@ -120,9 +120,6 @@ class AuthStore:

        new_user = models.User(**kwargs)

        while new_user.id in self._users:
            new_user = models.User(**kwargs)

        self._users[new_user.id] = new_user

        if credentials is None:
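The removed `while` loop simply retries user creation until the randomly generated ID is unused. The same idiom in isolation, as a toy sketch (`uuid4` collisions are astronomically unlikely, so the loop body almost never runs):

import uuid

users: dict[str, object] = {}

def new_user_id() -> str:
    """Draw random IDs until one is free."""
    user_id = uuid.uuid4().hex
    while user_id in users:  # re-roll on the (improbable) collision
        user_id = uuid.uuid4().hex
    return user_id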
@@ -27,7 +27,7 @@ from . import (
    SetupFlow,
)

REQUIREMENTS = ["pyotp==2.9.0"]
REQUIREMENTS = ["pyotp==2.8.0"]

CONF_MESSAGE = "message"

@@ -52,28 +52,28 @@ _LOGGER = logging.getLogger(__name__)

def _generate_secret() -> str:
    """Generate a secret."""
    import pyotp  # noqa: PLC0415
    import pyotp  # pylint: disable=import-outside-toplevel

    return str(pyotp.random_base32())


def _generate_random() -> int:
    """Generate a 32 digit number."""
    import pyotp  # noqa: PLC0415
    import pyotp  # pylint: disable=import-outside-toplevel

    return int(pyotp.random_base32(length=32, chars=list("1234567890")))


def _generate_otp(secret: str, count: int) -> str:
    """Generate one time password."""
    import pyotp  # noqa: PLC0415
    import pyotp  # pylint: disable=import-outside-toplevel

    return str(pyotp.HOTP(secret).at(count))


def _verify_otp(secret: str, otp: str, count: int) -> bool:
    """Verify one time password."""
    import pyotp  # noqa: PLC0415
    import pyotp  # pylint: disable=import-outside-toplevel

    return bool(pyotp.HOTP(secret).verify(otp, count))
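Both sides of this hunk call the same pyotp HOTP primitives; only the lint comment and the pinned version differ. A usage sketch of those helpers, assuming pyotp is installed: the server stores a secret and a counter, each code is valid for exactly one counter value, and generation and verification must agree on it.

import pyotp

secret = pyotp.random_base32()
hotp = pyotp.HOTP(secret)

code = hotp.at(0)                # same call as _generate_otp(secret, 0)
assert hotp.verify(code, 0)      # same call as _verify_otp(secret, code, 0)
assert not hotp.verify(code, 1)  # a different counter invalidates the code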
@@ -20,7 +20,7 @@ from . import (
    SetupFlow,
)

REQUIREMENTS = ["pyotp==2.9.0", "PyQRCode==1.2.1"]
REQUIREMENTS = ["pyotp==2.8.0", "PyQRCode==1.2.1"]

CONFIG_SCHEMA = MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend({}, extra=vol.PREVENT_EXTRA)

@@ -37,7 +37,7 @@ DUMMY_SECRET = "FPPTH34D4E3MI2HG"

def _generate_qr_code(data: str) -> str:
    """Generate a base64 PNG string represent QR Code image of data."""
    import pyqrcode  # noqa: PLC0415
    import pyqrcode  # pylint: disable=import-outside-toplevel

    qr_code = pyqrcode.create(data)

@@ -59,7 +59,7 @@ def _generate_qr_code(data: str) -> str:

def _generate_secret_and_qr_code(username: str) -> tuple[str, str, str]:
    """Generate a secret, url, and QR code."""
    import pyotp  # noqa: PLC0415
    import pyotp  # pylint: disable=import-outside-toplevel

    ota_secret = pyotp.random_base32()
    url = pyotp.totp.TOTP(ota_secret).provisioning_uri(
@@ -107,7 +107,7 @@ class TotpAuthModule(MultiFactorAuthModule):

    def _add_ota_secret(self, user_id: str, secret: str | None = None) -> str:
        """Create a ota_secret for user."""
        import pyotp  # noqa: PLC0415
        import pyotp  # pylint: disable=import-outside-toplevel

        ota_secret: str = secret or pyotp.random_base32()

@@ -163,7 +163,7 @@ class TotpAuthModule(MultiFactorAuthModule):

    def _validate_2fa(self, user_id: str, code: str) -> bool:
        """Validate two factor authentication code."""
        import pyotp  # noqa: PLC0415
        import pyotp  # pylint: disable=import-outside-toplevel

        if (ota_secret := self._users.get(user_id)) is None:  # type: ignore[union-attr]
            # even we cannot find user, we still do verify
@@ -196,7 +196,7 @@ class TotpSetupFlow(SetupFlow[TotpAuthModule]):

        Return self.async_show_form(step_id='init') if user_input is None.
        Return self.async_create_entry(data={'result': result}) if finish.
        """
        import pyotp  # noqa: PLC0415
        import pyotp  # pylint: disable=import-outside-toplevel

        errors: dict[str, str] = {}
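The TOTP module's enrollment path generates a secret, builds an otpauth:// provisioning URI, and renders it as a PNG QR code an authenticator app can scan. A condensed sketch of that flow, assuming pyotp and PyQRCode are installed; "alice" and the issuer name are placeholder values:

import base64
from io import BytesIO

import pyotp
import pyqrcode

ota_secret = pyotp.random_base32()
url = pyotp.totp.TOTP(ota_secret).provisioning_uri(
    "alice", issuer_name="Home Assistant"  # account and issuer shown in the app
)

buffer = BytesIO()
pyqrcode.create(url).png(buffer, scale=4)
png_b64 = base64.b64encode(buffer.getvalue()).decode()  # embeddable as a data: URI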
@@ -33,10 +33,7 @@ class AuthFlowContext(FlowContext, total=False):
    redirect_uri: str


class AuthFlowResult(FlowResult[AuthFlowContext, tuple[str, str]], total=False):
    """Typed result dict for auth flow."""

    result: Credentials  # Only present if type is CREATE_ENTRY
AuthFlowResult = FlowResult[AuthFlowContext, tuple[str, str]]


@attr.s(slots=True)
29 homeassistant/backports/enum.py Normal file
@@ -0,0 +1,29 @@
"""Enum backports from standard lib.

This file contained the backport of the StrEnum of Python 3.11.

Since we have dropped support for Python 3.10, we can remove this backport.
This file is kept for now to avoid breaking custom components that might
import it.
"""

from __future__ import annotations

from enum import StrEnum as _StrEnum
from functools import partial

from homeassistant.helpers.deprecation import (
    DeprecatedAlias,
    all_with_deprecated_constants,
    check_if_deprecated_constant,
    dir_with_deprecated_constants,
)

# StrEnum deprecated as of 2024.5 use enum.StrEnum instead.
_DEPRECATED_StrEnum = DeprecatedAlias(_StrEnum, "enum.StrEnum", "2025.5")

__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
    dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())
31 homeassistant/backports/functools.py Normal file
@@ -0,0 +1,31 @@
"""Functools backports from standard lib.

This file contained the backport of the cached_property implementation of Python 3.12.

Since we have dropped support for Python 3.11, we can remove this backport.
This file is kept for now to avoid breaking custom components that might
import it.
"""

from __future__ import annotations

# pylint: disable-next=hass-deprecated-import
from functools import cached_property as _cached_property, partial

from homeassistant.helpers.deprecation import (
    DeprecatedAlias,
    all_with_deprecated_constants,
    check_if_deprecated_constant,
    dir_with_deprecated_constants,
)

# cached_property deprecated as of 2024.5 use functools.cached_property instead.
_DEPRECATED_cached_property = DeprecatedAlias(
    _cached_property, "functools.cached_property", "2025.5"
)

__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
    dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())
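Both backport modules route attribute access through helpers from homeassistant.helpers.deprecation. A stripped-down sketch of the underlying PEP 562 mechanism, with an illustrative table and warning text rather than the helper's real API: module-level __getattr__ lets the old name keep working while emitting a DeprecationWarning.

import enum
import warnings

# old name -> (object to return, suggested replacement)
_DEPRECATED = {"StrEnum": (enum.StrEnum, "enum.StrEnum")}

def __getattr__(name: str):
    if name in _DEPRECATED:
        obj, replacement = _DEPRECATED[name]
        warnings.warn(
            f"{name} is deprecated, use {replacement} instead",
            DeprecationWarning,
            stacklevel=2,
        )
        return obj
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")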
@@ -75,8 +75,8 @@ from .core_config import async_process_ha_core_config
from .exceptions import HomeAssistantError
from .helpers import (
    area_registry,
    backup,
    category_registry,
    condition,
    config_validation as cv,
    device_registry,
    entity,
@@ -89,7 +89,6 @@ from .helpers import (
    restore_state,
    template,
    translation,
    trigger,
)
from .helpers.dispatcher import async_dispatcher_send_internal
from .helpers.storage import get_internal_store_manager
@@ -172,6 +171,8 @@ FRONTEND_INTEGRATIONS = {
# Stage 0 is divided into substages. Each substage has a name, a set of integrations and a timeout.
# The substage containing recorder should have no timeout, as it could cancel a database migration.
# Recorder freezes "recorder" timeout during a migration, but it does not freeze other timeouts.
# The substages preceding it should also have no timeout, until we ensure that the recorder
# is not accidentally promoted as a dependency of any of the integrations in them.
# If we add timeouts to the frontend substages, we should make sure they don't apply in recovery mode.
STAGE_0_INTEGRATIONS = (
    # Load logging and http deps as soon as possible
@@ -332,9 +333,6 @@ async def async_setup_hass(
    if not is_virtual_env():
        await async_mount_local_lib_path(runtime_config.config_dir)

    if hass.config.safe_mode:
        _LOGGER.info("Starting in safe mode")

    basic_setup_success = (
        await async_from_config_dict(config_dict, hass) is not None
    )
@@ -387,6 +385,8 @@ async def async_setup_hass(
            {"recovery_mode": {}, "http": http_conf},
            hass,
        )
    elif hass.config.safe_mode:
        _LOGGER.info("Starting in safe mode")

    if runtime_config.open_ui:
        hass.add_job(open_hass_ui, hass)
@@ -396,7 +396,7 @@ async def async_setup_hass(

def open_hass_ui(hass: core.HomeAssistant) -> None:
    """Open the UI."""
    import webbrowser  # noqa: PLC0415
    import webbrowser  # pylint: disable=import-outside-toplevel

    if hass.config.api is None or "frontend" not in hass.config.components:
        _LOGGER.warning("Cannot launch the UI because frontend not loaded")
@@ -454,8 +454,6 @@ async def async_load_base_functionality(hass: core.HomeAssistant) -> None:
        create_eager_task(restore_state.async_load(hass)),
        create_eager_task(hass.config_entries.async_initialize()),
        create_eager_task(async_get_system_info(hass)),
        create_eager_task(condition.async_setup(hass)),
        create_eager_task(trigger.async_setup(hass)),
    )


@@ -565,7 +563,8 @@ async def async_enable_logging(

    if not log_no_color:
        try:
            from colorlog import ColoredFormatter  # noqa: PLC0415
            # pylint: disable-next=import-outside-toplevel
            from colorlog import ColoredFormatter

            # basicConfig must be called after importing colorlog in order to
            # ensure that the handlers it sets up wraps the correct streams.
@@ -695,10 +694,10 @@ async def async_mount_local_lib_path(config_dir: str) -> str:

def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]:
    """Get domains of components to set up."""
    # The common config section [homeassistant] could be filtered here,
    # but that is not necessary, since it corresponds to the core integration,
    # that is always unconditionally loaded.
    domains = {cv.domain_key(key) for key in config}
    # Filter out the repeating and common config section [homeassistant]
    domains = {
        domain for key in config if (domain := cv.domain_key(key)) != core.DOMAIN
    }

    # Add config entry and default domains
    if not hass.config.recovery_mode:
@@ -726,28 +725,34 @@ async def _async_resolve_domains_and_preload(
    together with all their dependencies.
    """
    domains_to_setup = _get_domains(hass, config)

    # Also process all base platforms since we do not require the manifest
    # to list them as dependencies.
    # We want to later avoid lock contention when multiple integrations try to load
    # their manifests at once.
    platform_integrations = conf_util.extract_platform_integrations(
        config, BASE_PLATFORMS
    )
    # Ensure base platforms that have platform integrations are added to `domains`,
    # so they can be setup first instead of discovering them later when a config
    # entry setup task notices that it's needed and there is already a long line
    # to use the import executor.
    #
    # Additionally process integrations that are defined under base platforms
    # to speed things up.
    # For example if we have
    # sensor:
    #   - platform: template
    #
    # `template` has to be loaded to validate the config for sensor.
    # The more platforms under `sensor:`, the longer
    # `template` has to be loaded to validate the config for sensor
    # so we want to start loading `sensor` as soon as we know
    # it will be needed. The more platforms under `sensor:`, the longer
    # it will take to finish setup for `sensor` because each of these
    # platforms has to be imported before we can validate the config.
    #
    # Thankfully we are migrating away from the platform pattern
    # so this will be less of a problem in the future.
    platform_integrations = conf_util.extract_platform_integrations(
        config, BASE_PLATFORMS
    )
    domains_to_setup.update(platform_integrations)

    # Additionally process base platforms since we do not require the manifest
    # to list them as dependencies.
    # We want to later avoid lock contention when multiple integrations try to load
    # their manifests at once.
    # Also process integrations that are defined under base platforms
    # to speed things up.
    additional_domains_to_process = {
        *BASE_PLATFORMS,
        *chain.from_iterable(platform_integrations.values()),
@@ -865,9 +870,9 @@ async def _async_set_up_integrations(
    domains = set(integrations) & all_domains

    _LOGGER.info(
        "Domains to be set up: %s\nDependencies: %s",
        domains or "{}",
        (all_domains - domains) or "{}",
        "Domains to be set up: %s | %s",
        domains,
        all_domains - domains,
    )

    async_set_domains_to_be_loaded(hass, all_domains)
@@ -876,6 +881,10 @@ async def _async_set_up_integrations(
    if "recorder" in all_domains:
        recorder.async_initialize_recorder(hass)

    # Initialize backup
    if "backup" in all_domains:
        backup.async_initialize_backup(hass)

    stages: list[tuple[str, set[str], int | None]] = [
        *(
            (name, domain_group, timeout)
@@ -908,24 +917,19 @@ async def _async_set_up_integrations(
        stage_all_domains = stage_domains | stage_dep_domains

        _LOGGER.info(
            "Setting up stage %s: %s; already set up: %s\n"
            "Dependencies: %s; already set up: %s",
            "Setting up stage %s: %s | %s\nDependencies: %s | %s",
            name,
            stage_domains,
            (stage_domains_unfiltered - stage_domains) or "{}",
            stage_dep_domains or "{}",
            (stage_dep_domains_unfiltered - stage_dep_domains) or "{}",
            stage_domains_unfiltered - stage_domains,
            stage_dep_domains,
            stage_dep_domains_unfiltered - stage_dep_domains,
        )

        if timeout is None:
            await _async_setup_multi_components(hass, stage_all_domains, config)
            continue
        try:
            async with hass.timeout.async_timeout(
                timeout,
                cool_down=COOLDOWN_TIME,
                cancel_message=f"Bootstrap stage {name} timeout",
            ):
            async with hass.timeout.async_timeout(timeout, cool_down=COOLDOWN_TIME):
                await _async_setup_multi_components(hass, stage_all_domains, config)
        except TimeoutError:
            _LOGGER.warning(
@@ -937,11 +941,7 @@ async def _async_set_up_integrations(
    # Wrap up startup
    _LOGGER.debug("Waiting for startup to wrap up")
    try:
        async with hass.timeout.async_timeout(
            WRAP_UP_TIMEOUT,
            cool_down=COOLDOWN_TIME,
            cancel_message="Bootstrap startup wrap up timeout",
        ):
        async with hass.timeout.async_timeout(WRAP_UP_TIMEOUT, cool_down=COOLDOWN_TIME):
            await hass.async_block_till_done()
    except TimeoutError:
        _LOGGER.warning(
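The bootstrap hunks above stage integration setup, where each stage is a named set of domains with an optional timeout and a `None` timeout means "never cancel this stage" (for example, a recorder database migration). An illustrative sketch of that staging with plain asyncio rather than the hass.timeout API; `set_up_domain` is a stand-in for `async_setup_component`:

import asyncio

async def set_up_domain(domain: str) -> None:
    """Stand-in for setting up a single integration."""
    await asyncio.sleep(0)

async def set_up_stage(name: str, domains: set[str], timeout: float | None) -> None:
    async def _setup_all() -> None:
        await asyncio.gather(*(set_up_domain(d) for d in domains))

    if timeout is None:
        await _setup_all()  # e.g. the recorder substage: never cancelled
        return
    try:
        async with asyncio.timeout(timeout):
            await _setup_all()
    except TimeoutError:
        print(f"Setup of stage {name} timed out")  # bootstrap logs a warning here

asyncio.run(set_up_stage("frontend", {"frontend", "http"}, timeout=5.0))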
@@ -3,7 +3,6 @@
  "name": "Amazon",
  "integrations": [
    "alexa",
    "alexa_devices",
    "amazon_polly",
    "aws",
    "aws_s3",

@@ -1,5 +0,0 @@
{
  "domain": "frient",
  "name": "Frient",
  "iot_standards": ["zigbee"]
}

@@ -1,5 +1,5 @@
{
  "domain": "fritzbox",
  "name": "FRITZ!",
  "name": "FRITZ!Box",
  "integrations": ["fritz", "fritzbox", "fritzbox_callmonitor"]
}

@@ -1,6 +0,0 @@
{
  "domain": "shelly",
  "name": "shelly",
  "integrations": ["shelly"],
  "iot_standards": ["zwave"]
}

@@ -1,11 +1,5 @@
{
  "domain": "sony",
  "name": "Sony",
  "integrations": [
    "braviatv",
    "ps4",
    "sony_projector",
    "songpal",
    "playstation_network"
  ]
  "integrations": ["braviatv", "ps4", "sony_projector", "songpal"]
}

@@ -1,6 +1,5 @@
{
  "domain": "switchbot",
  "name": "SwitchBot",
  "integrations": ["switchbot", "switchbot_cloud"],
  "iot_standards": ["matter"]
  "integrations": ["switchbot", "switchbot_cloud"]
}

@@ -1,5 +1,5 @@
{
  "domain": "third_reality",
  "name": "Third Reality",
  "iot_standards": ["matter", "zigbee"]
  "iot_standards": ["zigbee"]
}

@@ -1,5 +0,0 @@
{
  "domain": "tilt",
  "name": "Tilt",
  "integrations": ["tilt_ble", "tilt_pi"]
}

@@ -1,5 +1,5 @@
{
  "domain": "ubiquiti",
  "name": "Ubiquiti",
  "integrations": ["airos", "unifi", "unifi_direct", "unifiled", "unifiprotect"]
  "integrations": ["unifi", "unifi_direct", "unifiled", "unifiprotect"]
}
@@ -14,24 +14,30 @@ from jaraco.abode.exceptions import (
)
from jaraco.abode.helpers.timeline import Groups as GROUPS
from requests.exceptions import ConnectTimeout, HTTPError
import voluptuous as vol

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
    ATTR_DATE,
    ATTR_DEVICE_ID,
    ATTR_ENTITY_ID,
    ATTR_TIME,
    CONF_PASSWORD,
    CONF_USERNAME,
    EVENT_HOMEASSISTANT_STOP,
    Platform,
)
from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant
from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant, ServiceCall
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.dispatcher import dispatcher_send
from homeassistant.helpers.typing import ConfigType

from .const import CONF_POLLING, DOMAIN, LOGGER
from .services import async_setup_services

SERVICE_SETTINGS = "change_setting"
SERVICE_CAPTURE_IMAGE = "capture_image"
SERVICE_TRIGGER_AUTOMATION = "trigger_automation"

ATTR_DEVICE_NAME = "device_name"
ATTR_DEVICE_TYPE = "device_type"
@@ -39,12 +45,22 @@ ATTR_EVENT_CODE = "event_code"
ATTR_EVENT_NAME = "event_name"
ATTR_EVENT_TYPE = "event_type"
ATTR_EVENT_UTC = "event_utc"
ATTR_SETTING = "setting"
ATTR_USER_NAME = "user_name"
ATTR_APP_TYPE = "app_type"
ATTR_EVENT_BY = "event_by"
ATTR_VALUE = "value"

CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)

CHANGE_SETTING_SCHEMA = vol.Schema(
    {vol.Required(ATTR_SETTING): cv.string, vol.Required(ATTR_VALUE): cv.string}
)

CAPTURE_IMAGE_SCHEMA = vol.Schema({ATTR_ENTITY_ID: cv.entity_ids})

AUTOMATION_SCHEMA = vol.Schema({ATTR_ENTITY_ID: cv.entity_ids})

PLATFORMS = [
    Platform.ALARM_CONTROL_PANEL,
    Platform.BINARY_SENSOR,
@@ -69,7 +85,7 @@ class AbodeSystem:

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the Abode component."""
    async_setup_services(hass)
    setup_hass_services(hass)
    return True


@@ -122,6 +138,60 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    return unload_ok


def setup_hass_services(hass: HomeAssistant) -> None:
    """Home Assistant services."""

    def change_setting(call: ServiceCall) -> None:
        """Change an Abode system setting."""
        setting = call.data[ATTR_SETTING]
        value = call.data[ATTR_VALUE]

        try:
            hass.data[DOMAIN].abode.set_setting(setting, value)
        except AbodeException as ex:
            LOGGER.warning(ex)

    def capture_image(call: ServiceCall) -> None:
        """Capture a new image."""
        entity_ids = call.data[ATTR_ENTITY_ID]

        target_entities = [
            entity_id
            for entity_id in hass.data[DOMAIN].entity_ids
            if entity_id in entity_ids
        ]

        for entity_id in target_entities:
            signal = f"abode_camera_capture_{entity_id}"
            dispatcher_send(hass, signal)

    def trigger_automation(call: ServiceCall) -> None:
        """Trigger an Abode automation."""
        entity_ids = call.data[ATTR_ENTITY_ID]

        target_entities = [
            entity_id
            for entity_id in hass.data[DOMAIN].entity_ids
            if entity_id in entity_ids
        ]

        for entity_id in target_entities:
            signal = f"abode_trigger_automation_{entity_id}"
            dispatcher_send(hass, signal)

    hass.services.async_register(
        DOMAIN, SERVICE_SETTINGS, change_setting, schema=CHANGE_SETTING_SCHEMA
    )

    hass.services.async_register(
        DOMAIN, SERVICE_CAPTURE_IMAGE, capture_image, schema=CAPTURE_IMAGE_SCHEMA
    )

    hass.services.async_register(
        DOMAIN, SERVICE_TRIGGER_AUTOMATION, trigger_automation, schema=AUTOMATION_SCHEMA
    )


async def setup_hass_events(hass: HomeAssistant) -> None:
    """Home Assistant start and stop callbacks."""
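The restored `setup_hass_services` fans each service call out to entities through per-entity dispatcher signals. A toy, in-process sketch of that pattern in plain Python (not the homeassistant.helpers.dispatcher implementation): entities subscribe to their signal at setup, and the service sends it later.

from collections import defaultdict
from collections.abc import Callable

_signals: dict[str, list[Callable[[], None]]] = defaultdict(list)

def dispatcher_connect(signal: str, target: Callable[[], None]) -> None:
    """Subscribe a callback to a named signal."""
    _signals[signal].append(target)

def dispatcher_send(signal: str) -> None:
    """Invoke every callback subscribed to the signal."""
    for target in _signals[signal]:
        target()

# An entity subscribes once at setup...
dispatcher_connect("abode_camera_capture_camera.front_door", lambda: print("capture!"))
# ...and the capture_image service triggers it later.
dispatcher_send("abode_camera_capture_camera.front_door")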
@@ -1,90 +0,0 @@
"""Support for the Abode Security System."""

from __future__ import annotations

from jaraco.abode.exceptions import Exception as AbodeException
import voluptuous as vol

from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.dispatcher import dispatcher_send

from .const import DOMAIN, LOGGER

SERVICE_SETTINGS = "change_setting"
SERVICE_CAPTURE_IMAGE = "capture_image"
SERVICE_TRIGGER_AUTOMATION = "trigger_automation"

ATTR_SETTING = "setting"
ATTR_VALUE = "value"


CHANGE_SETTING_SCHEMA = vol.Schema(
    {vol.Required(ATTR_SETTING): cv.string, vol.Required(ATTR_VALUE): cv.string}
)

CAPTURE_IMAGE_SCHEMA = vol.Schema({ATTR_ENTITY_ID: cv.entity_ids})

AUTOMATION_SCHEMA = vol.Schema({ATTR_ENTITY_ID: cv.entity_ids})


def _change_setting(call: ServiceCall) -> None:
    """Change an Abode system setting."""
    setting = call.data[ATTR_SETTING]
    value = call.data[ATTR_VALUE]

    try:
        call.hass.data[DOMAIN].abode.set_setting(setting, value)
    except AbodeException as ex:
        LOGGER.warning(ex)


def _capture_image(call: ServiceCall) -> None:
    """Capture a new image."""
    entity_ids = call.data[ATTR_ENTITY_ID]

    target_entities = [
        entity_id
        for entity_id in call.hass.data[DOMAIN].entity_ids
        if entity_id in entity_ids
    ]

    for entity_id in target_entities:
        signal = f"abode_camera_capture_{entity_id}"
        dispatcher_send(call.hass, signal)


def _trigger_automation(call: ServiceCall) -> None:
    """Trigger an Abode automation."""
    entity_ids = call.data[ATTR_ENTITY_ID]

    target_entities = [
        entity_id
        for entity_id in call.hass.data[DOMAIN].entity_ids
        if entity_id in entity_ids
    ]

    for entity_id in target_entities:
        signal = f"abode_trigger_automation_{entity_id}"
        dispatcher_send(call.hass, signal)


@callback
def async_setup_services(hass: HomeAssistant) -> None:
    """Home Assistant services."""

    hass.services.async_register(
        DOMAIN, SERVICE_SETTINGS, _change_setting, schema=CHANGE_SETTING_SCHEMA
    )

    hass.services.async_register(
        DOMAIN, SERVICE_CAPTURE_IMAGE, _capture_image, schema=CAPTURE_IMAGE_SCHEMA
    )

    hass.services.async_register(
        DOMAIN,
        SERVICE_TRIGGER_AUTOMATION,
        _trigger_automation,
        schema=AUTOMATION_SCHEMA,
    )
@@ -40,10 +40,9 @@ class AcmedaFlowHandler(ConfigFlow, domain=DOMAIN):
            entry.unique_id for entry in self._async_current_entries()
        }

        hubs: list[aiopulse.Hub] = []
        with suppress(TimeoutError):
            async with timeout(5):
                hubs = [
        hubs: list[aiopulse.Hub] = [
            hub
            async for hub in aiopulse.Hub.discover()
            if hub.id not in already_configured
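The older Acmeda code inlines the discovery comprehension; the newer code wraps it in a suppressed five-second timeout. A self-contained sketch of that bounded-discovery idiom with plain asyncio, where `discover()` stands in for `aiopulse.Hub.discover()`; note that on timeout the comprehension never completes, so the list stays empty:

import asyncio
from collections.abc import AsyncIterator
from contextlib import suppress

async def discover() -> AsyncIterator[str]:
    """Pretend network discovery that yields hub IDs."""
    for hub_id in ("hub-1", "hub-2"):
        await asyncio.sleep(0.1)
        yield hub_id

async def find_new_hubs(already_configured: set[str]) -> list[str]:
    hubs: list[str] = []
    with suppress(TimeoutError):
        async with asyncio.timeout(5):
            hubs = [h async for h in discover() if h not in already_configured]
    return hubs  # empty if discovery timed out before finishing

print(asyncio.run(find_new_hubs({"hub-2"})))  # ['hub-1']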
@@ -8,7 +8,7 @@ from homeassistant.core import HomeAssistant
from .const import CONNECTION_TYPE, LOCAL
from .coordinator import AdaxCloudCoordinator, AdaxConfigEntry, AdaxLocalCoordinator

PLATFORMS = [Platform.CLIMATE, Platform.SENSOR]
PLATFORMS = [Platform.CLIMATE]


async def async_setup_entry(hass: HomeAssistant, entry: AdaxConfigEntry) -> bool:
@@ -41,30 +41,7 @@ class AdaxCloudCoordinator(DataUpdateCoordinator[dict[str, dict[str, Any]]]):

    async def _async_update_data(self) -> dict[str, dict[str, Any]]:
        """Fetch data from the Adax."""
        try:
            if hasattr(self.adax_data_handler, "fetch_rooms_info"):
                rooms = await self.adax_data_handler.fetch_rooms_info() or []
                _LOGGER.debug("fetch_rooms_info returned: %s", rooms)
            else:
                _LOGGER.debug("fetch_rooms_info method not available, using get_rooms")
                rooms = []

            if not rooms:
                _LOGGER.debug(
                    "No rooms from fetch_rooms_info, trying get_rooms as fallback"
                )
                rooms = await self.adax_data_handler.get_rooms() or []
                _LOGGER.debug("get_rooms fallback returned: %s", rooms)

            if not rooms:
                raise UpdateFailed("No rooms available from Adax API")

        except OSError as e:
            raise UpdateFailed(f"Error communicating with API: {e}") from e

        for room in rooms:
            room["energyWh"] = int(room.get("energyWh", 0))

        rooms = await self.adax_data_handler.get_rooms() or []
        return {r["id"]: r for r in rooms}
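The removed coordinator body prefers a richer client call and degrades gracefully. The same fetch-with-fallback pattern in isolation; `client` and its two methods are stand-ins for the real Adax handler:

class UpdateFailed(Exception):
    """Raised when a refresh cycle cannot produce data."""

async def fetch_rooms(client) -> dict[str, dict]:
    rooms = []
    if hasattr(client, "fetch_rooms_info"):  # newer client versions only
        rooms = await client.fetch_rooms_info() or []
    if not rooms:  # older clients, or an empty rich response
        rooms = await client.get_rooms() or []
    if not rooms:
        raise UpdateFailed("No rooms available from Adax API")
    return {room["id"]: room for room in rooms}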
@@ -1,7 +1,7 @@
{
  "domain": "adax",
  "name": "Adax",
  "codeowners": ["@danielhiversen", "@lazytarget"],
  "codeowners": ["@danielhiversen"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/adax",
  "iot_class": "local_polling",
@@ -1,77 +0,0 @@
"""Support for Adax energy sensors."""

from __future__ import annotations

from typing import cast

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorStateClass,
)
from homeassistant.const import UnitOfEnergy
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from . import AdaxConfigEntry
from .const import CONNECTION_TYPE, DOMAIN, LOCAL
from .coordinator import AdaxCloudCoordinator


async def async_setup_entry(
    hass: HomeAssistant,
    entry: AdaxConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the Adax energy sensors with config flow."""
    if entry.data.get(CONNECTION_TYPE) != LOCAL:
        cloud_coordinator = cast(AdaxCloudCoordinator, entry.runtime_data)

        # Create individual energy sensors for each device
        async_add_entities(
            AdaxEnergySensor(cloud_coordinator, device_id)
            for device_id in cloud_coordinator.data
        )


class AdaxEnergySensor(CoordinatorEntity[AdaxCloudCoordinator], SensorEntity):
    """Representation of an Adax energy sensor."""

    _attr_has_entity_name = True
    _attr_translation_key = "energy"
    _attr_device_class = SensorDeviceClass.ENERGY
    _attr_native_unit_of_measurement = UnitOfEnergy.WATT_HOUR
    _attr_suggested_unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR
    _attr_state_class = SensorStateClass.TOTAL_INCREASING
    _attr_suggested_display_precision = 3

    def __init__(
        self,
        coordinator: AdaxCloudCoordinator,
        device_id: str,
    ) -> None:
        """Initialize the energy sensor."""
        super().__init__(coordinator)
        self._device_id = device_id
        room = coordinator.data[device_id]

        self._attr_unique_id = f"{room['homeId']}_{device_id}_energy"
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, device_id)},
            name=room["name"],
            manufacturer="Adax",
        )

    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        return (
            super().available and "energyWh" in self.coordinator.data[self._device_id]
        )

    @property
    def native_value(self) -> int:
        """Return the native value of the sensor."""
        return int(self.coordinator.data[self._device_id]["energyWh"])
@@ -185,7 +185,6 @@ FORECAST_SENSORS: Final[tuple[AemetSensorEntityDescription, ...]] = (
        keys=[AOD_TOWN, AOD_FORECAST_DAILY, AOD_FORECAST_CURRENT, AOD_WIND_DIRECTION],
        name="Daily forecast wind bearing",
        native_unit_of_measurement=DEGREE,
        device_class=SensorDeviceClass.WIND_DIRECTION,
    ),
    AemetSensorEntityDescription(
        entity_registry_enabled_default=False,
@@ -193,7 +192,6 @@ FORECAST_SENSORS: Final[tuple[AemetSensorEntityDescription, ...]] = (
        keys=[AOD_TOWN, AOD_FORECAST_HOURLY, AOD_FORECAST_CURRENT, AOD_WIND_DIRECTION],
        name="Hourly forecast wind bearing",
        native_unit_of_measurement=DEGREE,
        device_class=SensorDeviceClass.WIND_DIRECTION,
    ),
    AemetSensorEntityDescription(
        entity_registry_enabled_default=False,
@@ -336,8 +334,7 @@ WEATHER_SENSORS: Final[tuple[AemetSensorEntityDescription, ...]] = (
        keys=[AOD_WEATHER, AOD_WIND_DIRECTION],
        name="Wind bearing",
        native_unit_of_measurement=DEGREE,
        state_class=SensorStateClass.MEASUREMENT_ANGLE,
        device_class=SensorDeviceClass.WIND_DIRECTION,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    AemetSensorEntityDescription(
        key=ATTR_API_WIND_MAX_SPEED,
@@ -15,7 +15,7 @@ from homeassistant.helpers.entity_platform import (
)

from . import AgentDVRConfigEntry
from .const import ATTRIBUTION, CAMERA_SCAN_INTERVAL_SECS, DOMAIN
from .const import ATTRIBUTION, CAMERA_SCAN_INTERVAL_SECS, DOMAIN as AGENT_DOMAIN

SCAN_INTERVAL = timedelta(seconds=CAMERA_SCAN_INTERVAL_SECS)

@@ -82,7 +82,7 @@ class AgentCamera(MjpegCamera):

            still_image_url=f"{device.client._server_url}{device.still_image_url}&size={device.mjpegStreamWidth}x{device.mjpegStreamHeight}",  # noqa: SLF001
        )
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, self.unique_id)},
            identifiers={(AGENT_DOMAIN, self.unique_id)},
            manufacturer="Agent",
            model="Camera",
            name=f"{device.client.name} {device.name}",
@@ -1,231 +0,0 @@
"""Integration to offer AI tasks to Home Assistant."""

import logging
from typing import Any

from aiohttp import web
import voluptuous as vol

from homeassistant.components.http import KEY_HASS, HomeAssistantView
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ENTITY_ID, CONF_DESCRIPTION, CONF_SELECTOR
from homeassistant.core import (
    HassJobType,
    HomeAssistant,
    ServiceCall,
    ServiceResponse,
    SupportsResponse,
    callback,
)
from homeassistant.helpers import config_validation as cv, selector, storage
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.typing import UNDEFINED, ConfigType, UndefinedType

from .const import (
    ATTR_ATTACHMENTS,
    ATTR_INSTRUCTIONS,
    ATTR_REQUIRED,
    ATTR_STRUCTURE,
    ATTR_TASK_NAME,
    DATA_COMPONENT,
    DATA_IMAGES,
    DATA_PREFERENCES,
    DOMAIN,
    SERVICE_GENERATE_DATA,
    SERVICE_GENERATE_IMAGE,
    AITaskEntityFeature,
)
from .entity import AITaskEntity
from .http import async_setup as async_setup_http
from .task import (
    GenDataTask,
    GenDataTaskResult,
    GenImageTask,
    GenImageTaskResult,
    ImageData,
    async_generate_data,
    async_generate_image,
)

__all__ = [
    "DOMAIN",
    "AITaskEntity",
    "AITaskEntityFeature",
    "GenDataTask",
    "GenDataTaskResult",
    "GenImageTask",
    "GenImageTaskResult",
    "ImageData",
    "async_generate_data",
    "async_generate_image",
    "async_setup",
    "async_setup_entry",
    "async_unload_entry",
]

_LOGGER = logging.getLogger(__name__)

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

STRUCTURE_FIELD_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_DESCRIPTION): str,
        vol.Optional(ATTR_REQUIRED): bool,
        vol.Required(CONF_SELECTOR): selector.validate_selector,
    }
)


def _validate_structure_fields(value: dict[str, Any]) -> vol.Schema:
    """Validate the structure fields as a voluptuous Schema."""
    if not isinstance(value, dict):
        raise vol.Invalid("Structure must be a dictionary")
    fields = {}
    for k, v in value.items():
        field_class = vol.Required if v.get(ATTR_REQUIRED, False) else vol.Optional
        fields[field_class(k, description=v.get(CONF_DESCRIPTION))] = selector.selector(
            v[CONF_SELECTOR]
        )
    return vol.Schema(fields, extra=vol.PREVENT_EXTRA)


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Register the process service."""
    entity_component = EntityComponent[AITaskEntity](_LOGGER, DOMAIN, hass)
    hass.data[DATA_COMPONENT] = entity_component
    hass.data[DATA_PREFERENCES] = AITaskPreferences(hass)
    hass.data[DATA_IMAGES] = {}
    await hass.data[DATA_PREFERENCES].async_load()
    async_setup_http(hass)
    hass.http.register_view(ImageView)
    hass.services.async_register(
        DOMAIN,
        SERVICE_GENERATE_DATA,
        async_service_generate_data,
        schema=vol.Schema(
            {
                vol.Required(ATTR_TASK_NAME): cv.string,
                vol.Optional(ATTR_ENTITY_ID): cv.entity_id,
                vol.Required(ATTR_INSTRUCTIONS): cv.string,
                vol.Optional(ATTR_STRUCTURE): vol.All(
                    vol.Schema({str: STRUCTURE_FIELD_SCHEMA}),
                    _validate_structure_fields,
                ),
                vol.Optional(ATTR_ATTACHMENTS): vol.All(
                    cv.ensure_list, [selector.MediaSelector({"accept": ["*/*"]})]
                ),
            }
        ),
        supports_response=SupportsResponse.ONLY,
        job_type=HassJobType.Coroutinefunction,
    )
    hass.services.async_register(
        DOMAIN,
        SERVICE_GENERATE_IMAGE,
        async_service_generate_image,
        schema=vol.Schema(
            {
                vol.Required(ATTR_TASK_NAME): cv.string,
                vol.Required(ATTR_ENTITY_ID): cv.entity_id,
                vol.Required(ATTR_INSTRUCTIONS): cv.string,
                vol.Optional(ATTR_ATTACHMENTS): vol.All(
                    cv.ensure_list, [selector.MediaSelector({"accept": ["*/*"]})]
                ),
            }
        ),
        supports_response=SupportsResponse.ONLY,
        job_type=HassJobType.Coroutinefunction,
    )
    return True


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up a config entry."""
    return await hass.data[DATA_COMPONENT].async_setup_entry(entry)


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.data[DATA_COMPONENT].async_unload_entry(entry)


async def async_service_generate_data(call: ServiceCall) -> ServiceResponse:
    """Run the data task service."""
    result = await async_generate_data(hass=call.hass, **call.data)
    return result.as_dict()


async def async_service_generate_image(call: ServiceCall) -> ServiceResponse:
    """Run the image task service."""
    return await async_generate_image(hass=call.hass, **call.data)


class AITaskPreferences:
    """AI Task preferences."""

    KEYS = ("gen_data_entity_id",)

    gen_data_entity_id: str | None = None

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize the preferences."""
        self._store: storage.Store[dict[str, str | None]] = storage.Store(
            hass, 1, DOMAIN
        )

    async def async_load(self) -> None:
        """Load the data from the store."""
        data = await self._store.async_load()
        if data is None:
            return
        for key in self.KEYS:
            setattr(self, key, data[key])

    @callback
    def async_set_preferences(
        self,
        *,
        gen_data_entity_id: str | None | UndefinedType = UNDEFINED,
    ) -> None:
        """Set the preferences."""
        changed = False
        for key, value in (("gen_data_entity_id", gen_data_entity_id),):
            if value is not UNDEFINED:
                if getattr(self, key) != value:
                    setattr(self, key, value)
                    changed = True

        if not changed:
            return

        self._store.async_delay_save(self.as_dict, 10)

    @callback
    def as_dict(self) -> dict[str, str | None]:
        """Get the current preferences."""
        return {key: getattr(self, key) for key in self.KEYS}


class ImageView(HomeAssistantView):
    """View to generated images."""

    url = f"/api/{DOMAIN}/images/{{filename}}"
    name = f"api:{DOMAIN}/images"

    async def get(
        self,
        request: web.Request,
        filename: str,
    ) -> web.Response:
        """Serve image."""
        hass = request.app[KEY_HASS]
        image_storage = hass.data[DATA_IMAGES]
        image_data = image_storage.get(filename)

        if image_data is None:
            raise web.HTTPNotFound

        return web.Response(
            body=image_data.data,
            content_type=image_data.mime_type,
        )
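The `generate_data` service above validates a caller-supplied `structure` in two stages: first each field's shape, then compilation into a second schema for the model's output. A simplified sketch of that two-stage validation with voluptuous; selectors are reduced to plain dicts and `object` validators here, since homeassistant.helpers.selector does the real work:

import voluptuous as vol

# Stage 1: validate each field definition's shape.
STRUCTURE_FIELD_SCHEMA = vol.Schema(
    {
        vol.Optional("description"): str,
        vol.Optional("required"): bool,
        vol.Required("selector"): dict,
    }
)

structure = {
    "name": {"selector": {"text": {}}, "description": "Name of the user", "required": True},
    "age": {"selector": {"number": {}}, "description": "Age of the user"},
}
validated = vol.Schema({str: STRUCTURE_FIELD_SCHEMA})(structure)

# Stage 2: compile the definitions into an output schema.
fields = {
    (vol.Required if spec.get("required") else vol.Optional)(key): object
    for key, spec in validated.items()
}
output_schema = vol.Schema(fields, extra=vol.PREVENT_EXTRA)
output_schema({"name": "Paulus", "age": 52})  # passes; unknown keys would not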
@@ -1,49 +0,0 @@
"""Constants for the AI Task integration."""

from __future__ import annotations

from enum import IntFlag
from typing import TYPE_CHECKING, Final

from homeassistant.util.hass_dict import HassKey

if TYPE_CHECKING:
    from homeassistant.helpers.entity_component import EntityComponent

    from . import AITaskPreferences
    from .entity import AITaskEntity
    from .task import ImageData

DOMAIN = "ai_task"
DATA_COMPONENT: HassKey[EntityComponent[AITaskEntity]] = HassKey(DOMAIN)
DATA_PREFERENCES: HassKey[AITaskPreferences] = HassKey(f"{DOMAIN}_preferences")
DATA_IMAGES: HassKey[dict[str, ImageData]] = HassKey(f"{DOMAIN}_images")

IMAGE_EXPIRY_TIME = 60 * 60  # 1 hour
MAX_IMAGES = 20

SERVICE_GENERATE_DATA = "generate_data"
SERVICE_GENERATE_IMAGE = "generate_image"

ATTR_INSTRUCTIONS: Final = "instructions"
ATTR_TASK_NAME: Final = "task_name"
ATTR_STRUCTURE: Final = "structure"
ATTR_REQUIRED: Final = "required"
ATTR_ATTACHMENTS: Final = "attachments"

DEFAULT_SYSTEM_PROMPT = (
    "You are a Home Assistant expert and help users with their tasks."
)


class AITaskEntityFeature(IntFlag):
    """Supported features of the AI task entity."""

    GENERATE_DATA = 1
    """Generate data based on instructions."""

    SUPPORT_ATTACHMENTS = 2
    """Support attachments with generate data."""

    GENERATE_IMAGE = 4
    """Generate images based on instructions."""
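A quick usage sketch for the AITaskEntityFeature IntFlag above: features combine with bitwise OR and are tested with membership.

from enum import IntFlag

class AITaskEntityFeature(IntFlag):
    GENERATE_DATA = 1
    SUPPORT_ATTACHMENTS = 2
    GENERATE_IMAGE = 4

supported = AITaskEntityFeature.GENERATE_DATA | AITaskEntityFeature.SUPPORT_ATTACHMENTS
assert AITaskEntityFeature.GENERATE_DATA in supported
assert AITaskEntityFeature.GENERATE_IMAGE not in supported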
@@ -1,126 +0,0 @@
"""Entity for the AI Task integration."""

from collections.abc import AsyncGenerator
import contextlib
from typing import final

from propcache.api import cached_property

from homeassistant.components.conversation import (
    ChatLog,
    UserContent,
    async_get_chat_log,
)
from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
from homeassistant.helpers import llm
from homeassistant.helpers.chat_session import ChatSession
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.util import dt as dt_util

from .const import DEFAULT_SYSTEM_PROMPT, DOMAIN, AITaskEntityFeature
from .task import GenDataTask, GenDataTaskResult, GenImageTask, GenImageTaskResult


class AITaskEntity(RestoreEntity):
    """Entity that supports conversations."""

    _attr_should_poll = False
    _attr_supported_features = AITaskEntityFeature(0)
    __last_activity: str | None = None

    @property
    @final
    def state(self) -> str | None:
        """Return the state of the entity."""
        if self.__last_activity is None:
            return None
        return self.__last_activity

    @cached_property
    def supported_features(self) -> AITaskEntityFeature:
        """Flag supported features."""
        return self._attr_supported_features

    async def async_internal_added_to_hass(self) -> None:
        """Call when the entity is added to hass."""
        await super().async_internal_added_to_hass()
        state = await self.async_get_last_state()
        if (
            state is not None
            and state.state is not None
            and state.state not in (STATE_UNAVAILABLE, STATE_UNKNOWN)
        ):
            self.__last_activity = state.state

    @final
    @contextlib.asynccontextmanager
    async def _async_get_ai_task_chat_log(
        self,
        session: ChatSession,
        task: GenDataTask | GenImageTask,
    ) -> AsyncGenerator[ChatLog]:
        """Context manager used to manage the ChatLog used during an AI Task."""
        # pylint: disable-next=contextmanager-generator-missing-cleanup
        with (
            async_get_chat_log(
                self.hass,
                session,
                None,
            ) as chat_log,
        ):
            await chat_log.async_provide_llm_data(
                llm.LLMContext(
                    platform=self.platform.domain,
                    context=None,
                    language=None,
                    assistant=DOMAIN,
                    device_id=None,
                ),
                user_llm_prompt=DEFAULT_SYSTEM_PROMPT,
            )

            chat_log.async_add_user_content(
                UserContent(task.instructions, attachments=task.attachments)
            )

            yield chat_log

    @final
    async def internal_async_generate_data(
        self,
        session: ChatSession,
        task: GenDataTask,
    ) -> GenDataTaskResult:
        """Run a gen data task."""
        self.__last_activity = dt_util.utcnow().isoformat()
        self.async_write_ha_state()
        async with self._async_get_ai_task_chat_log(session, task) as chat_log:
            return await self._async_generate_data(task, chat_log)

    async def _async_generate_data(
        self,
        task: GenDataTask,
        chat_log: ChatLog,
    ) -> GenDataTaskResult:
        """Handle a gen data task."""
        raise NotImplementedError

    @final
    async def internal_async_generate_image(
        self,
        session: ChatSession,
        task: GenImageTask,
    ) -> GenImageTaskResult:
        """Run a gen image task."""
        self.__last_activity = dt_util.utcnow().isoformat()
        self.async_write_ha_state()
        async with self._async_get_ai_task_chat_log(session, task) as chat_log:
            return await self._async_generate_image(task, chat_log)

    async def _async_generate_image(
        self,
        task: GenImageTask,
        chat_log: ChatLog,
    ) -> GenImageTaskResult:
        """Handle a gen image task."""
        raise NotImplementedError
@@ -1,54 +0,0 @@
"""HTTP endpoint for AI Task integration."""

from typing import Any

import voluptuous as vol

from homeassistant.components import websocket_api
from homeassistant.core import HomeAssistant, callback

from .const import DATA_PREFERENCES


@callback
def async_setup(hass: HomeAssistant) -> None:
    """Set up the HTTP API for the conversation integration."""
    websocket_api.async_register_command(hass, websocket_get_preferences)
    websocket_api.async_register_command(hass, websocket_set_preferences)


@websocket_api.websocket_command(
    {
        vol.Required("type"): "ai_task/preferences/get",
    }
)
@callback
def websocket_get_preferences(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Get AI task preferences."""
    preferences = hass.data[DATA_PREFERENCES]
    connection.send_result(msg["id"], preferences.as_dict())


@websocket_api.websocket_command(
    {
        vol.Required("type"): "ai_task/preferences/set",
        vol.Optional("gen_data_entity_id"): vol.Any(str, None),
    }
)
@websocket_api.require_admin
@callback
def websocket_set_preferences(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Set AI task preferences."""
    preferences = hass.data[DATA_PREFERENCES]
    msg.pop("type")
    msg_id = msg.pop("id")
    preferences.async_set_preferences(**msg)
    connection.send_result(msg_id, preferences.as_dict())
@@ -1,15 +0,0 @@
{
  "entity_component": {
    "_": {
      "default": "mdi:star-four-points"
    }
  },
  "services": {
    "generate_data": {
      "service": "mdi:file-star-four-points-outline"
    },
    "generate_image": {
      "service": "mdi:star-four-points-box-outline"
    }
  }
}
@@ -1,10 +0,0 @@
{
  "domain": "ai_task",
  "name": "AI Task",
  "after_dependencies": ["camera", "http"],
  "codeowners": ["@home-assistant/core"],
  "dependencies": ["conversation", "media_source"],
  "documentation": "https://www.home-assistant.io/integrations/ai_task",
  "integration_type": "entity",
  "quality_scale": "internal"
}
@@ -1,90 +0,0 @@
"""Expose images as media sources."""

from __future__ import annotations

from datetime import timedelta
import logging

from homeassistant.components.http.auth import async_sign_path
from homeassistant.components.media_player import BrowseError, MediaClass
from homeassistant.components.media_source import (
    BrowseMediaSource,
    MediaSource,
    MediaSourceItem,
    PlayMedia,
    Unresolvable,
)
from homeassistant.core import HomeAssistant

from .const import DATA_IMAGES, DOMAIN, IMAGE_EXPIRY_TIME

_LOGGER = logging.getLogger(__name__)


async def async_get_media_source(hass: HomeAssistant) -> ImageMediaSource:
    """Set up image media source."""
    _LOGGER.debug("Setting up image media source")
    return ImageMediaSource(hass)


class ImageMediaSource(MediaSource):
    """Provide images as media sources."""

    name: str = "AI Generated Images"

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize ImageMediaSource."""
        super().__init__(DOMAIN)
        self.hass = hass

    async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia:
        """Resolve media to a url."""
        image_storage = self.hass.data[DATA_IMAGES]
        image = image_storage.get(item.identifier)

        if image is None:
            raise Unresolvable(f"Could not resolve media item: {item.identifier}")

        return PlayMedia(
            async_sign_path(
                self.hass,
                f"/api/{DOMAIN}/images/{item.identifier}",
                timedelta(seconds=IMAGE_EXPIRY_TIME or 1800),
            ),
            image.mime_type,
        )

    async def async_browse_media(
        self,
        item: MediaSourceItem,
    ) -> BrowseMediaSource:
        """Return media."""
        if item.identifier:
            raise BrowseError("Unknown item")

        image_storage = self.hass.data[DATA_IMAGES]

        children = [
            BrowseMediaSource(
                domain=DOMAIN,
                identifier=filename,
                media_class=MediaClass.IMAGE,
                media_content_type=image.mime_type,
                title=image.title or filename,
                can_play=True,
                can_expand=False,
            )
            for filename, image in image_storage.items()
        ]

        return BrowseMediaSource(
            domain=DOMAIN,
            identifier=None,
            media_class=MediaClass.APP,
            media_content_type="",
            title="AI Generated Images",
            can_play=False,
            can_expand=True,
            children_media_class=MediaClass.IMAGE,
            children=children,
        )
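`async_resolve_media` above returns a signed, expiring URL for each generated image. A generic sketch of the signed-path concept using an HMAC over path plus expiry; Home Assistant's actual `async_sign_path` differs (it is JWT-based), so treat this purely as an illustration of the idea:

import hashlib
import hmac
import time

SECRET = b"server-side-secret"  # placeholder; never hard-code real secrets

def sign_path(path: str, ttl: int = 3600) -> str:
    """Append an expiry timestamp and a signature covering path + expiry."""
    expires = int(time.time()) + ttl
    sig = hmac.new(SECRET, f"{path}:{expires}".encode(), hashlib.sha256).hexdigest()
    return f"{path}?exp={expires}&sig={sig}"

def verify_path(path: str, expires: int, sig: str) -> bool:
    """Reject expired links and signatures that do not match."""
    if time.time() > expires:
        return False
    expected = hmac.new(SECRET, f"{path}:{expires}".encode(), hashlib.sha256).hexdigest()
    return hmac.compare_digest(expected, sig)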
@@ -1,59 +0,0 @@
generate_data:
  fields:
    task_name:
      example: "home summary"
      required: true
      selector:
        text:
    instructions:
      example: "Generate a funny notification that the garage door was left open"
      required: true
      selector:
        text:
          multiline: true
    entity_id:
      required: false
      selector:
        entity:
          filter:
            domain: ai_task
            supported_features:
              - ai_task.AITaskEntityFeature.GENERATE_DATA
    structure:
      required: false
      example: '{ "name": { "selector": { "text": }, "description": "Name of the user", "required": "True" } } }, "age": { "selector": { "number": }, "description": "Age of the user" } }'
      selector:
        object:
    attachments:
      required: false
      selector:
        media:
          accept:
            - "*"
generate_image:
  fields:
    task_name:
      example: "picture of a dog"
      required: true
      selector:
        text:
    instructions:
      example: "Generate a high quality square image of a dog on transparent background"
      required: true
      selector:
        text:
          multiline: true
    entity_id:
      required: true
      selector:
        entity:
          filter:
            domain: ai_task
            supported_features:
              - ai_task.AITaskEntityFeature.GENERATE_IMAGE
    attachments:
      required: false
      selector:
        media:
          accept:
            - "*"
@@ -1,52 +0,0 @@
|
||||
{
|
||||
"services": {
|
||||
"generate_data": {
|
||||
"name": "Generate data",
|
||||
"description": "Uses AI to run a task that generates data.",
|
||||
"fields": {
|
||||
"task_name": {
|
||||
"name": "Task name",
|
||||
"description": "Name of the task."
|
||||
},
|
||||
"instructions": {
|
||||
"name": "Instructions",
|
||||
"description": "Instructions on what needs to be done."
|
||||
},
|
||||
"entity_id": {
|
||||
"name": "Entity ID",
|
||||
"description": "Entity ID to run the task on. If not provided, the preferred entity will be used."
|
||||
},
|
||||
"structure": {
|
||||
"name": "Structured output",
|
||||
"description": "When set, the AI Task will output fields with this in structure. The structure is a dictionary where the keys are the field names and the values contain a 'description', a 'selector', and an optional 'required' field."
|
||||
        },
        "attachments": {
          "name": "Attachments",
          "description": "List of files to attach for multi-modal AI analysis."
        }
      }
    },
    "generate_image": {
      "name": "Generate image",
      "description": "Uses AI to generate an image.",
      "fields": {
        "task_name": {
          "name": "Task name",
          "description": "Name of the task."
        },
        "instructions": {
          "name": "Instructions",
          "description": "Instructions that explain the image to be generated."
        },
        "entity_id": {
          "name": "Entity ID",
          "description": "Entity ID to run the task on."
        },
        "attachments": {
          "name": "Attachments",
          "description": "List of files to attach for use as references."
        }
      }
    }
  }
}
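The structured-output description above is easier to read next to a concrete value; a minimal sketch, where the field names and selector payloads are illustrative only:

# Hedged sketch of a structure mapping as described above; the field
# names and selector payloads are illustrative, not part of the source.
structure = {
    "name": {
        "description": "Name of the user",
        "required": True,
        "selector": {"text": None},
    },
    "age": {
        "description": "Age of the user",
        "selector": {"number": None},
    },
}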
@@ -1,366 +0,0 @@
"""AI tasks to be handled by agents."""

from __future__ import annotations

from dataclasses import dataclass
from datetime import datetime, timedelta
from functools import partial
import mimetypes
from pathlib import Path
import tempfile
from typing import Any

import voluptuous as vol

from homeassistant.components import camera, conversation, media_source
from homeassistant.components.http.auth import async_sign_path
from homeassistant.core import HomeAssistant, ServiceResponse, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.chat_session import ChatSession, async_get_chat_session
from homeassistant.helpers.event import async_call_later
from homeassistant.helpers.network import get_url
from homeassistant.util import RE_SANITIZE_FILENAME, slugify

from .const import (
    DATA_COMPONENT,
    DATA_IMAGES,
    DATA_PREFERENCES,
    DOMAIN,
    IMAGE_EXPIRY_TIME,
    MAX_IMAGES,
    AITaskEntityFeature,
)


def _save_camera_snapshot(image: camera.Image) -> Path:
    """Save camera snapshot to temp file."""
    with tempfile.NamedTemporaryFile(
        mode="wb",
        suffix=mimetypes.guess_extension(image.content_type, False),
        delete=False,
    ) as temp_file:
        temp_file.write(image.content)
        return Path(temp_file.name)


async def _resolve_attachments(
    hass: HomeAssistant,
    session: ChatSession,
    attachments: list[dict] | None = None,
) -> list[conversation.Attachment]:
    """Resolve attachments for a task."""
    resolved_attachments: list[conversation.Attachment] = []
    created_files: list[Path] = []

    for attachment in attachments or []:
        media_content_id = attachment["media_content_id"]

        # Special case for camera media sources
        if media_content_id.startswith("media-source://camera/"):
            # Extract entity_id from the media content ID
            entity_id = media_content_id.removeprefix("media-source://camera/")

            # Get snapshot from camera
            image = await camera.async_get_image(hass, entity_id)

            temp_filename = await hass.async_add_executor_job(
                _save_camera_snapshot, image
            )
            created_files.append(temp_filename)

            resolved_attachments.append(
                conversation.Attachment(
                    media_content_id=media_content_id,
                    mime_type=image.content_type,
                    path=temp_filename,
                )
            )
        else:
            # Handle regular media sources
            media = await media_source.async_resolve_media(hass, media_content_id, None)
            if media.path is None:
                raise HomeAssistantError(
                    "Only local attachments are currently supported"
                )
            resolved_attachments.append(
                conversation.Attachment(
                    media_content_id=media_content_id,
                    mime_type=media.mime_type,
                    path=media.path,
                )
            )

    if not created_files:
        return resolved_attachments

    def cleanup_files() -> None:
        """Cleanup temporary files."""
        for file in created_files:
            file.unlink(missing_ok=True)

    @callback
    def cleanup_files_callback() -> None:
        """Cleanup temporary files."""
        hass.async_add_executor_job(cleanup_files)

    session.async_on_cleanup(cleanup_files_callback)

    return resolved_attachments


async def async_generate_data(
    hass: HomeAssistant,
    *,
    task_name: str,
    entity_id: str | None = None,
    instructions: str,
    structure: vol.Schema | None = None,
    attachments: list[dict] | None = None,
) -> GenDataTaskResult:
    """Run a data generation task in the AI Task integration."""
    if entity_id is None:
        entity_id = hass.data[DATA_PREFERENCES].gen_data_entity_id

    if entity_id is None:
        raise HomeAssistantError("No entity_id provided and no preferred entity set")

    entity = hass.data[DATA_COMPONENT].get_entity(entity_id)
    if entity is None:
        raise HomeAssistantError(f"AI Task entity {entity_id} not found")

    if AITaskEntityFeature.GENERATE_DATA not in entity.supported_features:
        raise HomeAssistantError(
            f"AI Task entity {entity_id} does not support generating data"
        )

    if (
        attachments
        and AITaskEntityFeature.SUPPORT_ATTACHMENTS not in entity.supported_features
    ):
        raise HomeAssistantError(
            f"AI Task entity {entity_id} does not support attachments"
        )

    with async_get_chat_session(hass) as session:
        resolved_attachments = await _resolve_attachments(hass, session, attachments)

        return await entity.internal_async_generate_data(
            session,
            GenDataTask(
                name=task_name,
                instructions=instructions,
                structure=structure,
                attachments=resolved_attachments or None,
            ),
        )


def _cleanup_images(image_storage: dict[str, ImageData], num_to_remove: int) -> None:
    """Remove old images to keep the storage size under the limit."""
    if num_to_remove <= 0:
        return

    if num_to_remove >= len(image_storage):
        image_storage.clear()
        return

    sorted_images = sorted(
        image_storage.items(),
        key=lambda item: item[1].timestamp,
    )

    for filename, _ in sorted_images[:num_to_remove]:
        image_storage.pop(filename, None)


async def async_generate_image(
    hass: HomeAssistant,
    *,
    task_name: str,
    entity_id: str,
    instructions: str,
    attachments: list[dict] | None = None,
) -> ServiceResponse:
    """Run an image generation task in the AI Task integration."""
    entity = hass.data[DATA_COMPONENT].get_entity(entity_id)
    if entity is None:
        raise HomeAssistantError(f"AI Task entity {entity_id} not found")

    if AITaskEntityFeature.GENERATE_IMAGE not in entity.supported_features:
        raise HomeAssistantError(
            f"AI Task entity {entity_id} does not support generating images"
        )

    if (
        attachments
        and AITaskEntityFeature.SUPPORT_ATTACHMENTS not in entity.supported_features
    ):
        raise HomeAssistantError(
            f"AI Task entity {entity_id} does not support attachments"
        )

    with async_get_chat_session(hass) as session:
        resolved_attachments = await _resolve_attachments(hass, session, attachments)

        task_result = await entity.internal_async_generate_image(
            session,
            GenImageTask(
                name=task_name,
                instructions=instructions,
                attachments=resolved_attachments or None,
            ),
        )

    service_result = task_result.as_dict()
    image_data = service_result.pop("image_data")
    if service_result.get("revised_prompt") is None:
        service_result["revised_prompt"] = instructions

    image_storage = hass.data[DATA_IMAGES]

    if len(image_storage) + 1 > MAX_IMAGES:
        _cleanup_images(image_storage, len(image_storage) + 1 - MAX_IMAGES)

    current_time = datetime.now()
    ext = mimetypes.guess_extension(task_result.mime_type, False) or ".png"
    sanitized_task_name = RE_SANITIZE_FILENAME.sub("", slugify(task_name))
    filename = f"{current_time.strftime('%Y-%m-%d_%H%M%S')}_{sanitized_task_name}{ext}"

    image_storage[filename] = ImageData(
        data=image_data,
        timestamp=int(current_time.timestamp()),
        mime_type=task_result.mime_type,
        title=service_result["revised_prompt"],
    )

    def _purge_image(filename: str, now: datetime) -> None:
        """Remove image from storage."""
        image_storage.pop(filename, None)

    if IMAGE_EXPIRY_TIME > 0:
        async_call_later(hass, IMAGE_EXPIRY_TIME, partial(_purge_image, filename))

    service_result["url"] = get_url(hass) + async_sign_path(
        hass,
        f"/api/{DOMAIN}/images/{filename}",
        timedelta(seconds=IMAGE_EXPIRY_TIME or 1800),
    )
    service_result["media_source_id"] = f"media-source://{DOMAIN}/images/{filename}"

    return service_result


@dataclass(slots=True)
class GenDataTask:
    """Gen data task to be processed."""

    name: str
    """Name of the task."""

    instructions: str
    """Instructions on what needs to be done."""

    structure: vol.Schema | None = None
    """Optional structure for the data to be generated."""

    attachments: list[conversation.Attachment] | None = None
    """List of attachments to go along the instructions."""

    def __str__(self) -> str:
        """Return task as a string."""
        return f"<GenDataTask {self.name}: {id(self)}>"


@dataclass(slots=True)
class GenDataTaskResult:
    """Result of gen data task."""

    conversation_id: str
    """Unique identifier for the conversation."""

    data: Any
    """Data generated by the task."""

    def as_dict(self) -> dict[str, Any]:
        """Return result as a dict."""
        return {
            "conversation_id": self.conversation_id,
            "data": self.data,
        }


@dataclass(slots=True)
class GenImageTask:
    """Gen image task to be processed."""

    name: str
    """Name of the task."""

    instructions: str
    """Instructions on what needs to be done."""

    attachments: list[conversation.Attachment] | None = None
    """List of attachments to go along the instructions."""

    def __str__(self) -> str:
        """Return task as a string."""
        return f"<GenImageTask {self.name}: {id(self)}>"


@dataclass(slots=True)
class GenImageTaskResult:
    """Result of gen image task."""

    image_data: bytes
    """Raw image data generated by the model."""

    conversation_id: str
    """Unique identifier for the conversation."""

    mime_type: str
    """MIME type of the generated image."""

    width: int | None = None
    """Width of the generated image, if available."""

    height: int | None = None
    """Height of the generated image, if available."""

    model: str | None = None
    """Model used to generate the image, if available."""

    revised_prompt: str | None = None
    """Revised prompt used to generate the image, if applicable."""

    def as_dict(self) -> dict[str, Any]:
        """Return result as a dict."""
        return {
            "image_data": self.image_data,
            "conversation_id": self.conversation_id,
            "mime_type": self.mime_type,
            "width": self.width,
            "height": self.height,
            "model": self.model,
            "revised_prompt": self.revised_prompt,
        }


@dataclass(slots=True)
class ImageData:
    """Image data for stored generated images."""

    data: bytes
    """Raw image data."""

    timestamp: int
    """Timestamp when the image was generated, as a Unix timestamp."""

    mime_type: str
    """MIME type of the image."""

    title: str
    """Title of the image, usually the prompt used to generate it."""

    def __str__(self) -> str:
        """Return image data as a string."""
        return f"<ImageData {self.title}: {id(self)}>"
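A minimal sketch of driving the helper above from custom code, assuming an existing AI Task entity that supports data generation (the entity ID is hypothetical):

import voluptuous as vol

# Hedged sketch: run a data generation task through async_generate_data.
result = await async_generate_data(
    hass,
    task_name="garage door notice",
    entity_id="ai_task.openai_ai_task",  # hypothetical entity ID
    instructions="Generate a funny notification that the garage door was left open",
    structure=vol.Schema({vol.Required("message"): str}),
)
message = result.data["message"]  # GenDataTaskResult.data holds the output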
@@ -51,16 +51,9 @@ class AirGradientCoordinator(DataUpdateCoordinator[AirGradientData]):

    async def _async_setup(self) -> None:
        """Set up the coordinator."""
        try:
            self._current_version = (
                await self.client.get_current_measures()
            ).firmware_version
        except AirGradientError as error:
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="update_error",
                translation_placeholders={"error": str(error)},
            ) from error
        self._current_version = (
            await self.client.get_current_measures()
        ).firmware_version

    async def _async_update_data(self) -> AirGradientData:
        try:
@@ -6,7 +6,6 @@ from typing import Any, Concatenate
from airgradient import AirGradientConnectionError, AirGradientError, get_model_name

from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity

@@ -30,7 +29,6 @@ class AirGradientEntity(CoordinatorEntity[AirGradientCoordinator]):
            model_id=measures.model,
            serial_number=coordinator.serial_number,
            sw_version=measures.firmware_version,
            connections={(dr.CONNECTION_NETWORK_MAC, coordinator.serial_number)},
        )

@@ -6,7 +6,6 @@
  "documentation": "https://www.home-assistant.io/integrations/airgradient",
  "integration_type": "device",
  "iot_class": "local_polling",
  "quality_scale": "platinum",
  "requirements": ["airgradient==0.9.2"],
  "zeroconf": ["_airgradient._tcp.local."]
}
@@ -14,9 +14,9 @@ rules:
    status: exempt
    comment: |
      This integration does not provide additional actions.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  docs-high-level-description: todo
  docs-installation-instructions: todo
  docs-removal-instructions: todo
  entity-event-setup:
    status: exempt
    comment: |
@@ -34,7 +34,7 @@ rules:
  docs-configuration-parameters:
    status: exempt
    comment: No options to configure
  docs-installation-parameters: done
  docs-installation-parameters: todo
  entity-unavailable: done
  integration-owner: done
  log-when-unavailable: done
@@ -43,19 +43,23 @@ rules:
    status: exempt
    comment: |
      This integration does not require authentication.
  test-coverage: done
  test-coverage: todo
  # Gold
  devices: done
  diagnostics: done
  discovery-update-info: done
  discovery: done
  docs-data-update: done
  docs-examples: done
  docs-known-limitations: done
  docs-supported-devices: done
  docs-supported-functions: done
  docs-troubleshooting: done
  docs-use-cases: done
  discovery-update-info:
    status: todo
    comment: DHCP is still possible
  discovery:
    status: todo
    comment: DHCP is still possible
  docs-data-update: todo
  docs-examples: todo
  docs-known-limitations: todo
  docs-supported-devices: todo
  docs-supported-functions: todo
  docs-troubleshooting: todo
  docs-use-cases: todo
  dynamic-devices:
    status: exempt
    comment: |
@@ -61,7 +61,7 @@
      "display_pm_standard": {
        "name": "Display PM standard",
        "state": {
          "ugm3": "μg/m³",
          "ugm3": "µg/m³",
          "us_aqi": "US AQI"
        }
      },
@@ -39,14 +39,14 @@ class AirlyFlowHandler(ConfigFlow, domain=DOMAIN):
            )
            self._abort_if_unique_id_configured()
            try:
                location_point_valid = await check_location(
                location_point_valid = await test_location(
                    websession,
                    user_input["api_key"],
                    user_input["latitude"],
                    user_input["longitude"],
                )
                if not location_point_valid:
                    location_nearest_valid = await check_location(
                    location_nearest_valid = await test_location(
                        websession,
                        user_input["api_key"],
                        user_input["latitude"],
@@ -88,7 +88,7 @@ class AirlyFlowHandler(ConfigFlow, domain=DOMAIN):
    )


async def check_location(
async def test_location(
    client: ClientSession,
    api_key: str,
    latitude: float,
@@ -45,6 +45,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirNowConfigEntry) -> bo
    # Store Entity and Initialize Platforms
    entry.runtime_data = coordinator

    # Listen for option changes
    entry.async_on_unload(entry.add_update_listener(update_listener))

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

    # Clean up unused device entries with no entities
@@ -85,3 +88,8 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: AirNowConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """Handle options update."""
    await hass.config_entries.async_reload(entry.entry_id)
@@ -13,7 +13,7 @@ from homeassistant.config_entries import (
    ConfigEntry,
    ConfigFlow,
    ConfigFlowResult,
    OptionsFlowWithReload,
    OptionsFlow,
)
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS
from homeassistant.core import HomeAssistant, callback
@@ -126,7 +126,7 @@ class AirNowConfigFlow(ConfigFlow, domain=DOMAIN):
        return AirNowOptionsFlowHandler()


class AirNowOptionsFlowHandler(OptionsFlowWithReload):
class AirNowOptionsFlowHandler(OptionsFlow):
    """Handle an options flow for AirNow."""

    async def async_step_init(
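The two hunks above trade OptionsFlowWithReload for a plain OptionsFlow plus the manual update listener wired up in __init__.py. A minimal sketch of the newer pattern, under the assumption that OptionsFlowWithReload reloads the config entry automatically once options are saved:

from homeassistant.config_entries import OptionsFlowWithReload


class ExampleOptionsFlowHandler(OptionsFlowWithReload):
    """Options flow for a hypothetical integration.

    The config entry is reloaded automatically after the options flow
    finishes, so no separate update_listener/async_reload wiring is needed.
    """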
@@ -71,7 +71,7 @@ class AirNowDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):

    async def _async_update_data(self) -> dict[str, Any]:
        """Update data via library."""
        data: dict[str, Any] = {}
        data = {}
        try:
            obs = await self.airnow.observations.latLong(
                self.latitude,
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/airnow",
  "iot_class": "cloud_polling",
  "loggers": ["pyairnow"],
  "requirements": ["pyairnow==1.3.1"]
  "requirements": ["pyairnow==1.2.1"]
}
@@ -1,45 +0,0 @@
"""The Ubiquiti airOS integration."""

from __future__ import annotations

from airos.airos8 import AirOS8

from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .coordinator import AirOSConfigEntry, AirOSDataUpdateCoordinator

_PLATFORMS: list[Platform] = [
    Platform.BINARY_SENSOR,
    Platform.SENSOR,
]


async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
    """Set up Ubiquiti airOS from a config entry."""

    # By default airOS 8 comes with self-signed SSL certificates,
    # with no option in the web UI to change or upload a custom certificate.
    session = async_get_clientsession(hass, verify_ssl=False)

    airos_device = AirOS8(
        host=entry.data[CONF_HOST],
        username=entry.data[CONF_USERNAME],
        password=entry.data[CONF_PASSWORD],
        session=session,
    )

    coordinator = AirOSDataUpdateCoordinator(hass, entry, airos_device)
    await coordinator.async_config_entry_first_refresh()

    entry.runtime_data = coordinator

    await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)

    return True


async def async_unload_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)
@@ -1,106 +0,0 @@
"""AirOS Binary Sensor component for Home Assistant."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
import logging

from homeassistant.components.binary_sensor import (
    BinarySensorDeviceClass,
    BinarySensorEntity,
    BinarySensorEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import AirOS8Data, AirOSConfigEntry, AirOSDataUpdateCoordinator
from .entity import AirOSEntity

_LOGGER = logging.getLogger(__name__)

PARALLEL_UPDATES = 0


@dataclass(frozen=True, kw_only=True)
class AirOSBinarySensorEntityDescription(BinarySensorEntityDescription):
    """Describe an AirOS binary sensor."""

    value_fn: Callable[[AirOS8Data], bool]


BINARY_SENSORS: tuple[AirOSBinarySensorEntityDescription, ...] = (
    AirOSBinarySensorEntityDescription(
        key="portfw",
        translation_key="port_forwarding",
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda data: data.portfw,
    ),
    AirOSBinarySensorEntityDescription(
        key="dhcp_client",
        translation_key="dhcp_client",
        device_class=BinarySensorDeviceClass.RUNNING,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda data: data.services.dhcpc,
    ),
    AirOSBinarySensorEntityDescription(
        key="dhcp_server",
        translation_key="dhcp_server",
        device_class=BinarySensorDeviceClass.RUNNING,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda data: data.services.dhcpd,
        entity_registry_enabled_default=False,
    ),
    AirOSBinarySensorEntityDescription(
        key="dhcp6_server",
        translation_key="dhcp6_server",
        device_class=BinarySensorDeviceClass.RUNNING,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda data: data.services.dhcp6d_stateful,
        entity_registry_enabled_default=False,
    ),
    AirOSBinarySensorEntityDescription(
        key="pppoe",
        translation_key="pppoe",
        device_class=BinarySensorDeviceClass.CONNECTIVITY,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda data: data.services.pppoe,
        entity_registry_enabled_default=False,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: AirOSConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the AirOS binary sensors from a config entry."""
    coordinator = config_entry.runtime_data

    async_add_entities(
        AirOSBinarySensor(coordinator, description) for description in BINARY_SENSORS
    )


class AirOSBinarySensor(AirOSEntity, BinarySensorEntity):
    """Representation of a binary sensor."""

    entity_description: AirOSBinarySensorEntityDescription

    def __init__(
        self,
        coordinator: AirOSDataUpdateCoordinator,
        description: AirOSBinarySensorEntityDescription,
    ) -> None:
        """Initialize the binary sensor."""
        super().__init__(coordinator)

        self.entity_description = description
        self._attr_unique_id = f"{coordinator.data.host.device_id}_{description.key}"

    @property
    def is_on(self) -> bool:
        """Return the state of the binary sensor."""
        return self.entity_description.value_fn(self.coordinator.data)
@@ -1,82 +0,0 @@
"""Config flow for the Ubiquiti airOS integration."""

from __future__ import annotations

import logging
from typing import Any

from airos.exceptions import (
    AirOSConnectionAuthenticationError,
    AirOSConnectionSetupError,
    AirOSDataMissingError,
    AirOSDeviceConnectionError,
    AirOSKeyDataMissingError,
)
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DOMAIN
from .coordinator import AirOS8

_LOGGER = logging.getLogger(__name__)

STEP_USER_DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_HOST): str,
        vol.Required(CONF_USERNAME, default="ubnt"): str,
        vol.Required(CONF_PASSWORD): str,
    }
)


class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Ubiquiti airOS."""

    VERSION = 1

    async def async_step_user(
        self,
        user_input: dict[str, Any] | None = None,
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        errors: dict[str, str] = {}
        if user_input is not None:
            # By default airOS 8 comes with self-signed SSL certificates,
            # with no option in the web UI to change or upload a custom certificate.
            session = async_get_clientsession(self.hass, verify_ssl=False)

            airos_device = AirOS8(
                host=user_input[CONF_HOST],
                username=user_input[CONF_USERNAME],
                password=user_input[CONF_PASSWORD],
                session=session,
            )
            try:
                await airos_device.login()
                airos_data = await airos_device.status()

            except (
                AirOSConnectionSetupError,
                AirOSDeviceConnectionError,
            ):
                errors["base"] = "cannot_connect"
            except (AirOSConnectionAuthenticationError, AirOSDataMissingError):
                errors["base"] = "invalid_auth"
            except AirOSKeyDataMissingError:
                errors["base"] = "key_data_missing"
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                await self.async_set_unique_id(airos_data.derived.mac)
                self._abort_if_unique_id_configured()
                return self.async_create_entry(
                    title=airos_data.host.hostname, data=user_input
                )

        return self.async_show_form(
            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
        )
@@ -1,9 +0,0 @@
"""Constants for the Ubiquiti airOS integration."""

from datetime import timedelta

DOMAIN = "airos"

SCAN_INTERVAL = timedelta(minutes=1)

MANUFACTURER = "Ubiquiti"
@@ -1,70 +0,0 @@
"""DataUpdateCoordinator for AirOS."""

from __future__ import annotations

import logging

from airos.airos8 import AirOS8, AirOS8Data
from airos.exceptions import (
    AirOSConnectionAuthenticationError,
    AirOSConnectionSetupError,
    AirOSDataMissingError,
    AirOSDeviceConnectionError,
)

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN, SCAN_INTERVAL

_LOGGER = logging.getLogger(__name__)

type AirOSConfigEntry = ConfigEntry[AirOSDataUpdateCoordinator]


class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOS8Data]):
    """Class to manage fetching AirOS data from single endpoint."""

    config_entry: AirOSConfigEntry

    def __init__(
        self, hass: HomeAssistant, config_entry: AirOSConfigEntry, airos_device: AirOS8
    ) -> None:
        """Initialize the coordinator."""
        self.airos_device = airos_device
        super().__init__(
            hass,
            _LOGGER,
            config_entry=config_entry,
            name=DOMAIN,
            update_interval=SCAN_INTERVAL,
        )

    async def _async_update_data(self) -> AirOS8Data:
        """Fetch data from AirOS."""
        try:
            await self.airos_device.login()
            return await self.airos_device.status()
        except (AirOSConnectionAuthenticationError,) as err:
            _LOGGER.exception("Error authenticating with airOS device")
            raise ConfigEntryError(
                translation_domain=DOMAIN, translation_key="invalid_auth"
            ) from err
        except (
            AirOSConnectionSetupError,
            AirOSDeviceConnectionError,
            TimeoutError,
        ) as err:
            _LOGGER.error("Error connecting to airOS device: %s", err)
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="cannot_connect",
            ) from err
        except (AirOSDataMissingError,) as err:
            _LOGGER.error("Expected data not returned by airOS device: %s", err)
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="error_data_missing",
            ) from err
@@ -1,33 +0,0 @@
"""Diagnostics support for airOS."""

from __future__ import annotations

from typing import Any

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_HOST, CONF_PASSWORD
from homeassistant.core import HomeAssistant

from .coordinator import AirOSConfigEntry

IP_REDACT = ["addr", "ipaddr", "ip6addr", "lastip"]  # IP related
HW_REDACT = ["apmac", "hwaddr", "mac"]  # MAC address
TO_REDACT_HA = [CONF_HOST, CONF_PASSWORD]
TO_REDACT_AIROS = [
    "hostname",  # Prevent leaking device naming
    "essid",  # Network SSID
    "lat",  # GPS latitude to prevent exposing location data.
    "lon",  # GPS longitude to prevent exposing location data.
    *HW_REDACT,
    *IP_REDACT,
]


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, entry: AirOSConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""
    return {
        "entry_data": async_redact_data(entry.data, TO_REDACT_HA),
        "data": async_redact_data(entry.runtime_data.data.to_dict(), TO_REDACT_AIROS),
    }
@@ -1,36 +0,0 @@
"""Generic AirOS Entity Class."""

from __future__ import annotations

from homeassistant.const import CONF_HOST
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN, MANUFACTURER
from .coordinator import AirOSDataUpdateCoordinator


class AirOSEntity(CoordinatorEntity[AirOSDataUpdateCoordinator]):
    """Represent an AirOS Entity."""

    _attr_has_entity_name = True

    def __init__(self, coordinator: AirOSDataUpdateCoordinator) -> None:
        """Initialise the gateway."""
        super().__init__(coordinator)

        airos_data = self.coordinator.data

        configuration_url: str | None = (
            f"https://{coordinator.config_entry.data[CONF_HOST]}"
        )

        self._attr_device_info = DeviceInfo(
            connections={(CONNECTION_NETWORK_MAC, airos_data.derived.mac)},
            configuration_url=configuration_url,
            identifiers={(DOMAIN, str(airos_data.host.device_id))},
            manufacturer=MANUFACTURER,
            model=airos_data.host.devmodel,
            name=airos_data.host.hostname,
            sw_version=airos_data.host.fwversion,
        )
@@ -1,10 +0,0 @@
{
  "domain": "airos",
  "name": "Ubiquiti airOS",
  "codeowners": ["@CoMPaTech"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/airos",
  "iot_class": "local_polling",
  "quality_scale": "bronze",
  "requirements": ["airos==0.5.1"]
}
@@ -1,70 +0,0 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: airOS does not have actions
  appropriate-polling: done
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: airOS does not have actions
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: local_polling without events
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions:
    status: exempt
    comment: airOS does not have actions
  config-entry-unloading: done
  docs-configuration-parameters: done
  docs-installation-parameters: done
  entity-unavailable: todo
  integration-owner: done
  log-when-unavailable: todo
  parallel-updates: todo
  reauthentication-flow: todo
  test-coverage: done

  # Gold
  devices: done
  diagnostics: done
  discovery-update-info: todo
  discovery: todo
  docs-data-update: done
  docs-examples: todo
  docs-known-limitations: done
  docs-supported-devices: done
  docs-supported-functions: todo
  docs-troubleshooting: done
  docs-use-cases: todo
  dynamic-devices: todo
  entity-category: done
  entity-device-class: done
  entity-disabled-by-default: done
  entity-translations: done
  exception-translations: done
  icon-translations:
    status: exempt
    comment: no (custom) icons used or envisioned
  reconfiguration-flow: todo
  repair-issues: todo
  stale-devices: todo

  # Platinum
  async-dependency: done
  inject-websession: done
  strict-typing: done
@@ -1,194 +0,0 @@
"""AirOS Sensor component for Home Assistant."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
import logging

from airos.data import DerivedWirelessMode, DerivedWirelessRole, NetRole

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.const import (
    PERCENTAGE,
    SIGNAL_STRENGTH_DECIBELS,
    UnitOfDataRate,
    UnitOfFrequency,
    UnitOfLength,
    UnitOfTime,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType

from .coordinator import AirOS8Data, AirOSConfigEntry, AirOSDataUpdateCoordinator
from .entity import AirOSEntity

_LOGGER = logging.getLogger(__name__)

NETROLE_OPTIONS = [mode.value for mode in NetRole]
WIRELESS_MODE_OPTIONS = [mode.value for mode in DerivedWirelessMode]
WIRELESS_ROLE_OPTIONS = [mode.value for mode in DerivedWirelessRole]

PARALLEL_UPDATES = 0


@dataclass(frozen=True, kw_only=True)
class AirOSSensorEntityDescription(SensorEntityDescription):
    """Describe an AirOS sensor."""

    value_fn: Callable[[AirOS8Data], StateType]


SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
    AirOSSensorEntityDescription(
        key="host_cpuload",
        translation_key="host_cpuload",
        native_unit_of_measurement=PERCENTAGE,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=1,
        value_fn=lambda data: data.host.cpuload,
        entity_registry_enabled_default=False,
    ),
    AirOSSensorEntityDescription(
        key="host_netrole",
        translation_key="host_netrole",
        device_class=SensorDeviceClass.ENUM,
        value_fn=lambda data: data.host.netrole.value,
        options=NETROLE_OPTIONS,
    ),
    AirOSSensorEntityDescription(
        key="wireless_frequency",
        translation_key="wireless_frequency",
        native_unit_of_measurement=UnitOfFrequency.MEGAHERTZ,
        device_class=SensorDeviceClass.FREQUENCY,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda data: data.wireless.frequency,
    ),
    AirOSSensorEntityDescription(
        key="wireless_essid",
        translation_key="wireless_essid",
        value_fn=lambda data: data.wireless.essid,
    ),
    AirOSSensorEntityDescription(
        key="wireless_antenna_gain",
        translation_key="wireless_antenna_gain",
        native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS,
        device_class=SensorDeviceClass.SIGNAL_STRENGTH,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda data: data.wireless.antenna_gain,
    ),
    AirOSSensorEntityDescription(
        key="wireless_throughput_tx",
        translation_key="wireless_throughput_tx",
        native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
        device_class=SensorDeviceClass.DATA_RATE,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=0,
        suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
        value_fn=lambda data: data.wireless.throughput.tx,
    ),
    AirOSSensorEntityDescription(
        key="wireless_throughput_rx",
        translation_key="wireless_throughput_rx",
        native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
        device_class=SensorDeviceClass.DATA_RATE,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=0,
        suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
        value_fn=lambda data: data.wireless.throughput.rx,
    ),
    AirOSSensorEntityDescription(
        key="wireless_polling_dl_capacity",
        translation_key="wireless_polling_dl_capacity",
        native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
        device_class=SensorDeviceClass.DATA_RATE,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=0,
        suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
        value_fn=lambda data: data.wireless.polling.dl_capacity,
    ),
    AirOSSensorEntityDescription(
        key="wireless_polling_ul_capacity",
        translation_key="wireless_polling_ul_capacity",
        native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
        device_class=SensorDeviceClass.DATA_RATE,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=0,
        suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
        value_fn=lambda data: data.wireless.polling.ul_capacity,
    ),
    AirOSSensorEntityDescription(
        key="host_uptime",
        translation_key="host_uptime",
        native_unit_of_measurement=UnitOfTime.SECONDS,
        device_class=SensorDeviceClass.DURATION,
        suggested_display_precision=0,
        suggested_unit_of_measurement=UnitOfTime.DAYS,
        value_fn=lambda data: data.host.uptime,
        entity_registry_enabled_default=False,
    ),
    AirOSSensorEntityDescription(
        key="wireless_distance",
        translation_key="wireless_distance",
        native_unit_of_measurement=UnitOfLength.METERS,
        device_class=SensorDeviceClass.DISTANCE,
        suggested_display_precision=1,
        suggested_unit_of_measurement=UnitOfLength.KILOMETERS,
        value_fn=lambda data: data.wireless.distance,
    ),
    AirOSSensorEntityDescription(
        key="wireless_mode",
        translation_key="wireless_mode",
        device_class=SensorDeviceClass.ENUM,
        value_fn=lambda data: data.derived.mode.value,
        options=WIRELESS_MODE_OPTIONS,
        entity_registry_enabled_default=False,
    ),
    AirOSSensorEntityDescription(
        key="wireless_role",
        translation_key="wireless_role",
        device_class=SensorDeviceClass.ENUM,
        value_fn=lambda data: data.derived.role.value,
        options=WIRELESS_ROLE_OPTIONS,
        entity_registry_enabled_default=False,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: AirOSConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the AirOS sensors from a config entry."""
    coordinator = config_entry.runtime_data

    async_add_entities(AirOSSensor(coordinator, description) for description in SENSORS)


class AirOSSensor(AirOSEntity, SensorEntity):
    """Representation of a Sensor."""

    entity_description: AirOSSensorEntityDescription

    def __init__(
        self,
        coordinator: AirOSDataUpdateCoordinator,
        description: AirOSSensorEntityDescription,
    ) -> None:
        """Initialize the sensor."""
        super().__init__(coordinator)

        self.entity_description = description
        self._attr_unique_id = f"{coordinator.data.derived.mac}_{description.key}"

    @property
    def native_value(self) -> StateType:
        """Return the state of the sensor."""
        return self.entity_description.value_fn(self.coordinator.data)
@@ -1,117 +0,0 @@
{
  "config": {
    "flow_title": "Ubiquiti airOS device",
    "step": {
      "user": {
        "data": {
          "host": "[%key:common::config_flow::data::host%]",
          "username": "[%key:common::config_flow::data::username%]",
          "password": "[%key:common::config_flow::data::password%]"
        },
        "data_description": {
          "host": "IP address or hostname of the airOS device",
          "username": "Administrator username for the airOS device, normally 'ubnt'",
          "password": "Password configured through the UISP app or web interface"
        }
      }
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "key_data_missing": "Expected data not returned from the device; check the documentation for supported devices",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
    }
  },
  "entity": {
    "binary_sensor": {
      "port_forwarding": {
        "name": "Port forwarding"
      },
      "dhcp_client": {
        "name": "DHCP client"
      },
      "dhcp_server": {
        "name": "DHCP server"
      },
      "dhcp6_server": {
        "name": "DHCPv6 server"
      },
      "pppoe": {
        "name": "PPPoE link"
      }
    },
    "sensor": {
      "host_cpuload": {
        "name": "CPU load"
      },
      "host_netrole": {
        "name": "Network role",
        "state": {
          "bridge": "Bridge",
          "router": "Router"
        }
      },
      "wireless_frequency": {
        "name": "Wireless frequency"
      },
      "wireless_essid": {
        "name": "Wireless SSID"
      },
      "wireless_antenna_gain": {
        "name": "Antenna gain"
      },
      "wireless_throughput_tx": {
        "name": "Throughput transmit (actual)"
      },
      "wireless_throughput_rx": {
        "name": "Throughput receive (actual)"
      },
      "wireless_polling_dl_capacity": {
        "name": "Download capacity"
      },
      "wireless_polling_ul_capacity": {
        "name": "Upload capacity"
      },
      "wireless_remote_hostname": {
        "name": "Remote hostname"
      },
      "host_uptime": {
        "name": "Uptime"
      },
      "wireless_distance": {
        "name": "Wireless distance"
      },
      "wireless_role": {
        "name": "Wireless role",
        "state": {
          "access_point": "Access point",
          "station": "Station"
        }
      },
      "wireless_mode": {
        "name": "Wireless mode",
        "state": {
          "point_to_point": "Point-to-point",
          "point_to_multipoint": "Point-to-multipoint"
        }
      }
    }
  },
  "exceptions": {
    "invalid_auth": {
      "message": "[%key:common::config_flow::error::invalid_auth%]"
    },
    "cannot_connect": {
      "message": "[%key:common::config_flow::error::cannot_connect%]"
    },
    "key_data_missing": {
      "message": "Key data not returned from device"
    },
    "error_data_missing": {
      "message": "Data incomplete or missing"
    }
  }
}
@@ -9,7 +9,7 @@ from homeassistant.core import HomeAssistant
from .const import CONF_CLIP_NEGATIVE, CONF_RETURN_AVERAGE
from .coordinator import AirQCoordinator

PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR]
PLATFORMS: list[Platform] = [Platform.SENSOR]

AirQConfigEntry = ConfigEntry[AirQCoordinator]

@@ -6,5 +6,6 @@ CONF_RETURN_AVERAGE: Final = "return_average"
CONF_CLIP_NEGATIVE: Final = "clip_negatives"
DOMAIN: Final = "airq"
MANUFACTURER: Final = "CorantGmbH"
CONCENTRATION_GRAMS_PER_CUBIC_METER: Final = "g/m³"
ACTIVITY_BECQUEREL_PER_CUBIC_METER: Final = "Bq/m³"
UPDATE_INTERVAL: float = 10.0
@@ -10,7 +10,7 @@ from aioairq.core import AirQ, identify_warming_up_sensors
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

@@ -39,7 +39,7 @@ class AirQCoordinator(DataUpdateCoordinator):
            name=DOMAIN,
            update_interval=timedelta(seconds=UPDATE_INTERVAL),
        )
        session = async_create_clientsession(hass)
        session = async_get_clientsession(hass)
        self.airq = AirQ(
            entry.data[CONF_IP_ADDRESS], entry.data[CONF_PASSWORD], session
        )
@@ -75,7 +75,6 @@ class AirQCoordinator(DataUpdateCoordinator):
                return_average=self.return_average,
                clip_negative_values=self.clip_negative,
            )
            data["brightness"] = await self.airq.get_current_brightness()
            if warming_up_sensors := identify_warming_up_sensors(data):
                _LOGGER.debug(
                    "Following sensors are still warming up: %s", warming_up_sensors
@@ -4,6 +4,9 @@
      "health_index": {
        "default": "mdi:heart-pulse"
      },
      "absolute_humidity": {
        "default": "mdi:water"
      },
      "oxygen": {
        "default": "mdi:leaf"
      },
@@ -7,5 +7,5 @@
  "integration_type": "hub",
  "iot_class": "local_polling",
  "loggers": ["aioairq"],
  "requirements": ["aioairq==0.4.6"]
  "requirements": ["aioairq==0.4.4"]
}
@@ -1,85 +0,0 @@
"""Definition of air-Q number platform used to control the LED strips."""

from __future__ import annotations

from collections.abc import Awaitable, Callable
from dataclasses import dataclass
import logging

from aioairq.core import AirQ

from homeassistant.components.number import NumberEntity, NumberEntityDescription
from homeassistant.const import PERCENTAGE
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from . import AirQConfigEntry, AirQCoordinator

_LOGGER = logging.getLogger(__name__)


@dataclass(frozen=True, kw_only=True)
class AirQBrightnessDescription(NumberEntityDescription):
    """Describes AirQ number entity responsible for brightness control."""

    value: Callable[[dict], float]
    set_value: Callable[[AirQ, float], Awaitable[None]]


AIRQ_LED_BRIGHTNESS = AirQBrightnessDescription(
    key="airq_led_brightness",
    translation_key="airq_led_brightness",
    native_min_value=0.0,
    native_max_value=100.0,
    native_step=1.0,
    native_unit_of_measurement=PERCENTAGE,
    value=lambda data: data["brightness"],
    set_value=lambda device, value: device.set_current_brightness(value),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: AirQConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up number entities: a single entity for the LEDs."""

    coordinator = entry.runtime_data
    entities = [AirQLEDBrightness(coordinator, AIRQ_LED_BRIGHTNESS)]

    async_add_entities(entities)


class AirQLEDBrightness(CoordinatorEntity[AirQCoordinator], NumberEntity):
    """Representation of the LEDs from a single AirQ."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: AirQCoordinator,
        description: AirQBrightnessDescription,
    ) -> None:
        """Initialize a single sensor."""
        super().__init__(coordinator)
        self.entity_description: AirQBrightnessDescription = description

        self._attr_device_info = coordinator.device_info
        self._attr_unique_id = f"{coordinator.device_id}_{description.key}"

    @property
    def native_value(self) -> float:
        """Return the brightness of the LEDs in %."""
        return self.entity_description.value(self.coordinator.data)

    async def async_set_native_value(self, value: float) -> None:
        """Set the brightness of the LEDs to the value in %."""
        _LOGGER.debug(
            "Changing LED brightness from %.0f%% to %.0f%%",
            self.coordinator.data["brightness"],
            value,
        )
        await self.entity_description.set_value(self.coordinator.airq, value)
        await self.coordinator.async_request_refresh()
@@ -14,7 +14,6 @@ from homeassistant.components.sensor import (
    SensorStateClass,
)
from homeassistant.const import (
    CONCENTRATION_GRAMS_PER_CUBIC_METER,
    CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
    CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
    CONCENTRATION_PARTS_PER_BILLION,
@@ -29,7 +28,10 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from . import AirQConfigEntry, AirQCoordinator
from .const import ACTIVITY_BECQUEREL_PER_CUBIC_METER
from .const import (
    ACTIVITY_BECQUEREL_PER_CUBIC_METER,
    CONCENTRATION_GRAMS_PER_CUBIC_METER,
)

_LOGGER = logging.getLogger(__name__)

@@ -193,7 +195,7 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
    ),
    AirQEntityDescription(
        key="humidity_abs",
        device_class=SensorDeviceClass.ABSOLUTE_HUMIDITY,
        translation_key="absolute_humidity",
        native_unit_of_measurement=CONCENTRATION_GRAMS_PER_CUBIC_METER,
        state_class=SensorStateClass.MEASUREMENT,
        value=lambda data: data.get("humidity_abs"),
@@ -35,11 +35,6 @@
    }
  },
  "entity": {
    "number": {
      "airq_led_brightness": {
        "name": "LED brightness"
      }
    },
    "sensor": {
      "acetaldehyde": {
        "name": "Acetaldehyde"
@@ -98,6 +93,9 @@
      "health_index": {
        "name": "Health index"
      },
      "absolute_humidity": {
        "name": "Absolute humidity"
      },
      "hydrogen": {
        "name": "Hydrogen"
      },
@@ -5,20 +5,24 @@ from __future__ import annotations
from datetime import timedelta
import logging

from airthings import Airthings
from airthings import Airthings, AirthingsDevice, AirthingsError

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ID, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import CONF_SECRET
from .coordinator import AirthingsConfigEntry, AirthingsDataUpdateCoordinator
from .const import CONF_SECRET, DOMAIN

_LOGGER = logging.getLogger(__name__)

PLATFORMS: list[Platform] = [Platform.SENSOR]
SCAN_INTERVAL = timedelta(minutes=6)

type AirthingsDataCoordinatorType = DataUpdateCoordinator[dict[str, AirthingsDevice]]
type AirthingsConfigEntry = ConfigEntry[AirthingsDataCoordinatorType]


async def async_setup_entry(hass: HomeAssistant, entry: AirthingsConfigEntry) -> bool:
    """Set up Airthings from a config entry."""
@@ -28,8 +32,21 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirthingsConfigEntry) ->
        async_get_clientsession(hass),
    )

    coordinator = AirthingsDataUpdateCoordinator(hass, airthings, entry)
    async def _update_method() -> dict[str, AirthingsDevice]:
        """Get the latest data from Airthings."""
        try:
            return await airthings.update_devices()  # type: ignore[no-any-return]
        except AirthingsError as err:
            raise UpdateFailed(f"Unable to fetch data: {err}") from err

    coordinator = DataUpdateCoordinator(
        hass,
        _LOGGER,
        config_entry=entry,
        name=DOMAIN,
        update_method=_update_method,
        update_interval=SCAN_INTERVAL,
    )
    await coordinator.async_config_entry_first_refresh()

    entry.runtime_data = coordinator
@@ -45,8 +45,6 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
        )

        errors = {}
        await self.async_set_unique_id(user_input[CONF_ID])
        self._abort_if_unique_id_configured()

        try:
            await airthings.get_token(
@@ -62,6 +60,9 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
            _LOGGER.exception("Unexpected exception")
            errors["base"] = "unknown"
        else:
            await self.async_set_unique_id(user_input[CONF_ID])
            self._abort_if_unique_id_configured()

            return self.async_create_entry(title="Airthings", data=user_input)

        return self.async_show_form(
homeassistant/components/airthings/coordinator.py (file deleted)

@@ -1,45 +0,0 @@
-"""The Airthings integration."""
-
-from datetime import timedelta
-import logging
-
-from airthings import Airthings, AirthingsDevice, AirthingsError
-
-from homeassistant.config_entries import ConfigEntry
-from homeassistant.core import HomeAssistant
-from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
-
-from .const import DOMAIN
-
-_LOGGER = logging.getLogger(__name__)
-SCAN_INTERVAL = timedelta(minutes=6)
-
-type AirthingsConfigEntry = ConfigEntry[AirthingsDataUpdateCoordinator]
-
-
-class AirthingsDataUpdateCoordinator(DataUpdateCoordinator[dict[str, AirthingsDevice]]):
-    """Coordinator for Airthings data updates."""
-
-    def __init__(
-        self,
-        hass: HomeAssistant,
-        airthings: Airthings,
-        config_entry: AirthingsConfigEntry,
-    ) -> None:
-        """Initialize the coordinator."""
-        super().__init__(
-            hass,
-            _LOGGER,
-            config_entry=config_entry,
-            name=DOMAIN,
-            update_method=self._update_method,
-            update_interval=SCAN_INTERVAL,
-        )
-        self.airthings = airthings
-
-    async def _update_method(self) -> dict[str, AirthingsDevice]:
-        """Get the latest data from Airthings."""
-        try:
-            return await self.airthings.update_devices()  # type: ignore[no-any-return]
-        except AirthingsError as err:
-            raise UpdateFailed(f"Unable to fetch data: {err}") from err
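The deleted coordinator wires a bound method in via update_method; the other common shape subclasses DataUpdateCoordinator and overrides _async_update_data. A sketch of that variant, with hypothetical ExampleClient and ExampleError stand-ins:

# The subclass variant: override _async_update_data instead of passing
# update_method. ExampleClient and ExampleError are hypothetical.
import logging
from datetime import timedelta

from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

_LOGGER = logging.getLogger(__name__)


class ExampleError(Exception):
    """Stand-in for a client library's error type."""


class ExampleClient:
    """Stand-in client returning a mapping of device id to reading."""

    async def fetch(self) -> dict[str, float]:
        return {"device-1": 21.5}


class ExampleCoordinator(DataUpdateCoordinator[dict[str, float]]):
    """Coordinator that owns its client and fetches in _async_update_data."""

    def __init__(self, hass: HomeAssistant, client: ExampleClient) -> None:
        super().__init__(
            hass,
            _LOGGER,
            name="example",
            update_interval=timedelta(minutes=6),
        )
        self.client = client

    async def _async_update_data(self) -> dict[str, float]:
        # UpdateFailed flags the poll as failed without crashing the loop.
        try:
            return await self.client.fetch()
        except ExampleError as err:
            raise UpdateFailed(f"Unable to fetch data: {err}") from err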
homeassistant/components/airthings/manifest.json

@@ -3,19 +3,6 @@
   "name": "Airthings",
   "codeowners": ["@danielhiversen", "@LaStrada"],
   "config_flow": true,
-  "dhcp": [
-    {
-      "hostname": "airthings-view"
-    },
-    {
-      "hostname": "airthings-hub",
-      "macaddress": "D0141190*"
-    },
-    {
-      "hostname": "airthings-hub",
-      "macaddress": "70B3D52A0*"
-    }
-  ],
   "documentation": "https://www.home-assistant.io/integrations/airthings",
   "iot_class": "cloud_polling",
   "loggers": ["airthings"],
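Dropping the dhcp block removes passive discovery: Home Assistant would no longer prompt to set up Airthings when a matching device requests a DHCP lease. Conceptually, each matcher is a glob over the announced hostname and the undelimited uppercase MAC (the macaddress values above are OUI prefixes). Below is a simplified, illustrative model of that matching, not Home Assistant's actual implementation.

# Simplified model of dhcp matcher evaluation; illustrative only.
from fnmatch import fnmatch

MATCHERS = [
    {"hostname": "airthings-view"},
    {"hostname": "airthings-hub", "macaddress": "D0141190*"},
]


def matches(hostname: str, mac: str) -> bool:
    """Return True if any matcher accepts this DHCP request."""
    mac = mac.replace(":", "").upper()
    return any(
        fnmatch(hostname.lower(), m.get("hostname", "*"))
        and fnmatch(mac, m.get("macaddress", "*"))
        for m in MATCHERS
    )


print(matches("Airthings-hub", "d0:14:11:90:aa:bb"))  # True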
homeassistant/components/airthings/sensor.py

@@ -14,12 +14,10 @@ from homeassistant.const import (
     CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
     CONCENTRATION_PARTS_PER_BILLION,
     CONCENTRATION_PARTS_PER_MILLION,
-    LIGHT_LUX,
     PERCENTAGE,
     SIGNAL_STRENGTH_DECIBELS,
     EntityCategory,
     UnitOfPressure,
-    UnitOfSoundPressure,
     UnitOfTemperature,
 )
 from homeassistant.core import HomeAssistant
@@ -28,44 +26,32 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.typing import StateType
 from homeassistant.helpers.update_coordinator import CoordinatorEntity
 
-from . import AirthingsConfigEntry
+from . import AirthingsConfigEntry, AirthingsDataCoordinatorType
 from .const import DOMAIN
-from .coordinator import AirthingsDataUpdateCoordinator
 
 SENSORS: dict[str, SensorEntityDescription] = {
     "radonShortTermAvg": SensorEntityDescription(
         key="radonShortTermAvg",
         native_unit_of_measurement="Bq/m³",
         translation_key="radon",
-        suggested_display_precision=0,
     ),
     "temp": SensorEntityDescription(
         key="temp",
         device_class=SensorDeviceClass.TEMPERATURE,
         native_unit_of_measurement=UnitOfTemperature.CELSIUS,
         state_class=SensorStateClass.MEASUREMENT,
-        suggested_display_precision=1,
     ),
     "humidity": SensorEntityDescription(
         key="humidity",
         device_class=SensorDeviceClass.HUMIDITY,
         native_unit_of_measurement=PERCENTAGE,
         state_class=SensorStateClass.MEASUREMENT,
-        suggested_display_precision=0,
     ),
     "pressure": SensorEntityDescription(
         key="pressure",
         device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE,
         native_unit_of_measurement=UnitOfPressure.MBAR,
         state_class=SensorStateClass.MEASUREMENT,
-        suggested_display_precision=1,
     ),
-    "sla": SensorEntityDescription(
-        key="sla",
-        device_class=SensorDeviceClass.SOUND_PRESSURE,
-        native_unit_of_measurement=UnitOfSoundPressure.WEIGHTED_DECIBEL_A,
-        state_class=SensorStateClass.MEASUREMENT,
-        suggested_display_precision=0,
-    ),
     "battery": SensorEntityDescription(
         key="battery",
@@ -73,47 +59,34 @@ SENSORS: dict[str, SensorEntityDescription] = {
         native_unit_of_measurement=PERCENTAGE,
         entity_category=EntityCategory.DIAGNOSTIC,
         state_class=SensorStateClass.MEASUREMENT,
-        suggested_display_precision=0,
     ),
     "co2": SensorEntityDescription(
         key="co2",
         device_class=SensorDeviceClass.CO2,
         native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
         state_class=SensorStateClass.MEASUREMENT,
-        suggested_display_precision=0,
     ),
     "voc": SensorEntityDescription(
         key="voc",
         device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS_PARTS,
         native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
         state_class=SensorStateClass.MEASUREMENT,
-        suggested_display_precision=0,
     ),
     "light": SensorEntityDescription(
         key="light",
         native_unit_of_measurement=PERCENTAGE,
         translation_key="light",
         state_class=SensorStateClass.MEASUREMENT,
-        suggested_display_precision=0,
     ),
-    "lux": SensorEntityDescription(
-        key="lux",
-        device_class=SensorDeviceClass.ILLUMINANCE,
-        native_unit_of_measurement=LIGHT_LUX,
-        state_class=SensorStateClass.MEASUREMENT,
-        suggested_display_precision=0,
-    ),
     "virusRisk": SensorEntityDescription(
         key="virusRisk",
         translation_key="virus_risk",
         state_class=SensorStateClass.MEASUREMENT,
-        suggested_display_precision=0,
     ),
     "mold": SensorEntityDescription(
         key="mold",
         translation_key="mold",
         state_class=SensorStateClass.MEASUREMENT,
-        suggested_display_precision=0,
     ),
     "rssi": SensorEntityDescription(
         key="rssi",
@@ -122,21 +95,18 @@ SENSORS: dict[str, SensorEntityDescription] = {
         entity_registry_enabled_default=False,
         entity_category=EntityCategory.DIAGNOSTIC,
         state_class=SensorStateClass.MEASUREMENT,
-        suggested_display_precision=0,
     ),
     "pm1": SensorEntityDescription(
         key="pm1",
         native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
         device_class=SensorDeviceClass.PM1,
         state_class=SensorStateClass.MEASUREMENT,
-        suggested_display_precision=0,
     ),
     "pm25": SensorEntityDescription(
         key="pm25",
         native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
         device_class=SensorDeviceClass.PM25,
         state_class=SensorStateClass.MEASUREMENT,
-        suggested_display_precision=0,
     ),
 }
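The removed suggested_display_precision fields only control frontend rounding; the underlying state value is unaffected. More broadly, this SENSORS table drives entity creation: one SensorEntityDescription per data key, and each entity later resolves its reading by that key. A runnable toy version of the key-to-description lookup, with a simplified stand-in for the Airthings device objects:

# Toy version of the description-driven lookup: entities are created only for
# readings that have a description, and resolve values by key. FakeDevice is a
# simplified stand-in for AirthingsDevice.
from dataclasses import dataclass, field

from homeassistant.components.sensor import SensorEntityDescription


@dataclass
class FakeDevice:
    """Simplified stand-in for an Airthings device payload."""

    name: str
    sensors: dict[str, float] = field(default_factory=dict)


DESCRIPTIONS: dict[str, SensorEntityDescription] = {
    "temp": SensorEntityDescription(key="temp", native_unit_of_measurement="°C"),
    "humidity": SensorEntityDescription(key="humidity", native_unit_of_measurement="%"),
}

device = FakeDevice(name="Wave Plus", sensors={"temp": 21.4, "humidity": 38.0, "voc": 120.0})

# Mirrors the setup loop: readings without a matching description ("voc" here) are skipped.
for key, value in device.sensors.items():
    if (description := DESCRIPTIONS.get(key)) is not None:
        print(f"{device.name} {key}: {value} {description.native_unit_of_measurement}")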
@@ -150,7 +120,7 @@ async def async_setup_entry(
 
     coordinator = entry.runtime_data
     entities = [
-        AirthingsDeviceSensor(
+        AirthingsHeaterEnergySensor(
             coordinator,
             airthings_device,
             SENSORS[sensor_types],
@@ -162,8 +132,8 @@ async def async_setup_entry(
     async_add_entities(entities)
 
 
-class AirthingsDeviceSensor(
-    CoordinatorEntity[AirthingsDataUpdateCoordinator], SensorEntity
+class AirthingsHeaterEnergySensor(
+    CoordinatorEntity[AirthingsDataCoordinatorType], SensorEntity
 ):
     """Representation of a Airthings Sensor device."""
 
@@ -172,7 +142,7 @@ class AirthingsDeviceSensor(
 
     def __init__(
         self,
-        coordinator: AirthingsDataUpdateCoordinator,
+        coordinator: AirthingsDataCoordinatorType,
         airthings_device: AirthingsDevice,
         entity_description: SensorEntityDescription,
     ) -> None:
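The rename above also swaps the CoordinatorEntity type parameter from a coordinator subclass to a type alias; both parametrizations give self.coordinator.data the same static type. A minimal sketch of the alias form, with illustrative names:

# Alias-parametrized CoordinatorEntity: self.coordinator.data is typed as
# dict[str, float]. ExampleCoordinator/ExampleSensor are illustrative.
from homeassistant.components.sensor import SensorEntity
from homeassistant.helpers.update_coordinator import (
    CoordinatorEntity,
    DataUpdateCoordinator,
)

type ExampleCoordinator = DataUpdateCoordinator[dict[str, float]]


class ExampleSensor(CoordinatorEntity[ExampleCoordinator], SensorEntity):
    """Sensor that renders one key from the coordinator's data mapping."""

    def __init__(self, coordinator: ExampleCoordinator, key: str) -> None:
        super().__init__(coordinator)
        self._key = key
        self._attr_unique_id = f"example_{key}"

    @property
    def native_value(self) -> float | None:
        # CoordinatorEntity rewrites the entity state on every coordinator update.
        return self.coordinator.data.get(self._key)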
homeassistant/components/airtouch4/climate.py

@@ -142,7 +142,7 @@ class AirtouchAC(CoordinatorEntity, ClimateEntity):
         return AT_TO_HA_STATE[self._airtouch.acs[self._ac_number].AcMode]
 
     @property
-    def hvac_modes(self) -> list[HVACMode]:
+    def hvac_modes(self):
         """Return the list of available operation modes."""
         airtouch_modes = self._airtouch.GetSupportedCoolingModesForAc(self._ac_number)
         modes = [AT_TO_HA_STATE[mode] for mode in airtouch_modes]
@@ -226,12 +226,12 @@ class AirtouchGroup(CoordinatorEntity, ClimateEntity):
         return super()._handle_coordinator_update()
 
     @property
-    def min_temp(self) -> float:
+    def min_temp(self):
         """Return Minimum Temperature for AC of this group."""
         return self._airtouch.acs[self._unit.BelongsToAc].MinSetpoint
 
     @property
-    def max_temp(self) -> float:
+    def max_temp(self):
         """Return Max Temperature for AC of this group."""
         return self._airtouch.acs[self._unit.BelongsToAc].MaxSetpoint
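The dropped return annotations are what let a type checker verify these overrides against ClimateEntity's declared property types; without them the properties are effectively untyped. A minimal illustration:

# With the annotation, mypy checks the override against the base class contract.
from homeassistant.components.climate import ClimateEntity, HVACMode


class TypedAC(ClimateEntity):
    """Minimal entity whose hvac_modes override stays checkable."""

    @property
    def hvac_modes(self) -> list[HVACMode]:
        # Returning, say, list[str] here would now be flagged by the type checker.
        return [HVACMode.OFF, HVACMode.COOL]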
homeassistant/components/airtouch5/manifest.json

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/airtouch5",
   "iot_class": "local_push",
   "loggers": ["airtouch5py"],
-  "requirements": ["airtouch5py==0.3.0"]
+  "requirements": ["airtouch5py==0.2.11"]
 }
homeassistant/components/airzone_cloud/manifest.json

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
   "iot_class": "cloud_push",
   "loggers": ["aioairzone_cloud"],
-  "requirements": ["aioairzone-cloud==0.7.2"]
+  "requirements": ["aioairzone-cloud==0.6.12"]
 }
homeassistant/components/aladdin_connect/__init__.py

@@ -2,112 +2,39 @@
 
 from __future__ import annotations
 
-from genie_partner_sdk.client import AladdinConnectClient
-from genie_partner_sdk.model import GarageDoor
-
-from homeassistant.const import Platform
+from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers import (
-    aiohttp_client,
-    config_entry_oauth2_flow,
-    device_registry as dr,
-)
+from homeassistant.helpers import issue_registry as ir
 
-from . import api
-from .const import CONFIG_FLOW_MINOR_VERSION, CONFIG_FLOW_VERSION, DOMAIN
-from .coordinator import AladdinConnectConfigEntry, AladdinConnectCoordinator
-
-PLATFORMS: list[Platform] = [Platform.COVER, Platform.SENSOR]
+DOMAIN = "aladdin_connect"
 
 
-async def async_setup_entry(
-    hass: HomeAssistant, entry: AladdinConnectConfigEntry
-) -> bool:
-    """Set up Aladdin Connect Genie from a config entry."""
-    implementation = (
-        await config_entry_oauth2_flow.async_get_config_entry_implementation(
-            hass, entry
-        )
+async def async_setup_entry(hass: HomeAssistant, _: ConfigEntry) -> bool:
+    """Set up Aladdin Connect from a config entry."""
+    ir.async_create_issue(
+        hass,
+        DOMAIN,
+        DOMAIN,
+        is_fixable=False,
+        severity=ir.IssueSeverity.ERROR,
+        translation_key="integration_removed",
+        translation_placeholders={
+            "entries": "/config/integrations/integration/aladdin_connect",
+        },
     )
 
-    session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
-
-    client = AladdinConnectClient(
-        api.AsyncConfigEntryAuth(aiohttp_client.async_get_clientsession(hass), session)
-    )
-
-    sdk_doors = await client.get_doors()
-
-    # Convert SDK GarageDoor objects to integration GarageDoor objects
-    doors = [
-        GarageDoor(
-            {
-                "device_id": door.device_id,
-                "door_number": door.door_number,
-                "name": door.name,
-                "status": door.status,
-                "link_status": door.link_status,
-                "battery_level": door.battery_level,
-            }
-        )
-        for door in sdk_doors
-    ]
-
-    entry.runtime_data = {
-        door.unique_id: AladdinConnectCoordinator(hass, entry, client, door)
-        for door in doors
-    }
-
-    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
-
-    remove_stale_devices(hass, entry)
-
     return True
 
 
-async def async_unload_entry(
-    hass: HomeAssistant, entry: AladdinConnectConfigEntry
-) -> bool:
+async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Unload a config entry."""
-    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
-
-
-async def async_migrate_entry(
-    hass: HomeAssistant, config_entry: AladdinConnectConfigEntry
-) -> bool:
-    """Migrate old config."""
-    if config_entry.version < CONFIG_FLOW_VERSION:
-        config_entry.async_start_reauth(hass)
-        new_data = {**config_entry.data}
-        hass.config_entries.async_update_entry(
-            config_entry,
-            data=new_data,
-            version=CONFIG_FLOW_VERSION,
-            minor_version=CONFIG_FLOW_MINOR_VERSION,
-        )
-
     return True
 
 
-def remove_stale_devices(
-    hass: HomeAssistant,
-    config_entry: AladdinConnectConfigEntry,
-) -> None:
-    """Remove stale devices from device registry."""
-    device_registry = dr.async_get(hass)
-    device_entries = dr.async_entries_for_config_entry(
-        device_registry, config_entry.entry_id
-    )
-    all_device_ids = set(config_entry.runtime_data)
-
-    for device_entry in device_entries:
-        device_id: str | None = None
-        for identifier in device_entry.identifiers:
-            if identifier[0] == DOMAIN:
-                device_id = identifier[1]
-                break
-
-        if device_id and device_id not in all_device_ids:
-            device_registry.async_update_device(
-                device_entry.id, remove_config_entry_id=config_entry.entry_id
-            )
+async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
+    """Remove a config entry."""
+    if not hass.config_entries.async_loaded_entries(DOMAIN):
+        ir.async_delete_issue(hass, DOMAIN, DOMAIN)
+        # Remove any remaining disabled or ignored entries
+        for _entry in hass.config_entries.async_entries(DOMAIN):
+            hass.async_create_task(hass.config_entries.async_remove(_entry.entry_id))
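The replacement code above is the standard tombstone for a removed integration: async_setup_entry raises a repair issue instead of setting anything up, and async_remove_entry deletes the issue once the last entry is gone. Reusing DOMAIN as the issue_id is what keeps the issue singular across multiple config entries. A stripped-down sketch of the same lifecycle, with an illustrative domain:

# Stripped-down tombstone lifecycle; "example_removed" is illustrative.
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import issue_registry as ir

DOMAIN = "example_removed"


async def async_setup_entry(hass: HomeAssistant, _: ConfigEntry) -> bool:
    """Raise a repair issue instead of setting up the integration."""
    ir.async_create_issue(
        hass,
        DOMAIN,
        DOMAIN,  # issue_id == domain, so N entries still yield one issue
        is_fixable=False,
        severity=ir.IssueSeverity.ERROR,
        translation_key="integration_removed",
    )
    return True


async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """Delete the issue when the user removes the last loaded entry."""
    if not hass.config_entries.async_loaded_entries(DOMAIN):
        ir.async_delete_issue(hass, DOMAIN, DOMAIN)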