Mirror of https://github.com/home-assistant/core.git (synced 2025-07-31 17:18:23 +00:00)

Commit 3c0c71212b: Merge branch 'dev' into fail_on_templated_service_data

@@ -6,6 +6,7 @@ core: &core
   - homeassistant/helpers/**
   - homeassistant/package_constraints.txt
   - homeassistant/util/**
+  - mypy.ini
   - pyproject.toml
   - requirements.txt
   - setup.cfg
@@ -79,6 +80,7 @@ components: &components
   - homeassistant/components/group/**
   - homeassistant/components/hassio/**
   - homeassistant/components/homeassistant/**
+  - homeassistant/components/homeassistant_hardware/**
   - homeassistant/components/http/**
   - homeassistant/components/image/**
   - homeassistant/components/input_boolean/**
@@ -130,6 +132,7 @@ tests: &tests
   - tests/components/conftest.py
   - tests/components/diagnostics/**
   - tests/components/history/**
+  - tests/components/light/common.py
   - tests/components/logbook/**
   - tests/components/recorder/**
   - tests/components/repairs/**
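
For readers skimming the hunks above: `&core`, `&components`, and `&tests` are YAML anchors, which let the rest of the file alias each list instead of repeating its entries. A minimal sketch of the mechanism (the `any_core_change` key is hypothetical; only the anchor syntax is taken from the file):

    core: &core              # "&core" names this list
      - homeassistant/helpers/**
      - mypy.ini
    any_core_change: *core   # "*core" reuses the full list above
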

@@ -62,7 +62,7 @@
   "json.schemas": [
     {
       "fileMatch": ["homeassistant/components/*/manifest.json"],
-      "url": "./script/json_schemas/manifest_schema.json"
+      "url": "${containerWorkspaceFolder}/script/json_schemas/manifest_schema.json"
     }
   ]
 }

.gitattributes (vendored, 11 changes)
@@ -11,3 +11,14 @@
 *.pcm binary

 Dockerfile.dev linguist-language=Dockerfile
+
+# Generated files
+CODEOWNERS linguist-generated=true
+Dockerfile linguist-generated=true
+homeassistant/generated/*.py linguist-generated=true
+mypy.ini linguist-generated=true
+requirements.txt linguist-generated=true
+requirements_all.txt linguist-generated=true
+requirements_test_all.txt linguist-generated=true
+requirements_test_pre_commit.txt linguist-generated=true
+script/hassfest/docker/Dockerfile linguist-generated=true

.github/ISSUE_TEMPLATE/bug_report.yml (vendored, 4 changes)
@@ -6,9 +6,9 @@ body:
       value: |
         This issue form is for reporting bugs only!

-        If you have a feature or enhancement request, please use the [feature request][fr] section of our [Community Forum][fr].
+        If you have a feature or enhancement request, please [request them here instead][fr].

-        [fr]: https://community.home-assistant.io/c/feature-requests
+        [fr]: https://github.com/orgs/home-assistant/discussions
   - type: textarea
     validations:
       required: true

.github/ISSUE_TEMPLATE/config.yml (vendored, 4 changes)
@@ -10,8 +10,8 @@ contact_links:
     url: https://www.home-assistant.io/help
     about: We use GitHub for tracking bugs, check our website for resources on getting help.
   - name: Feature Request
-    url: https://community.home-assistant.io/c/feature-requests
-    about: Please use our Community Forum for making feature requests.
+    url: https://github.com/orgs/home-assistant/discussions
+    about: Please use this link to request new features or enhancements to existing features.
   - name: I'm unsure where to go
     url: https://www.home-assistant.io/join-chat
     about: If you are unsure where to go, then joining our chat is recommended; Just ask!

.github/PULL_REQUEST_TEMPLATE.md (vendored, 2 changes)
@@ -46,6 +46,8 @@
 - This PR fixes or closes issue: fixes #
 - This PR is related to issue:
 - Link to documentation pull request:
+- Link to developer documentation pull request:
+- Link to frontend pull request:

 ## Checklist
 <!--

.github/assets/screenshot-integrations.png (vendored, binary)
Binary file not shown; size changed from 65 KiB to 99 KiB.

.github/copilot-instructions.md (vendored, new file, 1161 lines)
File diff suppressed because it is too large.

.github/workflows/builder.yml (vendored, 60 changes)
@@ -10,7 +10,7 @@ on:

 env:
   BUILD_TYPE: core
-  DEFAULT_PYTHON: "3.12"
+  DEFAULT_PYTHON: "3.13"
   PIP_TIMEOUT: 60
   UV_HTTP_TIMEOUT: 60
   UV_SYSTEM_PYTHON: "true"
@@ -32,7 +32,7 @@ jobs:
           fetch-depth: 0

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

@@ -69,7 +69,7 @@ jobs:
         run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

       - name: Upload translations
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: translations
           path: translations.tar.gz
@@ -94,7 +94,7 @@ jobs:

       - name: Download nightly wheels of frontend
         if: needs.init.outputs.channel == 'dev'
-        uses: dawidd6/action-download-artifact@v6
+        uses: dawidd6/action-download-artifact@v11
         with:
           github_token: ${{secrets.GITHUB_TOKEN}}
           repo: home-assistant/frontend
@@ -105,10 +105,10 @@ jobs:

       - name: Download nightly wheels of intents
         if: needs.init.outputs.channel == 'dev'
-        uses: dawidd6/action-download-artifact@v6
+        uses: dawidd6/action-download-artifact@v11
         with:
           github_token: ${{secrets.GITHUB_TOKEN}}
-          repo: home-assistant/intents-package
+          repo: OHF-Voice/intents-package
           branch: main
           workflow: nightly.yaml
           workflow_conclusion: success
@@ -116,7 +116,7 @@ jobs:

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         if: needs.init.outputs.channel == 'dev'
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

@@ -175,7 +175,7 @@ jobs:
           sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt

       - name: Download translations
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.3.0
         with:
           name: translations

@@ -190,14 +190,14 @@ jobs:
           echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3.3.0
+        uses: docker/login-action@v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build base image
-        uses: home-assistant/builder@2024.08.2
+        uses: home-assistant/builder@2025.03.0
         with:
           args: |
             $BUILD_ARGS \
@@ -256,14 +256,14 @@ jobs:
           fi

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3.3.0
+        uses: docker/login-action@v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build base image
-        uses: home-assistant/builder@2024.08.2
+        uses: home-assistant/builder@2025.03.0
         with:
           args: |
             $BUILD_ARGS \
@@ -324,20 +324,20 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Install Cosign
-        uses: sigstore/cosign-installer@v3.7.0
+        uses: sigstore/cosign-installer@v3.9.1
         with:
           cosign-release: "v2.2.3"

       - name: Login to DockerHub
         if: matrix.registry == 'docker.io/homeassistant'
-        uses: docker/login-action@v3.3.0
+        uses: docker/login-action@v3.4.0
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Login to GitHub Container Registry
         if: matrix.registry == 'ghcr.io/home-assistant'
-        uses: docker/login-action@v3.3.0
+        uses: docker/login-action@v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -448,18 +448,21 @@ jobs:
     environment: ${{ needs.init.outputs.channel }}
     needs: ["init", "build_base"]
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      id-token: write
     if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
     steps:
       - name: Checkout the repository
         uses: actions/checkout@v4.2.2

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

       - name: Download translations
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.3.0
         with:
           name: translations

@@ -473,16 +476,13 @@ jobs:
         run: |
           # Remove dist, build, and homeassistant.egg-info
           # when build locally for testing!
-          pip install twine build
+          pip install build
           python -m build

-      - name: Upload package
-        shell: bash
-        run: |
-          export TWINE_USERNAME="__token__"
-          export TWINE_PASSWORD="${{ secrets.TWINE_TOKEN }}"
-
-          twine upload dist/* --skip-existing
+      - name: Upload package to PyPI
+        uses: pypa/gh-action-pypi-publish@v1.12.4
+        with:
+          skip-existing: true

   hassfest-image:
     name: Build and test hassfest image
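
Together with the `contents: read`/`id-token: write` permissions added a few hunks earlier, swapping the twine upload for pypa/gh-action-pypi-publish moves the release job to PyPI trusted publishing: the action trades a GitHub-issued OIDC token for a short-lived PyPI token, so no long-lived TWINE_TOKEN secret is required. A minimal sketch of the pattern, with a hypothetical job id:

    jobs:
      publish-pypi:                  # hypothetical job id
        runs-on: ubuntu-latest
        permissions:
          id-token: write            # needed for the OIDC token exchange
        steps:
          # assumes the built distributions already sit in dist/
          - name: Upload package to PyPI
            uses: pypa/gh-action-pypi-publish@v1.12.4
            with:
              skip-existing: true    # tolerate re-uploads of the same files

By default the action publishes everything under dist/, matching the behavior of the old `twine upload dist/* --skip-existing`.
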
@@ -502,14 +502,14 @@ jobs:
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build Docker image
-        uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6.9.0
+        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
         with:
           context: . # So action will not pull the repository again
           file: ./script/hassfest/docker/Dockerfile
@@ -517,12 +517,12 @@ jobs:
           tags: ${{ env.HASSFEST_IMAGE_TAG }}

       - name: Run hassfest against core
-        run: docker run --rm -v ${{ github.workspace }}/homeassistant:/github/workspace/homeassistant ${{ env.HASSFEST_IMAGE_TAG }} --core-integrations-path=/github/workspace/homeassistant/components
+        run: docker run --rm -v ${{ github.workspace }}:/github/workspace ${{ env.HASSFEST_IMAGE_TAG }} --core-path=/github/workspace

       - name: Push Docker image
         if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
         id: push
-        uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6.9.0
+        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
         with:
           context: . # So action will not pull the repository again
           file: ./script/hassfest/docker/Dockerfile
@@ -531,7 +531,7 @@ jobs:

       - name: Generate artifact attestation
         if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
-        uses: actions/attest-build-provenance@ef244123eb79f2f7a7e75d99086184180e6d0018 # v1.4.4
+        uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
         with:
           subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
           subject-digest: ${{ steps.push.outputs.digest }}

.github/workflows/ci.yaml (vendored, 370 changes)
@@ -37,12 +37,12 @@ on:
         type: boolean

 env:
-  CACHE_VERSION: 11
+  CACHE_VERSION: 3
   UV_CACHE_VERSION: 1
-  MYPY_CACHE_VERSION: 9
-  HA_SHORT_VERSION: "2024.12"
-  DEFAULT_PYTHON: "3.12"
-  ALL_PYTHON_VERSIONS: "['3.12']"
+  MYPY_CACHE_VERSION: 1
+  HA_SHORT_VERSION: "2025.8"
+  DEFAULT_PYTHON: "3.13"
+  ALL_PYTHON_VERSIONS: "['3.13']"
   # 10.3 is the oldest supported version
   # - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022)
   # 10.6 is the current long-term-support
@@ -89,6 +89,7 @@ jobs:
       test_groups: ${{ steps.info.outputs.test_groups }}
       tests_glob: ${{ steps.info.outputs.tests_glob }}
      tests: ${{ steps.info.outputs.tests }}
+      lint_only: ${{ steps.info.outputs.lint_only }}
       skip_coverage: ${{ steps.info.outputs.skip_coverage }}
     runs-on: ubuntu-24.04
     steps:
@@ -142,6 +143,7 @@ jobs:
           test_group_count=10
           tests="[]"
           tests_glob=""
+          lint_only=""
           skip_coverage=""

           if [[ "${{ steps.integrations.outputs.changes }}" != "[]" ]];
@@ -192,6 +194,17 @@ jobs:
             test_full_suite="true"
           fi

+          if [[ "${{ github.event.inputs.lint-only }}" == "true" ]] \
+            || [[ "${{ github.event.inputs.pylint-only }}" == "true" ]] \
+            || [[ "${{ github.event.inputs.mypy-only }}" == "true" ]] \
+            || [[ "${{ github.event.inputs.audit-licenses-only }}" == "true" ]] \
+            || [[ "${{ github.event_name }}" == "push" \
+              && "${{ github.event.repository.full_name }}" != "home-assistant/core" ]];
+          then
+            lint_only="true"
+            skip_coverage="true"
+          fi
+
           if [[ "${{ github.event.inputs.skip-coverage }}" == "true" ]] \
             || [[ "${{ contains(github.event.pull_request.labels.*.name, 'ci-skip-coverage') }}" == "true" ]];
           then
@@ -217,6 +230,8 @@ jobs:
           echo "tests=${tests}" >> $GITHUB_OUTPUT
           echo "tests_glob: ${tests_glob}"
           echo "tests_glob=${tests_glob}" >> $GITHUB_OUTPUT
+          echo "lint_only: ${lint_only}"
+          echo "lint_only=${lint_only}" >> $GITHUB_OUTPUT
           echo "skip_coverage: ${skip_coverage}"
           echo "skip_coverage=${skip_coverage}" >> $GITHUB_OUTPUT

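
The new `lint_only` output folds the four workflow-dispatch switches (`lint-only`, `pylint-only`, `mypy-only`, `audit-licenses-only`) and the push-to-fork case into a single flag, so each downstream job can replace five repeated conditions with one check, as the simplified `if:` blocks later in this diff show. A sketch of the consuming side (the job id is hypothetical):

      pytest-something:              # hypothetical job id
        needs:
          - info
        if: |
          needs.info.outputs.lint_only != 'true'
          && needs.info.outputs.test_full_suite == 'true'
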
@@ -234,17 +249,17 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.1.2
+        uses: actions/cache@v4.2.3
         with:
           path: venv
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-venv-${{
             needs.info.outputs.pre-commit_cache_key }}
       - name: Create Python virtual environment
         if: steps.cache-venv.outputs.cache-hit != 'true'
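
Adding `${{ runner.arch }}` to every cache key, as this and the following hunks do, stops a virtual environment built on one CPU architecture from being restored on a runner of another, where its compiled wheels would fail to load. Assuming, purely for illustration, Python 3.13.1 on a 64-bit x86 Linux runner, the venv key now expands along these lines:

    # before:  Linux-3.13.1-venv-<pre-commit cache key>
    # after:   Linux-X64-3.13.1-venv-<pre-commit cache key>
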
@@ -256,12 +271,12 @@ jobs:
           uv pip install "$(cat requirements_test.txt | grep pre-commit)"
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.1.2
+        uses: actions/cache@v4.2.3
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           lookup-only: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.pre-commit_cache_key }}
       - name: Install pre-commit dependencies
         if: steps.cache-precommit.outputs.cache-hit != 'true'
@@ -279,28 +294,28 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@v5.6.0
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-venv-${{
             needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.pre-commit_cache_key }}
       - name: Run ruff-format
         run: |
@@ -319,33 +334,33 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@v5.6.0
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-venv-${{
             needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.pre-commit_cache_key }}
       - name: Run ruff
         run: |
           . venv/bin/activate
-          pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
+          pre-commit run --hook-stage manual ruff-check --all-files --show-diff-on-failure
         env:
           RUFF_OUTPUT_FORMAT: github

@@ -359,28 +374,28 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@v5.6.0
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-venv-${{
             needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.pre-commit_cache_key }}

       - name: Register yamllint problem matcher
@@ -469,7 +484,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
@@ -482,23 +497,22 @@ jobs:
           env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.1.2
+        uses: actions/cache@v4.2.3
         with:
           path: venv
-          lookup-only: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Restore uv wheel cache
         if: steps.cache-venv.outputs.cache-hit != 'true'
-        uses: actions/cache@v4.1.2
+        uses: actions/cache@v4.2.3
         with:
           path: ${{ env.UV_CACHE_DIR }}
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             steps.generate-uv-key.outputs.key }}
           restore-keys: |
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-uv-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-uv-${{
             env.UV_CACHE_VERSION }}-${{ steps.generate-uv-key.outputs.version }}-${{
             env.HA_SHORT_VERSION }}-
       - name: Install additional OS dependencies
@@ -531,6 +545,26 @@ jobs:
           python -m script.gen_requirements_all ci
           uv pip install -r requirements_all_pytest.txt -r requirements_test.txt
           uv pip install -e . --config-settings editable_mode=compat
+      - name: Dump pip freeze
+        run: |
+          python -m venv venv
+          . venv/bin/activate
+          python --version
+          uv pip freeze >> pip_freeze.txt
+      - name: Upload pip_freeze artifact
+        uses: actions/upload-artifact@v4.6.2
+        with:
+          name: pip-freeze-${{ matrix.python-version }}
+          path: pip_freeze.txt
+          overwrite: true
+      - name: Remove pip_freeze
+        run: rm pip_freeze.txt
+      - name: Remove generated requirements_all
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: rm requirements_all_pytest.txt requirements_all_wheels_*.txt
+      - name: Check dirty
+        run: |
+          ./script/check_dirty

   hassfest:
     name: Check hassfest
@@ -553,18 +587,18 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Run hassfest
         run: |
@@ -586,24 +620,43 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Run gen_requirements_all.py
         run: |
           . venv/bin/activate
           python -m script.gen_requirements_all validate

+  dependency-review:
+    name: Dependency review
+    runs-on: ubuntu-24.04
+    needs:
+      - info
+      - base
+    if: |
+      github.event.inputs.pylint-only != 'true'
+      && github.event.inputs.mypy-only != 'true'
+      && needs.info.outputs.requirements == 'true'
+      && github.event_name == 'pull_request'
+    steps:
+      - name: Check out code from GitHub
+        uses: actions/checkout@v4.2.2
+      - name: Dependency review
+        uses: actions/dependency-review-action@v4.7.1
+        with:
+          license-check: false # We use our own license audit checks
+
   audit-licenses:
     name: Audit licenses
     runs-on: ubuntu-24.04
@@ -624,25 +677,25 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Extract license data
         run: |
           . venv/bin/activate
           python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
       - name: Upload licenses
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
           path: licenses-${{ matrix.python-version }}.json
@@ -667,18 +720,18 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Register pylint problem matcher
         run: |
@@ -714,18 +767,18 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Register pylint problem matcher
         run: |
@@ -759,7 +812,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
@@ -772,22 +825,22 @@ jobs:
           env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Restore mypy cache
-        uses: actions/cache@v4.1.2
+        uses: actions/cache@v4.2.3
         with:
           path: .mypy_cache
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             steps.generate-mypy-key.outputs.key }}
           restore-keys: |
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-mypy-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-mypy-${{
             env.MYPY_CACHE_VERSION }}-${{ steps.generate-mypy-key.outputs.version }}-${{
             env.HA_SHORT_VERSION }}-
       - name: Register mypy problem matcher
@@ -810,19 +863,17 @@ jobs:
   prepare-pytest-full:
     runs-on: ubuntu-24.04
     if: |
-      (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
-      && github.event.inputs.lint-only != 'true'
-      && github.event.inputs.pylint-only != 'true'
-      && github.event.inputs.mypy-only != 'true'
-      && github.event.inputs.audit-licenses-only != 'true'
+      needs.info.outputs.lint_only != 'true'
       && needs.info.outputs.test_full_suite == 'true'
     needs:
       - info
       - base
-    strategy:
-      fail-fast: false
-      matrix:
-        python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
+      - gen-requirements-all
+      - hassfest
+      - lint-other
+      - lint-ruff
+      - lint-ruff-format
+      - mypy
     name: Split tests for full run
     steps:
       - name: Install additional OS dependencies
@@ -836,40 +887,36 @@ jobs:
           libgammu-dev
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
-      - name: Set up Python ${{ matrix.python-version }}
+      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@v5.6.0
         with:
-          python-version: ${{ matrix.python-version }}
+          python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
           key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Run split_tests.py
         run: |
           . venv/bin/activate
           python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
       - name: Upload pytest_buckets
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.6.2
         with:
-          name: pytest_buckets-${{ matrix.python-version }}
+          name: pytest_buckets
           path: pytest_buckets.txt
           overwrite: true

   pytest-full:
     runs-on: ubuntu-24.04
     if: |
-      (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
-      && github.event.inputs.lint-only != 'true'
-      && github.event.inputs.pylint-only != 'true'
-      && github.event.inputs.mypy-only != 'true'
-      && github.event.inputs.audit-licenses-only != 'true'
+      needs.info.outputs.lint_only != 'true'
       && needs.info.outputs.test_full_suite == 'true'
     needs:
       - info
@@ -897,22 +944,24 @@ jobs:
             bluez \
             ffmpeg \
             libturbojpeg \
-            libgammu-dev
+            libgammu-dev \
+            libxml2-utils
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
-          key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+          key: >-
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Register Python problem matcher
         run: |
@@ -921,9 +970,9 @@ jobs:
         run: |
           echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
       - name: Download pytest_buckets
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.3.0
         with:
-          name: pytest_buckets-${{ matrix.python-version }}
+          name: pytest_buckets
       - name: Compile English translations
         run: |
           . venv/bin/activate
@@ -941,6 +990,7 @@ jobs:
           if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then
             cov_params+=(--cov="homeassistant")
             cov_params+=(--cov-report=xml)
+            cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
           fi

           echo "Test group ${{ matrix.group }}: $(sed -n "${{ matrix.group }},1p" pytest_buckets.txt)"
@@ -954,22 +1004,35 @@ jobs:
             ${cov_params[@]} \
             -o console_output_style=count \
             -p no:sugar \
+            --exclude-warning-annotations \
             $(sed -n "${{ matrix.group }},1p" pytest_buckets.txt) \
             2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-full.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
           path: pytest-*.txt
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
           path: coverage.xml
           overwrite: true
+      - name: Beautify test results
+        # For easier identification of parsing errors
+        if: needs.info.outputs.skip_coverage != 'true'
+        run: |
+          xmllint --format "junit.xml" > "junit.xml-tmp"
+          mv "junit.xml-tmp" "junit.xml"
+      - name: Upload test results artifact
+        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
+        uses: actions/upload-artifact@v4.6.2
+        with:
+          name: test-results-full-${{ matrix.python-version }}-${{ matrix.group }}
+          path: junit.xml
       - name: Remove pytest_buckets
         run: rm pytest_buckets.txt
       - name: Check dirty
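
The junitxml additions in this and the later database test jobs wire pytest's JUnit report into its own artifact: `--junitxml=junit.xml -o junit_family=legacy` selects the older xunit1 report schema, and `xmllint --format` (shipped in the libxml2-utils package added to the OS dependencies) re-indents the XML so parsing errors are easier to pinpoint. A condensed, illustrative form of the recurring three-step pattern:

      - name: Run tests with a JUnit report   # illustrative condensation
        run: |
          python3 -m pytest tests --junitxml=junit.xml -o junit_family=legacy
      - name: Beautify test results
        run: |
          xmllint --format "junit.xml" > "junit.xml-tmp"
          mv "junit.xml-tmp" "junit.xml"
      - name: Upload test results artifact
        uses: actions/upload-artifact@v4.6.2
        with:
          name: test-results                  # per-group names in the real jobs
          path: junit.xml
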
@@ -987,11 +1050,7 @@ jobs:
       MYSQL_ROOT_PASSWORD: password
     options: --health-cmd="mysqladmin ping -uroot -ppassword" --health-interval=5s --health-timeout=2s --health-retries=3
     if: |
-      (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
-      && github.event.inputs.lint-only != 'true'
-      && github.event.inputs.pylint-only != 'true'
-      && github.event.inputs.mypy-only != 'true'
-      && github.event.inputs.audit-licenses-only != 'true'
+      needs.info.outputs.lint_only != 'true'
       && needs.info.outputs.mariadb_groups != '[]'
     needs:
       - info
@@ -1018,22 +1077,24 @@ jobs:
             bluez \
             ffmpeg \
             libturbojpeg \
-            libmariadb-dev-compat
+            libmariadb-dev-compat \
+            libxml2-utils
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
-          key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+          key: >-
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Register Python problem matcher
         run: |
@@ -1066,6 +1127,7 @@ jobs:
             cov_params+=(--cov="homeassistant.components.recorder")
             cov_params+=(--cov-report=xml)
             cov_params+=(--cov-report=term-missing)
+            cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
           fi

           python3 -b -X dev -m pytest \
@@ -1077,6 +1139,7 @@ jobs:
             -o console_output_style=count \
             --durations=10 \
             -p no:sugar \
+            --exclude-warning-annotations \
             --dburl=mysql://root:password@127.0.0.1/homeassistant-test \
             tests/components/history \
             tests/components/logbook \
@@ -1085,7 +1148,7 @@ jobs:
             2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.mariadb }}
@ -1093,12 +1156,25 @@ jobs:
|
|||||||
overwrite: true
|
overwrite: true
|
||||||
- name: Upload coverage artifact
|
- name: Upload coverage artifact
|
||||||
if: needs.info.outputs.skip_coverage != 'true'
|
if: needs.info.outputs.skip_coverage != 'true'
|
||||||
uses: actions/upload-artifact@v4.4.3
|
uses: actions/upload-artifact@v4.6.2
|
||||||
with:
|
with:
|
||||||
name: coverage-${{ matrix.python-version }}-${{
|
name: coverage-${{ matrix.python-version }}-${{
|
||||||
steps.pytest-partial.outputs.mariadb }}
|
steps.pytest-partial.outputs.mariadb }}
|
||||||
path: coverage.xml
|
path: coverage.xml
|
||||||
overwrite: true
|
overwrite: true
|
||||||
|
- name: Beautify test results
|
||||||
|
# For easier identification of parsing errors
|
||||||
|
if: needs.info.outputs.skip_coverage != 'true'
|
||||||
|
run: |
|
||||||
|
xmllint --format "junit.xml" > "junit.xml-tmp"
|
||||||
|
mv "junit.xml-tmp" "junit.xml"
|
||||||
|
- name: Upload test results artifact
|
||||||
|
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
|
||||||
|
uses: actions/upload-artifact@v4.6.2
|
||||||
|
with:
|
||||||
|
name: test-results-mariadb-${{ matrix.python-version }}-${{
|
||||||
|
steps.pytest-partial.outputs.mariadb }}
|
||||||
|
path: junit.xml
|
||||||
- name: Check dirty
|
- name: Check dirty
|
||||||
run: |
|
run: |
|
||||||
./script/check_dirty
|
./script/check_dirty
|
||||||
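The cov_params lines in the hunks above rely on a bash array to collect optional pytest arguments, so the coverage and JUnit flags are only passed when coverage is enabled. A minimal standalone sketch of the pattern (the environment variable and test path here are illustrative, not taken from the workflow):

    #!/usr/bin/env bash
    cov_params=()
    if [[ "${SKIP_COVERAGE:-}" != "true" ]]; then
      cov_params+=(--cov="homeassistant.components.recorder")
      cov_params+=(--cov-report=xml)
      # "-o junit_family=legacy" selects pytest's older xunit1 report schema.
      cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
    fi
    # An empty array expands to nothing; a populated one expands to the
    # individual arguments without re-splitting them on whitespace.
    python3 -m pytest "${cov_params[@]}" tests/components/history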
@@ -1114,11 +1190,7 @@ jobs:
           POSTGRES_PASSWORD: password
         options: --health-cmd="pg_isready -hlocalhost -Upostgres" --health-interval=5s --health-timeout=2s --health-retries=3
     if: |
-      (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
-      && github.event.inputs.lint-only != 'true'
-      && github.event.inputs.pylint-only != 'true'
-      && github.event.inputs.mypy-only != 'true'
-      && github.event.inputs.audit-licenses-only != 'true'
+      needs.info.outputs.lint_only != 'true'
       && needs.info.outputs.postgresql_groups != '[]'
     needs:
       - info
@@ -1144,7 +1216,8 @@ jobs:
           sudo apt-get -y install \
             bluez \
             ffmpeg \
-            libturbojpeg
+            libturbojpeg \
+            libxml2-utils
           sudo /usr/share/postgresql-common/pgdg/apt.postgresql.org.sh -y
           sudo apt-get -y install \
             postgresql-server-dev-14
@@ -1152,17 +1225,18 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
-          key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+          key: >-
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Register Python problem matcher
         run: |
@@ -1195,6 +1269,7 @@ jobs:
             cov_params+=(--cov="homeassistant.components.recorder")
             cov_params+=(--cov-report=xml)
             cov_params+=(--cov-report=term-missing)
+            cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
           fi

           python3 -b -X dev -m pytest \
@@ -1207,6 +1282,7 @@ jobs:
             --durations=0 \
             --durations-min=10 \
             -p no:sugar \
+            --exclude-warning-annotations \
             --dburl=postgresql://postgres:password@127.0.0.1/homeassistant-test \
             tests/components/history \
             tests/components/logbook \
@@ -1215,7 +1291,7 @@ jobs:
             2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.postgresql }}
@@ -1223,12 +1299,25 @@ jobs:
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: coverage-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.postgresql }}
           path: coverage.xml
           overwrite: true
+      - name: Beautify test results
+        # For easier identification of parsing errors
+        if: needs.info.outputs.skip_coverage != 'true'
+        run: |
+          xmllint --format "junit.xml" > "junit.xml-tmp"
+          mv "junit.xml-tmp" "junit.xml"
+      - name: Upload test results artifact
+        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
+        uses: actions/upload-artifact@v4.6.2
+        with:
+          name: test-results-postgres-${{ matrix.python-version }}-${{
+            steps.pytest-partial.outputs.postgresql }}
+          path: junit.xml
       - name: Check dirty
         run: |
           ./script/check_dirty
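The new "Beautify test results" step exists, per its own comment, to make parsing errors easier to pinpoint: xmllint fails loudly on malformed XML and otherwise rewrites junit.xml with consistent indentation before upload. The same two commands work locally against any JUnit report, and this is also why libxml2-utils was added to the apt installs above:

    # Exits non-zero with a diagnostic if junit.xml is not well-formed;
    # otherwise writes a pretty-printed copy and swaps it into place.
    xmllint --format "junit.xml" > "junit.xml-tmp"
    mv "junit.xml-tmp" "junit.xml"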
@@ -1247,26 +1336,21 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.3.0
         with:
           pattern: coverage-*
       - name: Upload coverage to Codecov
         if: needs.info.outputs.test_full_suite == 'true'
-        uses: codecov/codecov-action@v4.6.0
+        uses: codecov/codecov-action@v5.4.3
         with:
           fail_ci_if_error: true
           flags: full-suite
           token: ${{ secrets.CODECOV_TOKEN }}
-          version: v0.6.0

   pytest-partial:
     runs-on: ubuntu-24.04
     if: |
-      (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core')
-      && github.event.inputs.lint-only != 'true'
-      && github.event.inputs.pylint-only != 'true'
-      && github.event.inputs.mypy-only != 'true'
-      && github.event.inputs.audit-licenses-only != 'true'
+      needs.info.outputs.lint_only != 'true'
       && needs.info.outputs.tests_glob
       && needs.info.outputs.test_full_suite == 'false'
     needs:
@@ -1294,22 +1378,24 @@ jobs:
             bluez \
             ffmpeg \
             libturbojpeg \
-            libgammu-dev
+            libgammu-dev \
+            libxml2-utils
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.3
         with:
           path: venv
           fail-on-cache-miss: true
-          key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
+          key: >-
+            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Register Python problem matcher
         run: |
@@ -1342,6 +1428,7 @@ jobs:
             cov_params+=(--cov="homeassistant.components.${{ matrix.group }}")
             cov_params+=(--cov-report=xml)
             cov_params+=(--cov-report=term-missing)
+            cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
           fi

           python3 -b -X dev -m pytest \
@@ -1354,22 +1441,35 @@ jobs:
             --durations=0 \
             --durations-min=1 \
             -p no:sugar \
+            --exclude-warning-annotations \
             tests/components/${{ matrix.group }} \
             2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
           path: pytest-*.txt
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
           path: coverage.xml
           overwrite: true
+      - name: Beautify test results
+        # For easier identification of parsing errors
+        if: needs.info.outputs.skip_coverage != 'true'
+        run: |
+          xmllint --format "junit.xml" > "junit.xml-tmp"
+          mv "junit.xml-tmp" "junit.xml"
+      - name: Upload test results artifact
+        if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
+        uses: actions/upload-artifact@v4.6.2
+        with:
+          name: test-results-partial-${{ matrix.python-version }}-${{ matrix.group }}
+          path: junit.xml
       - name: Check dirty
         run: |
           ./script/check_dirty
@@ -1386,13 +1486,37 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.3.0
         with:
           pattern: coverage-*
       - name: Upload coverage to Codecov
         if: needs.info.outputs.test_full_suite == 'false'
-        uses: codecov/codecov-action@v4.6.0
+        uses: codecov/codecov-action@v5.4.3
         with:
           fail_ci_if_error: true
           token: ${{ secrets.CODECOV_TOKEN }}
-          version: v0.6.0
+
+  upload-test-results:
+    name: Upload test results to Codecov
+    # codecov/test-results-action currently doesn't support tokenless uploads
+    # therefore we can't run it on forks
+    if: ${{ (github.event_name != 'pull_request' || !github.event.pull_request.head.repo.fork) && needs.info.outputs.skip_coverage != 'true' && !cancelled() }}
+    runs-on: ubuntu-24.04
+    needs:
+      - info
+      - pytest-partial
+      - pytest-full
+      - pytest-postgres
+      - pytest-mariadb
+    timeout-minutes: 10
+    steps:
+      - name: Download all coverage artifacts
+        uses: actions/download-artifact@v4.3.0
+        with:
+          pattern: test-results-*
+      - name: Upload test results to Codecov
+        uses: codecov/test-results-action@v1
+        with:
+          fail_ci_if_error: true
+          verbose: true
+          token: ${{ secrets.CODECOV_TOKEN }}
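The new upload-test-results job fans in the per-matrix JUnit artifacts purely by the test-results-* name pattern, the same convention used by the upload steps above. For debugging, the same selection can be reproduced against a finished run with the GitHub CLI; the run ID below is a placeholder:

    # Download only the JUnit artifacts from one workflow run of this repo.
    gh run download 1234567890 --repo home-assistant/core --pattern "test-results-*"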
.github/workflows/codeql.yml (vendored): 4 changes
@@ -24,11 +24,11 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3.27.0
+        uses: github/codeql-action/init@v3.29.0
         with:
           languages: python

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3.27.0
+        uses: github/codeql-action/analyze@v3.29.0
         with:
           category: "/language:python"
.github/workflows/detect-duplicate-issues.yml (vendored, new file): 385 added lines
@@ -0,0 +1,385 @@
+name: Auto-detect duplicate issues
+
+# yamllint disable-line rule:truthy
+on:
+  issues:
+    types: [labeled]
+
+permissions:
+  issues: write
+  models: read
+
+jobs:
+  detect-duplicates:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Check if integration label was added and extract details
+        id: extract
+        uses: actions/github-script@v7.0.1
+        with:
+          script: |
+            // Debug: Log the event payload
+            console.log('Event name:', context.eventName);
+            console.log('Event action:', context.payload.action);
+            console.log('Event payload keys:', Object.keys(context.payload));
+
+            // Check the specific label that was added
+            const addedLabel = context.payload.label;
+            if (!addedLabel) {
+              console.log('No label found in labeled event payload');
+              core.setOutput('should_continue', 'false');
+              return;
+            }
+
+            console.log(`Label added: ${addedLabel.name}`);
+
+            if (!addedLabel.name.startsWith('integration:')) {
+              console.log('Added label is not an integration label, skipping duplicate detection');
+              core.setOutput('should_continue', 'false');
+              return;
+            }
+
+            console.log(`Integration label added: ${addedLabel.name}`);
+
+            let currentIssue;
+            let integrationLabels = [];
+
+            try {
+              const issue = await github.rest.issues.get({
+                owner: context.repo.owner,
+                repo: context.repo.repo,
+                issue_number: context.payload.issue.number
+              });
+
+              currentIssue = issue.data;
+
+              // Check if potential-duplicate label already exists
+              const hasPotentialDuplicateLabel = currentIssue.labels
+                .some(label => label.name === 'potential-duplicate');
+
+              if (hasPotentialDuplicateLabel) {
+                console.log('Issue already has potential-duplicate label, skipping duplicate detection');
+                core.setOutput('should_continue', 'false');
+                return;
+              }
+
+              integrationLabels = currentIssue.labels
+                .filter(label => label.name.startsWith('integration:'))
+                .map(label => label.name);
+            } catch (error) {
+              core.error(`Failed to fetch issue #${context.payload.issue.number}:`, error.message);
+              core.setOutput('should_continue', 'false');
+              return;
+            }
+
+            // Check if we've already posted a duplicate detection comment recently
+            let comments;
+            try {
+              comments = await github.rest.issues.listComments({
+                owner: context.repo.owner,
+                repo: context.repo.repo,
+                issue_number: context.payload.issue.number,
+                per_page: 10
+              });
+            } catch (error) {
+              core.error('Failed to fetch comments:', error.message);
+              // Continue anyway, worst case we might post a duplicate comment
+              comments = { data: [] };
+            }
+
+            // Check if we've already posted a duplicate detection comment
+            const recentDuplicateComment = comments.data.find(comment =>
+              comment.user && comment.user.login === 'github-actions[bot]' &&
+              comment.body.includes('<!-- workflow: detect-duplicate-issues -->')
+            );
+
+            if (recentDuplicateComment) {
+              console.log('Already posted duplicate detection comment, skipping');
+              core.setOutput('should_continue', 'false');
+              return;
+            }
+
+            core.setOutput('should_continue', 'true');
+            core.setOutput('current_number', currentIssue.number);
+            core.setOutput('current_title', currentIssue.title);
+            core.setOutput('current_body', currentIssue.body);
+            core.setOutput('current_url', currentIssue.html_url);
+            core.setOutput('integration_labels', JSON.stringify(integrationLabels));
+
+            console.log(`Current issue: #${currentIssue.number}`);
+            console.log(`Integration labels: ${integrationLabels.join(', ')}`);
+
+      - name: Fetch similar issues
+        id: fetch_similar
+        if: steps.extract.outputs.should_continue == 'true'
+        uses: actions/github-script@v7.0.1
+        env:
+          INTEGRATION_LABELS: ${{ steps.extract.outputs.integration_labels }}
+          CURRENT_NUMBER: ${{ steps.extract.outputs.current_number }}
+        with:
+          script: |
+            const integrationLabels = JSON.parse(process.env.INTEGRATION_LABELS);
+            const currentNumber = parseInt(process.env.CURRENT_NUMBER);
+
+            if (integrationLabels.length === 0) {
+              console.log('No integration labels found, skipping duplicate detection');
+              core.setOutput('has_similar', 'false');
+              return;
+            }
+
+            // Use GitHub search API to find issues with matching integration labels
+            console.log(`Searching for issues with integration labels: ${integrationLabels.join(', ')}`);
+
+            // Build search query for issues with any of the current integration labels
+            const labelQueries = integrationLabels.map(label => `label:"${label}"`);
+
+            // Calculate date 6 months ago
+            const sixMonthsAgo = new Date();
+            sixMonthsAgo.setMonth(sixMonthsAgo.getMonth() - 6);
+            const dateFilter = `created:>=${sixMonthsAgo.toISOString().split('T')[0]}`;
+
+            let searchQuery;
+
+            if (labelQueries.length === 1) {
+              searchQuery = `repo:${context.repo.owner}/${context.repo.repo} is:issue ${labelQueries[0]} ${dateFilter}`;
+            } else {
+              searchQuery = `repo:${context.repo.owner}/${context.repo.repo} is:issue (${labelQueries.join(' OR ')}) ${dateFilter}`;
+            }
+
+            console.log(`Search query: ${searchQuery}`);
+
+            let result;
+            try {
+              result = await github.rest.search.issuesAndPullRequests({
+                q: searchQuery,
+                per_page: 15,
+                sort: 'updated',
+                order: 'desc'
+              });
+            } catch (error) {
+              core.error('Failed to search for similar issues:', error.message);
+              if (error.status === 403 && error.message.includes('rate limit')) {
+                core.error('GitHub API rate limit exceeded');
+              }
+              core.setOutput('has_similar', 'false');
+              return;
+            }
+
+            // Filter out the current issue, pull requests, and newer issues (higher numbers)
+            const similarIssues = result.data.items
+              .filter(item =>
+                item.number !== currentNumber &&
+                !item.pull_request &&
+                item.number < currentNumber // Only include older issues (lower numbers)
+              )
+              .map(item => ({
+                number: item.number,
+                title: item.title,
+                body: item.body,
+                url: item.html_url,
+                state: item.state,
+                createdAt: item.created_at,
+                updatedAt: item.updated_at,
+                comments: item.comments,
+                labels: item.labels.map(l => l.name)
+              }));
+
+            console.log(`Found ${similarIssues.length} issues with matching integration labels`);
+            console.log('Raw similar issues:', JSON.stringify(similarIssues.slice(0, 3), null, 2));
+
+            if (similarIssues.length === 0) {
+              console.log('No similar issues found, setting has_similar to false');
+              core.setOutput('has_similar', 'false');
+              return;
+            }
+
+            console.log('Similar issues found, setting has_similar to true');
+            core.setOutput('has_similar', 'true');
+
+            // Clean the issue data to prevent JSON parsing issues
+            const cleanedIssues = similarIssues.slice(0, 15).map(item => {
+              // Handle body with improved truncation and null handling
+              let cleanBody = '';
+              if (item.body && typeof item.body === 'string') {
+                // Remove control characters
+                const cleaned = item.body.replace(/[\u0000-\u001F\u007F-\u009F]/g, '');
+                // Truncate to 1000 characters and add ellipsis if needed
+                cleanBody = cleaned.length > 1000
+                  ? cleaned.substring(0, 1000) + '...'
+                  : cleaned;
+              }
+
+              return {
+                number: item.number,
+                title: item.title.replace(/[\u0000-\u001F\u007F-\u009F]/g, ''), // Remove control characters
+                body: cleanBody,
+                url: item.url,
+                state: item.state,
+                createdAt: item.createdAt,
+                updatedAt: item.updatedAt,
+                comments: item.comments,
+                labels: item.labels
+              };
+            });
+
+            console.log(`Cleaned issues count: ${cleanedIssues.length}`);
+            console.log('First cleaned issue:', JSON.stringify(cleanedIssues[0], null, 2));
+
+            core.setOutput('similar_issues', JSON.stringify(cleanedIssues));
+
+      - name: Detect duplicates using AI
+        id: ai_detection
+        if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
+        uses: actions/ai-inference@v1.1.0
+        with:
+          model: openai/gpt-4o
+          system-prompt: |
+            You are a Home Assistant issue duplicate detector. Your task is to identify TRUE DUPLICATES - issues that report the EXACT SAME problem, not just similar or related issues.
+
+            CRITICAL: An issue is ONLY a duplicate if:
+            - It describes the SAME problem with the SAME root cause
+            - Issues about the same integration but different problems are NOT duplicates
+            - Issues with similar symptoms but different causes are NOT duplicates
+
+            Important considerations:
+            - Open issues are more relevant than closed ones for duplicate detection
+            - Recently updated issues may indicate ongoing work or discussion
+            - Issues with more comments are generally more relevant and active
+            - Older closed issues might be resolved differently than newer approaches
+            - Consider the time between issues - very old issues may have different contexts
+
+            Rules:
+            1. ONLY mark as duplicate if the issues describe IDENTICAL problems
+            2. Look for issues that report the same problem or request the same functionality
+            3. Different error messages = NOT a duplicate (even if same integration)
+            4. For CLOSED issues, only mark as duplicate if they describe the EXACT same problem
+            5. For OPEN issues, use a lower threshold (90%+ similarity)
+            6. Prioritize issues with higher comment counts as they indicate more activity/relevance
+            7. When in doubt, do NOT mark as duplicate
+            8. Return ONLY a JSON array of issue numbers that are duplicates
+            9. If no duplicates are found, return an empty array: []
+            10. Maximum 5 potential duplicates, prioritize open issues with comments
+            11. Consider the age of issues - prefer recent duplicates over very old ones
+
+            Example response format:
+            [1234, 5678, 9012]
+
+          prompt: |
+            Current issue (just created):
+            Title: ${{ steps.extract.outputs.current_title }}
+            Body: ${{ steps.extract.outputs.current_body }}
+
+            Other issues to compare against (each includes state, creation date, last update, and comment count):
+            ${{ steps.fetch_similar.outputs.similar_issues }}
+
+            Analyze these issues and identify which ones describe IDENTICAL problems and thus are duplicates of the current issue. When sorting them, consider their state (open/closed), how recently they were updated, and their comment count (higher = more relevant).
+
+          max-tokens: 100
+
+      - name: Post duplicate detection results
+        id: post_results
+        if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
+        uses: actions/github-script@v7.0.1
+        env:
+          AI_RESPONSE: ${{ steps.ai_detection.outputs.response }}
+          SIMILAR_ISSUES: ${{ steps.fetch_similar.outputs.similar_issues }}
+        with:
+          script: |
+            const aiResponse = process.env.AI_RESPONSE;
+
+            console.log('Raw AI response:', JSON.stringify(aiResponse));
+
+            let duplicateNumbers = [];
+            try {
+              // Clean the response of any potential control characters
+              const cleanResponse = aiResponse.trim().replace(/[\u0000-\u001F\u007F-\u009F]/g, '');
+              console.log('Cleaned AI response:', cleanResponse);
+
+              duplicateNumbers = JSON.parse(cleanResponse);
+
+              // Ensure it's an array and contains only numbers
+              if (!Array.isArray(duplicateNumbers)) {
+                console.log('AI response is not an array, trying to extract numbers');
+                const numberMatches = cleanResponse.match(/\d+/g);
+                duplicateNumbers = numberMatches ? numberMatches.map(n => parseInt(n)) : [];
+              }
+
+              // Filter to only valid numbers
+              duplicateNumbers = duplicateNumbers.filter(n => typeof n === 'number' && !isNaN(n));
+
+            } catch (error) {
+              console.log('Failed to parse AI response as JSON:', error.message);
+              console.log('Raw response:', aiResponse);
+
+              // Fallback: try to extract numbers from the response
+              const numberMatches = aiResponse.match(/\d+/g);
+              duplicateNumbers = numberMatches ? numberMatches.map(n => parseInt(n)) : [];
+              console.log('Extracted numbers as fallback:', duplicateNumbers);
+            }
+
+            if (!Array.isArray(duplicateNumbers) || duplicateNumbers.length === 0) {
+              console.log('No duplicates detected by AI');
+              return;
+            }
+
+            console.log(`AI detected ${duplicateNumbers.length} potential duplicates: ${duplicateNumbers.join(', ')}`);
+
+            // Get details of detected duplicates
+            const similarIssues = JSON.parse(process.env.SIMILAR_ISSUES);
+            const duplicates = similarIssues.filter(issue => duplicateNumbers.includes(issue.number));
+
+            if (duplicates.length === 0) {
+              console.log('No matching issues found for detected numbers');
+              return;
+            }
+
+            // Create comment with duplicate detection results
+            const duplicateLinks = duplicates.map(issue => `- [#${issue.number}: ${issue.title}](${issue.url})`).join('\n');
+
+            const commentBody = [
+              '<!-- workflow: detect-duplicate-issues -->',
+              '### 🔍 **Potential duplicate detection**',
+              '',
+              'I\'ve analyzed similar issues and found the following potential duplicates:',
+              '',
+              duplicateLinks,
+              '',
+              '**What to do next:**',
+              '1. Please review these issues to see if they match your issue',
+              '2. If you find an existing issue that covers your problem:',
+              '   - Consider closing this issue',
+              '   - Add your findings or 👍 on the existing issue instead',
+              '3. If your issue is different or adds new aspects, please clarify how it differs',
+              '',
+              'This helps keep our issues organized and ensures similar issues are consolidated for better visibility.',
+              '',
+              '*This message was generated automatically by our duplicate detection system.*'
+            ].join('\n');
+
+            try {
+              await github.rest.issues.createComment({
+                owner: context.repo.owner,
+                repo: context.repo.repo,
+                issue_number: context.payload.issue.number,
+                body: commentBody
+              });
+
+              console.log(`Posted duplicate detection comment with ${duplicates.length} potential duplicates`);
+
+              // Add the potential-duplicate label
+              await github.rest.issues.addLabels({
+                owner: context.repo.owner,
+                repo: context.repo.repo,
+                issue_number: context.payload.issue.number,
+                labels: ['potential-duplicate']
+              });
+
+              console.log('Added potential-duplicate label to the issue');
+            } catch (error) {
+              core.error('Failed to post duplicate detection comment or add label:', error.message);
+              if (error.status === 403) {
+                core.error('Permission denied or rate limit exceeded');
+              }
+              // Don't throw - we've done the analysis, just couldn't post the result
+            }
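The "Fetch similar issues" step in this workflow narrows the candidate set with a single search query: one label: clause per integration label, OR-combined when there is more than one, plus a six-month created: window. A worked example of the query shape, and a way to try it locally with the GitHub CLI (the label and date below are made up for illustration):

    # Single-label form of the generated query:
    #   repo:home-assistant/core is:issue label:"integration:hue" created:>=2025-01-01
    # The multi-label form wraps the clauses in parentheses joined with OR.
    gh search issues 'repo:home-assistant/core is:issue label:"integration:hue" created:>=2025-01-01' --limit 15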
.github/workflows/detect-non-english-issues.yml (vendored, new file): 193 added lines
@@ -0,0 +1,193 @@
+name: Auto-detect non-English issues
+
+# yamllint disable-line rule:truthy
+on:
+  issues:
+    types: [opened]
+
+permissions:
+  issues: write
+  models: read
+
+jobs:
+  detect-language:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Check issue language
+        id: detect_language
+        uses: actions/github-script@v7.0.1
+        env:
+          ISSUE_NUMBER: ${{ github.event.issue.number }}
+          ISSUE_TITLE: ${{ github.event.issue.title }}
+          ISSUE_BODY: ${{ github.event.issue.body }}
+          ISSUE_USER_TYPE: ${{ github.event.issue.user.type }}
+        with:
+          script: |
+            // Get the issue details from environment variables
+            const issueNumber = process.env.ISSUE_NUMBER;
+            const issueTitle = process.env.ISSUE_TITLE || '';
+            const issueBody = process.env.ISSUE_BODY || '';
+            const userType = process.env.ISSUE_USER_TYPE;
+
+            // Skip language detection for bot users
+            if (userType === 'Bot') {
+              console.log('Skipping language detection for bot user');
+              core.setOutput('should_continue', 'false');
+              return;
+            }
+
+            console.log(`Checking language for issue #${issueNumber}`);
+            console.log(`Title: ${issueTitle}`);
+
+            // Combine title and body for language detection
+            const fullText = `${issueTitle}\n\n${issueBody}`;
+
+            // Check if the text is too short to reliably detect language
+            if (fullText.trim().length < 20) {
+              console.log('Text too short for reliable language detection');
+              core.setOutput('should_continue', 'false'); // Skip processing for very short text
+              return;
+            }
+
+            core.setOutput('issue_number', issueNumber);
+            core.setOutput('issue_text', fullText);
+            core.setOutput('should_continue', 'true');
+
+      - name: Detect language using AI
+        id: ai_language_detection
+        if: steps.detect_language.outputs.should_continue == 'true'
+        uses: actions/ai-inference@v1.1.0
+        with:
+          model: openai/gpt-4o-mini
+          system-prompt: |
+            You are a language detection system. Your task is to determine if the provided text is written in English or another language.
+
+            Rules:
+            1. Analyze the text and determine the primary language of the USER'S DESCRIPTION only
+            2. IGNORE markdown headers (lines starting with #, ##, ###, etc.) as these are from issue templates, not user input
+            3. IGNORE all code blocks (text between ``` or ` markers) as they may contain system-generated error messages in other languages
+            4. IGNORE error messages, logs, and system output even if not in code blocks - these often appear in the user's system language
+            5. Consider technical terms, code snippets, URLs, and file paths as neutral (they don't indicate non-English)
+            6. Focus ONLY on the actual sentences and descriptions written by the user explaining their issue
+            7. If the user's explanation/description is in English but includes non-English error messages or logs, consider it ENGLISH
+            8. Return ONLY a JSON object with two fields:
+               - "is_english": boolean (true if the user's description is primarily in English, false otherwise)
+               - "detected_language": string (the name of the detected language, e.g., "English", "Spanish", "Chinese", etc.)
+            9. Be lenient - if the user's explanation is in English with non-English system output, it's still English
+            10. Common programming terms, error messages, and technical jargon should not be considered as non-English
+            11. If you cannot reliably determine the language, set detected_language to "undefined"
+
+            Example response:
+            {"is_english": false, "detected_language": "Spanish"}
+
+          prompt: |
+            Please analyze the following issue text and determine if it is written in English:
+
+            ${{ steps.detect_language.outputs.issue_text }}
+
+          max-tokens: 50
+
+      - name: Process non-English issues
+        if: steps.detect_language.outputs.should_continue == 'true'
+        uses: actions/github-script@v7.0.1
+        env:
+          AI_RESPONSE: ${{ steps.ai_language_detection.outputs.response }}
+          ISSUE_NUMBER: ${{ steps.detect_language.outputs.issue_number }}
+        with:
+          script: |
+            const issueNumber = parseInt(process.env.ISSUE_NUMBER);
+            const aiResponse = process.env.AI_RESPONSE;
+
+            console.log('AI language detection response:', aiResponse);
+
+            let languageResult;
+            try {
+              languageResult = JSON.parse(aiResponse.trim());
+
+              // Validate the response structure
+              if (!languageResult || typeof languageResult.is_english !== 'boolean') {
+                throw new Error('Invalid response structure');
+              }
+            } catch (error) {
+              core.error(`Failed to parse AI response: ${error.message}`);
+              console.log('Raw AI response:', aiResponse);
+
+              // Log more details for debugging
+              core.warning('Defaulting to English due to parsing error');
+
+              // Default to English if we can't parse the response
+              return;
+            }
+
+            if (languageResult.is_english) {
+              console.log('Issue is in English, no action needed');
+              return;
+            }
+
+            // If language is undefined or not detected, skip processing
+            if (!languageResult.detected_language || languageResult.detected_language === 'undefined') {
+              console.log('Language could not be determined, skipping processing');
+              return;
+            }
+
+            console.log(`Issue detected as non-English: ${languageResult.detected_language}`);
+
+            // Post comment explaining the language requirement
+            const commentBody = [
+              '<!-- workflow: detect-non-english-issues -->',
+              '### 🌐 Non-English issue detected',
+              '',
+              `This issue appears to be written in **${languageResult.detected_language}** rather than English.`,
+              '',
+              'The Home Assistant project uses English as the primary language for issues to ensure that everyone in our international community can participate and help resolve issues. This allows any of our thousands of contributors to jump in and provide assistance.',
+              '',
+              '**What to do:**',
+              '1. Re-create the issue using the English language',
+              '2. If you need help with translation, consider using:',
+              '   - Translation tools like Google Translate',
+              '   - AI assistants like ChatGPT or Claude',
+              '',
+              'This helps our community provide the best possible support and ensures your issue gets the attention it deserves from our global contributor base.',
+              '',
+              'Thank you for your understanding! 🙏'
+            ].join('\n');
+
+            try {
+              // Add comment
+              await github.rest.issues.createComment({
+                owner: context.repo.owner,
+                repo: context.repo.repo,
+                issue_number: issueNumber,
+                body: commentBody
+              });
+
+              console.log('Posted language requirement comment');
+
+              // Add non-english label
+              await github.rest.issues.addLabels({
+                owner: context.repo.owner,
+                repo: context.repo.repo,
+                issue_number: issueNumber,
+                labels: ['non-english']
+              });
+
+              console.log('Added non-english label');
+
+              // Close the issue
+              await github.rest.issues.update({
+                owner: context.repo.owner,
+                repo: context.repo.repo,
+                issue_number: issueNumber,
+                state: 'closed',
+                state_reason: 'not_planned'
+              });
+
+              console.log('Closed the issue');
+
+            } catch (error) {
+              core.error('Failed to process non-English issue:', error.message);
+              if (error.status === 403) {
+                core.error('Permission denied or rate limit exceeded');
+              }
+            }
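The "Process non-English issues" step expects the model to return a strict JSON object like {"is_english": false, "detected_language": "Spanish"} and deliberately falls back to treating the issue as English when parsing fails. A minimal shell sketch of the same validation, assuming jq is available; the sample response is made up:

    AI_RESPONSE='{"is_english": false, "detected_language": "Spanish"}'
    # jq -e exits non-zero when the expression is false or null, mirroring
    # the workflow's "default to English on a bad response" behavior.
    if echo "$AI_RESPONSE" | jq -e 'has("is_english") and (.is_english | type == "boolean")' > /dev/null; then
      echo "Detected language: $(echo "$AI_RESPONSE" | jq -r '.detected_language')"
    else
      echo "Unparseable response; defaulting to English"
    fi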
.github/workflows/stale.yml (vendored): 6 changes
@@ -17,7 +17,7 @@ jobs:
       # - No PRs marked as no-stale
       # - No issues (-1)
       - name: 60 days stale PRs policy
-        uses: actions/stale@v9.0.0
+        uses: actions/stale@v9.1.0
         with:
           repo-token: ${{ secrets.GITHUB_TOKEN }}
           days-before-stale: 60
@@ -57,7 +57,7 @@ jobs:
       # - No issues marked as no-stale or help-wanted
       # - No PRs (-1)
       - name: 90 days stale issues
-        uses: actions/stale@v9.0.0
+        uses: actions/stale@v9.1.0
         with:
           repo-token: ${{ steps.token.outputs.token }}
           days-before-stale: 90
@@ -87,7 +87,7 @@ jobs:
       # - No Issues marked as no-stale or help-wanted
       # - No PRs (-1)
      - name: Needs more information stale issues policy
-        uses: actions/stale@v9.0.0
+        uses: actions/stale@v9.1.0
         with:
           repo-token: ${{ steps.token.outputs.token }}
           only-labels: "needs-more-information"
.github/workflows/translations.yml (vendored): 4 changes
@@ -10,7 +10,7 @@ on:
       - "**strings.json"

 env:
-  DEFAULT_PYTHON: "3.12"
+  DEFAULT_PYTHON: "3.13"

 jobs:
   upload:
@@ -22,7 +22,7 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
.github/workflows/wheels.yml (vendored): 130 changes
@@ -17,7 +17,7 @@ on:
       - "script/gen_requirements_all.py"

 env:
-  DEFAULT_PYTHON: "3.12"
+  DEFAULT_PYTHON: "3.13"

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref_name}}
@@ -36,7 +36,7 @@ jobs:

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@v5.6.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
@@ -76,18 +76,37 @@ jobs:

             # Use C-Extension for SQLAlchemy
             echo "REQUIRE_SQLALCHEMY_CEXT=1"
+
+            # Add additional pip wheel build constraints
+            echo "PIP_CONSTRAINT=build_constraints.txt"
           ) > .env_file

+      - name: Write pip wheel build constraints
+        run: |
+          (
+            # ninja 1.11.1.2 + 1.11.1.3 seem to be broken on at least armhf
+            # this caused the numpy builds to fail
+            # https://github.com/scikit-build/ninja-python-distributions/issues/274
+            echo "ninja==1.11.1.1"
+          ) > build_constraints.txt
+
       - name: Upload env_file
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: env_file
           path: ./.env_file
           include-hidden-files: true
           overwrite: true

+      - name: Upload build_constraints
+        uses: actions/upload-artifact@v4.6.2
+        with:
+          name: build_constraints
+          path: ./build_constraints.txt
+          overwrite: true
+
       - name: Upload requirements_diff
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: requirements_diff
           path: ./requirements_diff.txt
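Writing PIP_CONSTRAINT into the env_file makes pip treat build_constraints.txt like a global --constraint file, and since pip reads it from the environment it also reaches the isolated environments pip creates for build dependencies; that is what lets the broken ninja releases be pinned for source builds such as numpy. A local sketch of the mechanism, with illustrative package choices:

    # Pin a known-good ninja for any package that pulls it in as a build dependency.
    echo "ninja==1.11.1.1" > build_constraints.txt
    # PIP_CONSTRAINT is the environment-variable form of pip's --constraint option.
    PIP_CONSTRAINT="$PWD/build_constraints.txt" pip install --no-binary numpy numpy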
@@ -99,7 +118,7 @@ jobs:
           python -m script.gen_requirements_all ci

       - name: Upload requirements_all_wheels
-        uses: actions/upload-artifact@v4.4.3
+        uses: actions/upload-artifact@v4.6.2
         with:
           name: requirements_all_wheels
           path: ./requirements_all_wheels_*.txt
@@ -112,19 +131,24 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        abi: ["cp312"]
+        abi: ["cp313"]
         arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
       - name: Checkout the repository
         uses: actions/checkout@v4.2.2

       - name: Download env_file
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.3.0
         with:
           name: env_file

+      - name: Download build_constraints
+        uses: actions/download-artifact@v4.3.0
+        with:
+          name: build_constraints
+
       - name: Download requirements_diff
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.3.0
         with:
           name: requirements_diff

@@ -135,15 +159,15 @@ jobs:
           sed -i "/uv/d" requirements_diff.txt

       - name: Build wheels
-        uses: home-assistant/wheels@2024.07.1
+        uses: home-assistant/wheels@2025.03.0
         with:
           abi: ${{ matrix.abi }}
           tag: musllinux_1_2
           arch: ${{ matrix.arch }}
           wheels-key: ${{ secrets.WHEELS_KEY }}
           env-file: true
-          apk: "libffi-dev;openssl-dev;yaml-dev;nasm"
+          apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-ng-dev"
-          skip-binary: aiohttp;multidict;yarl
+          skip-binary: aiohttp;multidict;propcache;yarl;SQLAlchemy
           constraints: "homeassistant/package_constraints.txt"
           requirements-diff: "requirements_diff.txt"
           requirements: "requirements.txt"
@@ -156,24 +180,29 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        abi: ["cp312"]
+        abi: ["cp313"]
         arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
       - name: Checkout the repository
         uses: actions/checkout@v4.2.2

       - name: Download env_file
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.3.0
         with:
           name: env_file

+      - name: Download build_constraints
+        uses: actions/download-artifact@v4.3.0
+        with:
+          name: build_constraints
+
       - name: Download requirements_diff
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.3.0
         with:
           name: requirements_diff

       - name: Download requirements_all_wheels
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.3.0
         with:
           name: requirements_all_wheels

@@ -189,77 +218,16 @@ jobs:
           sed -i "/uv/d" requirements.txt
           sed -i "/uv/d" requirements_diff.txt

-      - name: Split requirements all
-        run: |
-          # We split requirements all into multiple files.
-          # This is to prevent the build from running out of memory when
-          # resolving packages on 32-bits systems (like armhf, armv7).
-
-          split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt
-
-      - name: Create requirements for cython<3
-        run: |
-          # Some dependencies still require 'cython<3'
-          # and don't yet use isolated build environments.
-          # Build these first.
-          # pydantic: https://github.com/pydantic/pydantic/issues/7689
-
-          touch requirements_old-cython.txt
-          cat homeassistant/package_constraints.txt | grep 'pydantic==' >> requirements_old-cython.txt
-
-      - name: Build wheels (old cython)
-        uses: home-assistant/wheels@2024.07.1
+      - name: Build wheels
+        uses: home-assistant/wheels@2025.03.0
         with:
           abi: ${{ matrix.abi }}
           tag: musllinux_1_2
           arch: ${{ matrix.arch }}
           wheels-key: ${{ secrets.WHEELS_KEY }}
           env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev"
+          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
-          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
+          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
           constraints: "homeassistant/package_constraints.txt"
           requirements-diff: "requirements_diff.txt"
-          requirements: "requirements_old-cython.txt"
+          requirements: "requirements_all.txt"
-          pip: "'cython<3'"
-
-      - name: Build wheels (part 1)
-        uses: home-assistant/wheels@2024.07.1
-        with:
-          abi: ${{ matrix.abi }}
-          tag: musllinux_1_2
-          arch: ${{ matrix.arch }}
-          wheels-key: ${{ secrets.WHEELS_KEY }}
-          env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
-          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
-          constraints: "homeassistant/package_constraints.txt"
-          requirements-diff: "requirements_diff.txt"
-          requirements: "requirements_all.txtaa"
-
-      - name: Build wheels (part 2)
-        uses: home-assistant/wheels@2024.07.1
-        with:
-          abi: ${{ matrix.abi }}
-          tag: musllinux_1_2
-          arch: ${{ matrix.arch }}
-          wheels-key: ${{ secrets.WHEELS_KEY }}
-          env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
-          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
-          constraints: "homeassistant/package_constraints.txt"
-          requirements-diff: "requirements_diff.txt"
-          requirements: "requirements_all.txtab"
-
-      - name: Build wheels (part 3)
-        uses: home-assistant/wheels@2024.07.1
-        with:
-          abi: ${{ matrix.abi }}
-          tag: musllinux_1_2
-          arch: ${{ matrix.arch }}
-          wheels-key: ${{ secrets.WHEELS_KEY }}
-          env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
-          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
-          constraints: "homeassistant/package_constraints.txt"
-          requirements-diff: "requirements_diff.txt"
-          requirements: "requirements_all.txtac"
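The removed "Split requirements all" step is worth decoding, since its file naming is non-obvious: split(1) takes an input file and an output prefix, so the per-arch wheels list was chopped into thirds named after the requirements_all.txt prefix, producing the requirements_all.txtaa/.txtab/.txtac files the removed "Build wheels (part 1..3)" steps consumed. A worked sketch with an assumed line count and arch:

    # Assume requirements_all.txt is 2999 lines long:
    #   (2999 + 1) / 3 = 1000 lines per chunk (integer division).
    lines_per_chunk=$(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3)
    # Input: the per-arch wheels list; prefix: "requirements_all.txt".
    # Output: requirements_all.txtaa, requirements_all.txtab, requirements_all.txtac
    split -l "$lines_per_chunk" requirements_all_wheels_amd64.txt requirements_all.txt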
.gitignore (vendored): 7 changes
@@ -69,6 +69,7 @@ test-reports/
 test-results.xml
 test-output.xml
 pytest-*.txt
+junit.xml

 # Translations
 *.mo
@@ -136,4 +137,8 @@ tmp_cache
 .ropeproject

 # Will be created from script/split_tests.py
 pytest_buckets.txt
+
+# AI tooling
+.claude
+
.pre-commit-config.yaml

@@ -1,24 +1,24 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.7.2
+    rev: v0.12.1
     hooks:
-      - id: ruff
+      - id: ruff-check
         args:
           - --fix
       - id: ruff-format
         files: ^((homeassistant|pylint|script|tests)/.+)?[^/]+\.(py|pyi)$
   - repo: https://github.com/codespell-project/codespell
-    rev: v2.3.0
+    rev: v2.4.1
     hooks:
       - id: codespell
         args:
-          - --ignore-words-list=astroid,checkin,currenty,hass,iif,incomfort,lookin,nam,NotIn
+          - --ignore-words-list=aiport,astroid,checkin,currenty,hass,iif,incomfort,lookin,nam,NotIn
           - --skip="./.*,*.csv,*.json,*.ambr"
           - --quiet-level=2
         exclude_types: [csv, json, html]
         exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.4.0
+    rev: v5.0.0
     hooks:
       - id: check-executables-have-shebangs
         stages: [manual]
@@ -30,7 +30,7 @@ repos:
       - --branch=master
       - --branch=rc
   - repo: https://github.com/adrienverge/yamllint.git
-    rev: v1.35.1
+    rev: v1.37.1
     hooks:
       - id: yamllint
   - repo: https://github.com/pre-commit/mirrors-prettier
@@ -61,13 +61,14 @@ repos:
         name: mypy
         entry: script/run-in-env.sh mypy
         language: script
-        types_or: [python, pyi]
         require_serial: true
+        types_or: [python, pyi]
         files: ^(homeassistant|pylint)/.+\.(py|pyi)$
       - id: pylint
         name: pylint
-        entry: script/run-in-env.sh pylint -j 0 --ignore-missing-annotations=y
+        entry: script/run-in-env.sh pylint --ignore-missing-annotations=y
         language: script
+        require_serial: true
         types_or: [python, pyi]
         files: ^(homeassistant|tests)/.+\.(py|pyi)$
       - id: gen_requirements_all
@@ -83,14 +84,14 @@ repos:
         pass_filenames: false
         language: script
         types: [text]
-        files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$
+        files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/.+/(quality_scale)\.yaml|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$
       - id: hassfest-metadata
         name: hassfest-metadata
         entry: script/run-in-env.sh python3 -m script.hassfest -p metadata,docker
         pass_filenames: false
         language: script
         types: [text]
-        files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml)$
+        files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml|homeassistant/components/go2rtc/const\.py)$
       - id: hassfest-mypy-config
         name: hassfest-mypy-config
         entry: script/run-in-env.sh python3 -m script.hassfest -p mypy_config
.strict-typing

@@ -41,6 +41,7 @@ homeassistant.util.unit_system
 # --- Add components below this line ---
 homeassistant.components
 homeassistant.components.abode.*
+homeassistant.components.acaia.*
 homeassistant.components.accuweather.*
 homeassistant.components.acer_projector.*
 homeassistant.components.acmeda.*
@@ -64,7 +65,9 @@ homeassistant.components.aladdin_connect.*
 homeassistant.components.alarm_control_panel.*
 homeassistant.components.alert.*
 homeassistant.components.alexa.*
+homeassistant.components.alexa_devices.*
 homeassistant.components.alpha_vantage.*
+homeassistant.components.altruist.*
 homeassistant.components.amazon_polly.*
 homeassistant.components.amberelectric.*
 homeassistant.components.ambient_network.*
@@ -102,6 +105,7 @@ homeassistant.components.auth.*
 homeassistant.components.automation.*
 homeassistant.components.awair.*
 homeassistant.components.axis.*
+homeassistant.components.azure_storage.*
 homeassistant.components.backup.*
 homeassistant.components.baf.*
 homeassistant.components.bang_olufsen.*
@@ -117,7 +121,9 @@ homeassistant.components.bluetooth_adapters.*
 homeassistant.components.bluetooth_tracker.*
 homeassistant.components.bmw_connected_drive.*
 homeassistant.components.bond.*
+homeassistant.components.bosch_alarm.*
 homeassistant.components.braviatv.*
+homeassistant.components.bring.*
 homeassistant.components.brother.*
 homeassistant.components.browser.*
 homeassistant.components.bryant_evolution.*
@@ -133,9 +139,11 @@ homeassistant.components.clicksend.*
 homeassistant.components.climate.*
 homeassistant.components.cloud.*
 homeassistant.components.co2signal.*
+homeassistant.components.comelit.*
 homeassistant.components.command_line.*
 homeassistant.components.config.*
 homeassistant.components.configurator.*
+homeassistant.components.cookidoo.*
 homeassistant.components.counter.*
 homeassistant.components.cover.*
 homeassistant.components.cpuspeed.*
@@ -168,6 +176,7 @@ homeassistant.components.easyenergy.*
 homeassistant.components.ecovacs.*
 homeassistant.components.ecowitt.*
 homeassistant.components.efergy.*
+homeassistant.components.eheimdigital.*
 homeassistant.components.electrasmart.*
 homeassistant.components.electric_kiwi.*
 homeassistant.components.elevenlabs.*
@@ -214,6 +223,7 @@ homeassistant.components.goalzero.*
 homeassistant.components.google.*
 homeassistant.components.google_assistant_sdk.*
 homeassistant.components.google_cloud.*
+homeassistant.components.google_drive.*
 homeassistant.components.google_photos.*
 homeassistant.components.google_sheets.*
 homeassistant.components.govee_ble.*
@@ -221,18 +231,22 @@ homeassistant.components.gpsd.*
 homeassistant.components.greeneye_monitor.*
 homeassistant.components.group.*
 homeassistant.components.guardian.*
+homeassistant.components.habitica.*
 homeassistant.components.hardkernel.*
 homeassistant.components.hardware.*
+homeassistant.components.heos.*
 homeassistant.components.here_travel_time.*
 homeassistant.components.history.*
 homeassistant.components.history_stats.*
 homeassistant.components.holiday.*
+homeassistant.components.home_connect.*
 homeassistant.components.homeassistant.*
 homeassistant.components.homeassistant_alerts.*
 homeassistant.components.homeassistant_green.*
 homeassistant.components.homeassistant_hardware.*
 homeassistant.components.homeassistant_sky_connect.*
 homeassistant.components.homeassistant_yellow.*
+homeassistant.components.homee.*
 homeassistant.components.homekit.*
 homeassistant.components.homekit_controller
 homeassistant.components.homekit_controller.alarm_control_panel
@@ -258,6 +272,8 @@ homeassistant.components.image_processing.*
 homeassistant.components.image_upload.*
 homeassistant.components.imap.*
 homeassistant.components.imgw_pib.*
+homeassistant.components.immich.*
+homeassistant.components.incomfort.*
 homeassistant.components.input_button.*
 homeassistant.components.input_select.*
 homeassistant.components.input_text.*
@@ -268,6 +284,7 @@ homeassistant.components.ios.*
 homeassistant.components.iotty.*
 homeassistant.components.ipp.*
 homeassistant.components.iqvia.*
+homeassistant.components.iron_os.*
 homeassistant.components.islamic_prayer_times.*
 homeassistant.components.isy994.*
 homeassistant.components.jellyfin.*
@@ -277,6 +294,7 @@ homeassistant.components.kaleidescape.*
 homeassistant.components.knocki.*
 homeassistant.components.knx.*
 homeassistant.components.kraken.*
+homeassistant.components.kulersky.*
 homeassistant.components.lacrosse.*
 homeassistant.components.lacrosse_view.*
 homeassistant.components.lamarzocco.*
@@ -287,6 +305,7 @@ homeassistant.components.lcn.*
 homeassistant.components.ld2410_ble.*
 homeassistant.components.led_ble.*
 homeassistant.components.lektrico.*
+homeassistant.components.letpot.*
 homeassistant.components.lidarr.*
 homeassistant.components.lifx.*
 homeassistant.components.light.*
@@ -301,17 +320,22 @@ homeassistant.components.logbook.*
 homeassistant.components.logger.*
 homeassistant.components.london_underground.*
 homeassistant.components.lookin.*
+homeassistant.components.lovelace.*
 homeassistant.components.luftdaten.*
 homeassistant.components.madvr.*
 homeassistant.components.manual.*
 homeassistant.components.mastodon.*
 homeassistant.components.matrix.*
 homeassistant.components.matter.*
+homeassistant.components.mcp.*
+homeassistant.components.mcp_server.*
+homeassistant.components.mealie.*
 homeassistant.components.media_extractor.*
 homeassistant.components.media_player.*
 homeassistant.components.media_source.*
 homeassistant.components.met_eireann.*
 homeassistant.components.metoffice.*
+homeassistant.components.miele.*
 homeassistant.components.mikrotik.*
 homeassistant.components.min_max.*
 homeassistant.components.minecraft_server.*
@@ -330,6 +354,7 @@ homeassistant.components.mysensors.*
 homeassistant.components.myuplink.*
 homeassistant.components.nam.*
 homeassistant.components.nanoleaf.*
+homeassistant.components.nasweb.*
 homeassistant.components.neato.*
 homeassistant.components.nest.*
 homeassistant.components.netatmo.*
@@ -339,12 +364,16 @@ homeassistant.components.nfandroidtv.*
 homeassistant.components.nightscout.*
 homeassistant.components.nissan_leaf.*
 homeassistant.components.no_ip.*
+homeassistant.components.nordpool.*
 homeassistant.components.notify.*
 homeassistant.components.notion.*
+homeassistant.components.ntfy.*
 homeassistant.components.number.*
 homeassistant.components.nut.*
+homeassistant.components.ohme.*
 homeassistant.components.onboarding.*
 homeassistant.components.oncue.*
+homeassistant.components.onedrive.*
 homeassistant.components.onewire.*
 homeassistant.components.onkyo.*
 homeassistant.components.open_meteo.*
@@ -355,13 +384,20 @@ homeassistant.components.openuv.*
 homeassistant.components.oralb.*
 homeassistant.components.otbr.*
 homeassistant.components.overkiz.*
+homeassistant.components.overseerr.*
 homeassistant.components.p1_monitor.*
+homeassistant.components.pandora.*
 homeassistant.components.panel_custom.*
+homeassistant.components.paperless_ngx.*
+homeassistant.components.peblar.*
 homeassistant.components.peco.*
+homeassistant.components.pegel_online.*
 homeassistant.components.persistent_notification.*
+homeassistant.components.person.*
 homeassistant.components.pi_hole.*
 homeassistant.components.ping.*
 homeassistant.components.plugwise.*
+homeassistant.components.powerfox.*
 homeassistant.components.powerwall.*
 homeassistant.components.private_ble_device.*
 homeassistant.components.prometheus.*
@@ -371,6 +407,9 @@ homeassistant.components.pure_energie.*
 homeassistant.components.purpleair.*
 homeassistant.components.pushbullet.*
 homeassistant.components.pvoutput.*
+homeassistant.components.pyload.*
+homeassistant.components.python_script.*
+homeassistant.components.qbus.*
 homeassistant.components.qnap_qsw.*
 homeassistant.components.rabbitair.*
 homeassistant.components.radarr.*
@@ -381,8 +420,11 @@ homeassistant.components.raspberry_pi.*
 homeassistant.components.rdw.*
 homeassistant.components.recollect_waste.*
 homeassistant.components.recorder.*
+homeassistant.components.remember_the_milk.*
 homeassistant.components.remote.*
+homeassistant.components.remote_calendar.*
 homeassistant.components.renault.*
+homeassistant.components.reolink.*
 homeassistant.components.repairs.*
 homeassistant.components.rest.*
 homeassistant.components.rest_command.*
@@ -396,12 +438,13 @@ homeassistant.components.roku.*
 homeassistant.components.romy.*
 homeassistant.components.rpi_power.*
 homeassistant.components.rss_feed_template.*
-homeassistant.components.rtsp_to_webrtc.*
+homeassistant.components.russound_rio.*
 homeassistant.components.ruuvi_gateway.*
 homeassistant.components.ruuvitag_ble.*
 homeassistant.components.samsungtv.*
 homeassistant.components.scene.*
 homeassistant.components.schedule.*
+homeassistant.components.schlage.*
 homeassistant.components.scrape.*
 homeassistant.components.script.*
 homeassistant.components.search.*
@@ -409,6 +452,7 @@ homeassistant.components.select.*
 homeassistant.components.sensibo.*
 homeassistant.components.sensirion_ble.*
 homeassistant.components.sensor.*
+homeassistant.components.sensorpush_cloud.*
 homeassistant.components.sensoterra.*
 homeassistant.components.senz.*
 homeassistant.components.sfr_box.*
@@ -423,6 +467,7 @@ homeassistant.components.slack.*
 homeassistant.components.sleepiq.*
 homeassistant.components.smhi.*
 homeassistant.components.smlight.*
+homeassistant.components.smtp.*
 homeassistant.components.snooz.*
 homeassistant.components.solarlog.*
 homeassistant.components.sonarr.*
@@ -434,7 +479,7 @@ homeassistant.components.ssdp.*
 homeassistant.components.starlink.*
 homeassistant.components.statistics.*
 homeassistant.components.steamist.*
-homeassistant.components.stookalert.*
+homeassistant.components.stookwijzer.*
 homeassistant.components.stream.*
 homeassistant.components.streamlabswater.*
 homeassistant.components.stt.*
@@ -458,6 +503,7 @@ homeassistant.components.tautulli.*
 homeassistant.components.tcp.*
 homeassistant.components.technove.*
 homeassistant.components.tedee.*
+homeassistant.components.telegram_bot.*
 homeassistant.components.text.*
 homeassistant.components.thethingsnetwork.*
 homeassistant.components.threshold.*
@@ -496,6 +542,7 @@ homeassistant.components.vallox.*
 homeassistant.components.valve.*
 homeassistant.components.velbus.*
 homeassistant.components.vlc_telnet.*
+homeassistant.components.vodafone_station.*
 homeassistant.components.wake_on_lan.*
 homeassistant.components.wake_word.*
 homeassistant.components.wallbox.*
.vscode/launch.json (11 changes, vendored)

@@ -38,10 +38,17 @@
       "module": "pytest",
       "justMyCode": false,
       "args": [
-        "--timeout=10",
        "--picked"
      ],
    },
+    {
+      "name": "Home Assistant: Debug Current Test File",
+      "type": "debugpy",
+      "request": "launch",
+      "module": "pytest",
+      "console": "integratedTerminal",
+      "args": ["-vv", "${file}"]
+    },
    {
      // Debug by attaching to local Home Assistant server using Remote Python Debugger.
      // See https://www.home-assistant.io/integrations/debugpy/
@@ -77,4 +84,4 @@
       ]
     }
   ]
 }
.vscode/settings.default.json (3 changes, vendored)

@@ -1,5 +1,5 @@
 {
-  // Please keep this file in sync with settings in home-assistant/.devcontainer/devcontainer.json
+  // Please keep this file (mostly!) in sync with settings in home-assistant/.devcontainer/devcontainer.json
   // Added --no-cov to work around TypeError: message must be set
   // https://github.com/microsoft/vscode-python/issues/14067
   "python.testing.pytestArgs": ["--no-cov"],
@@ -12,6 +12,7 @@
       "fileMatch": [
         "homeassistant/components/*/manifest.json"
       ],
+      // This value differs between working with devcontainer and locally, therefor this value should NOT be in sync!
       "url": "./script/json_schemas/manifest_schema.json"
     }
   ]
.vscode/tasks.json (48 changes, vendored)

@@ -4,7 +4,7 @@
     {
       "label": "Run Home Assistant Core",
       "type": "shell",
-      "command": "hass -c ./config",
+      "command": "${command:python.interpreterPath} -m homeassistant -c ./config",
       "group": "test",
       "presentation": {
         "reveal": "always",
@@ -16,7 +16,7 @@
     {
       "label": "Pytest",
       "type": "shell",
-      "command": "python3 -m pytest --timeout=10 tests",
+      "command": "${command:python.interpreterPath} -m pytest --timeout=10 tests",
       "dependsOn": ["Install all Test Requirements"],
       "group": {
         "kind": "test",
@@ -31,7 +31,7 @@
     {
       "label": "Pytest (changed tests only)",
       "type": "shell",
-      "command": "python3 -m pytest --timeout=10 --picked",
+      "command": "${command:python.interpreterPath} -m pytest --timeout=10 --picked",
       "group": {
         "kind": "test",
         "isDefault": true
@@ -45,7 +45,21 @@
     {
       "label": "Ruff",
       "type": "shell",
-      "command": "pre-commit run ruff --all-files",
+      "command": "pre-commit run ruff-check --all-files",
+      "group": {
+        "kind": "test",
+        "isDefault": true
+      },
+      "presentation": {
+        "reveal": "always",
+        "panel": "new"
+      },
+      "problemMatcher": []
+    },
+    {
+      "label": "Pre-commit",
+      "type": "shell",
+      "command": "pre-commit run --show-diff-on-failure",
       "group": {
         "kind": "test",
         "isDefault": true
@@ -75,7 +89,23 @@
       "label": "Code Coverage",
       "detail": "Generate code coverage report for a given integration.",
       "type": "shell",
-      "command": "python3 -m pytest ./tests/components/${input:integrationName}/ --cov=homeassistant.components.${input:integrationName} --cov-report term-missing --durations-min=1 --durations=0 --numprocesses=auto",
+      "command": "${command:python.interpreterPath} -m pytest ./tests/components/${input:integrationName}/ --cov=homeassistant.components.${input:integrationName} --cov-report term-missing --durations-min=1 --durations=0 --numprocesses=auto",
+      "dependsOn": ["Compile English translations"],
+      "group": {
+        "kind": "test",
+        "isDefault": true
+      },
+      "presentation": {
+        "reveal": "always",
+        "panel": "new"
+      },
+      "problemMatcher": []
+    },
+    {
+      "label": "Update syrupy snapshots",
+      "detail": "Update syrupy snapshots for a given integration.",
+      "type": "shell",
+      "command": "${command:python.interpreterPath} -m pytest ./tests/components/${input:integrationName} --snapshot-update",
       "dependsOn": ["Compile English translations"],
       "group": {
         "kind": "test",
@@ -118,7 +148,7 @@
     {
       "label": "Install all Test Requirements",
       "type": "shell",
-      "command": "uv pip install -r requirements_test_all.txt",
+      "command": "uv pip install -r requirements.txt -r requirements_test_all.txt",
       "group": {
         "kind": "build",
         "isDefault": true
@@ -133,7 +163,7 @@
       "label": "Compile English translations",
       "detail": "In order to test changes to translation files, the translation strings must be compiled into Home Assistant's translation directories.",
       "type": "shell",
-      "command": "python3 -m script.translations develop --all",
+      "command": "${command:python.interpreterPath} -m script.translations develop --all",
       "group": {
         "kind": "build",
         "isDefault": true
@@ -143,7 +173,7 @@
       "label": "Run scaffold",
       "detail": "Add new functionality to a integration using a scaffold.",
       "type": "shell",
-      "command": "python3 -m script.scaffold ${input:scaffoldName} --integration ${input:integrationName}",
+      "command": "${command:python.interpreterPath} -m script.scaffold ${input:scaffoldName} --integration ${input:integrationName}",
       "group": {
         "kind": "build",
         "isDefault": true
@@ -153,7 +183,7 @@
       "label": "Create new integration",
       "detail": "Use the scaffold to create a new integration.",
       "type": "shell",
-      "command": "python3 -m script.scaffold integration",
+      "command": "${command:python.interpreterPath} -m script.scaffold integration",
       "group": {
         "kind": "build",
         "isDefault": true
CODEOWNERS (249 changes, generated)

@@ -40,12 +40,14 @@ build.json @home-assistant/supervisor
 # Integrations
 /homeassistant/components/abode/ @shred86
 /tests/components/abode/ @shred86
+/homeassistant/components/acaia/ @zweckj
+/tests/components/acaia/ @zweckj
 /homeassistant/components/accuweather/ @bieniu
 /tests/components/accuweather/ @bieniu
 /homeassistant/components/acmeda/ @atmurray
 /tests/components/acmeda/ @atmurray
-/homeassistant/components/adax/ @danielhiversen
-/tests/components/adax/ @danielhiversen
+/homeassistant/components/adax/ @danielhiversen @lazytarget
+/tests/components/adax/ @danielhiversen @lazytarget
 /homeassistant/components/adguard/ @frenck
 /tests/components/adguard/ @frenck
 /homeassistant/components/ads/ @mrpasztoradam
@@ -55,6 +57,8 @@ build.json @home-assistant/supervisor
 /tests/components/aemet/ @Noltari
 /homeassistant/components/agent_dvr/ @ispysoftware
 /tests/components/agent_dvr/ @ispysoftware
+/homeassistant/components/ai_task/ @home-assistant/core
+/tests/components/ai_task/ @home-assistant/core
 /homeassistant/components/air_quality/ @home-assistant/core
 /tests/components/air_quality/ @home-assistant/core
 /homeassistant/components/airgradient/ @airgradienthq @joostlek
@@ -87,6 +91,10 @@ build.json @home-assistant/supervisor
 /tests/components/alert/ @home-assistant/core @frenck
 /homeassistant/components/alexa/ @home-assistant/cloud @ochlocracy @jbouwh
 /tests/components/alexa/ @home-assistant/cloud @ochlocracy @jbouwh
+/homeassistant/components/alexa_devices/ @chemelli74
+/tests/components/alexa_devices/ @chemelli74
+/homeassistant/components/altruist/ @airalab @LoSk-p
+/tests/components/altruist/ @airalab @LoSk-p
 /homeassistant/components/amazon_polly/ @jschlyter
 /homeassistant/components/amberelectric/ @madpilot
 /tests/components/amberelectric/ @madpilot
@@ -169,6 +177,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/avea/ @pattyland
 /homeassistant/components/awair/ @ahayworth @danielsjf
 /tests/components/awair/ @ahayworth @danielsjf
+/homeassistant/components/aws_s3/ @tomasbedrich
+/tests/components/aws_s3/ @tomasbedrich
 /homeassistant/components/axis/ @Kane610
 /tests/components/axis/ @Kane610
 /homeassistant/components/azure_data_explorer/ @kaareseras
@@ -178,6 +188,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/azure_event_hub/ @eavanvalkenburg
 /tests/components/azure_event_hub/ @eavanvalkenburg
 /homeassistant/components/azure_service_bus/ @hfurubotten
+/homeassistant/components/azure_storage/ @zweckj
+/tests/components/azure_storage/ @zweckj
 /homeassistant/components/backup/ @home-assistant/core
 /tests/components/backup/ @home-assistant/core
 /homeassistant/components/baf/ @bdraco @jfroy
@@ -196,8 +208,8 @@ build.json @home-assistant/supervisor
 /tests/components/blebox/ @bbx-a @swistakm
 /homeassistant/components/blink/ @fronzbot @mkmer
 /tests/components/blink/ @fronzbot @mkmer
-/homeassistant/components/blue_current/ @Floris272 @gleeuwen
-/tests/components/blue_current/ @Floris272 @gleeuwen
+/homeassistant/components/blue_current/ @gleeuwen @NickKoepr @jtodorova23
+/tests/components/blue_current/ @gleeuwen @NickKoepr @jtodorova23
 /homeassistant/components/bluemaestro/ @bdraco
 /tests/components/bluemaestro/ @bdraco
 /homeassistant/components/blueprint/ @home-assistant/core
@@ -212,6 +224,8 @@ build.json @home-assistant/supervisor
 /tests/components/bmw_connected_drive/ @gerard33 @rikroe
 /homeassistant/components/bond/ @bdraco @prystupa @joshs85 @marciogranzotto
 /tests/components/bond/ @bdraco @prystupa @joshs85 @marciogranzotto
+/homeassistant/components/bosch_alarm/ @mag1024 @sanjay900
+/tests/components/bosch_alarm/ @mag1024 @sanjay900
 /homeassistant/components/bosch_shc/ @tschamm
 /tests/components/bosch_shc/ @tschamm
 /homeassistant/components/braviatv/ @bieniu @Drafteed
@@ -282,6 +296,8 @@ build.json @home-assistant/supervisor
 /tests/components/control4/ @lawtancool
 /homeassistant/components/conversation/ @home-assistant/core @synesthesiam
 /tests/components/conversation/ @home-assistant/core @synesthesiam
+/homeassistant/components/cookidoo/ @miaucl
+/tests/components/cookidoo/ @miaucl
 /homeassistant/components/coolmaster/ @OnFreund
 /tests/components/coolmaster/ @OnFreund
 /homeassistant/components/counter/ @fabaff
@@ -293,6 +309,7 @@ build.json @home-assistant/supervisor
 /homeassistant/components/crownstone/ @Crownstone @RicArch97
 /tests/components/crownstone/ @Crownstone @RicArch97
 /homeassistant/components/cups/ @fabaff
+/tests/components/cups/ @fabaff
 /homeassistant/components/daikin/ @fredrike
 /tests/components/daikin/ @fredrike
 /homeassistant/components/date/ @home-assistant/core
@@ -314,8 +331,8 @@ build.json @home-assistant/supervisor
 /tests/components/demo/ @home-assistant/core
 /homeassistant/components/denonavr/ @ol-iver @starkillerOG
 /tests/components/denonavr/ @ol-iver @starkillerOG
-/homeassistant/components/derivative/ @afaucogney
-/tests/components/derivative/ @afaucogney
+/homeassistant/components/derivative/ @afaucogney @karwosts
+/tests/components/derivative/ @afaucogney @karwosts
 /homeassistant/components/devialet/ @fwestenberg
 /tests/components/devialet/ @fwestenberg
 /homeassistant/components/device_automation/ @home-assistant/core
@@ -383,6 +400,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/efergy/ @tkdrob
 /tests/components/efergy/ @tkdrob
 /homeassistant/components/egardia/ @jeroenterheerdt
+/homeassistant/components/eheimdigital/ @autinerd
+/tests/components/eheimdigital/ @autinerd
 /homeassistant/components/electrasmart/ @jafar-atili
 /tests/components/electrasmart/ @jafar-atili
 /homeassistant/components/electric_kiwi/ @mikey0000
@@ -422,7 +441,7 @@ build.json @home-assistant/supervisor
 /homeassistant/components/entur_public_transport/ @hfurubotten
 /homeassistant/components/environment_canada/ @gwww @michaeldavie
 /tests/components/environment_canada/ @gwww @michaeldavie
-/homeassistant/components/ephember/ @ttroy50
+/homeassistant/components/ephember/ @ttroy50 @roberty99
 /homeassistant/components/epic_games_store/ @hacf-fr @Quentame
 /tests/components/epic_games_store/ @hacf-fr @Quentame
 /homeassistant/components/epion/ @lhgravendeel
@@ -443,8 +462,8 @@ build.json @home-assistant/supervisor
 /tests/components/evil_genius_labs/ @balloob
 /homeassistant/components/evohome/ @zxdavb
 /tests/components/evohome/ @zxdavb
-/homeassistant/components/ezviz/ @RenierM26 @baqs
-/tests/components/ezviz/ @RenierM26 @baqs
+/homeassistant/components/ezviz/ @RenierM26
+/tests/components/ezviz/ @RenierM26
 /homeassistant/components/faa_delays/ @ntilley905
 /tests/components/faa_delays/ @ntilley905
 /homeassistant/components/fan/ @home-assistant/core
@@ -560,8 +579,10 @@ build.json @home-assistant/supervisor
 /tests/components/google_assistant_sdk/ @tronikos
 /homeassistant/components/google_cloud/ @lufton @tronikos
 /tests/components/google_cloud/ @lufton @tronikos
-/homeassistant/components/google_generative_ai_conversation/ @tronikos
-/tests/components/google_generative_ai_conversation/ @tronikos
+/homeassistant/components/google_drive/ @tronikos
+/tests/components/google_drive/ @tronikos
+/homeassistant/components/google_generative_ai_conversation/ @tronikos @ivanlh
+/tests/components/google_generative_ai_conversation/ @tronikos @ivanlh
 /homeassistant/components/google_mail/ @tkdrob
 /tests/components/google_mail/ @tkdrob
 /homeassistant/components/google_photos/ @allenporter
@@ -572,8 +593,8 @@ build.json @home-assistant/supervisor
 /tests/components/google_tasks/ @allenporter
 /homeassistant/components/google_travel_time/ @eifinger
 /tests/components/google_travel_time/ @eifinger
-/homeassistant/components/govee_ble/ @bdraco @PierreAronnax
-/tests/components/govee_ble/ @bdraco @PierreAronnax
+/homeassistant/components/govee_ble/ @bdraco
+/tests/components/govee_ble/ @bdraco
 /homeassistant/components/govee_light_local/ @Galorhallen
 /tests/components/govee_light_local/ @Galorhallen
 /homeassistant/components/gpsd/ @fabaff @jrieger
@@ -586,8 +607,8 @@ build.json @home-assistant/supervisor
 /tests/components/group/ @home-assistant/core
 /homeassistant/components/guardian/ @bachya
 /tests/components/guardian/ @bachya
-/homeassistant/components/habitica/ @ASMfreaK @leikoilja @tr4nt0r
-/tests/components/habitica/ @ASMfreaK @leikoilja @tr4nt0r
+/homeassistant/components/habitica/ @tr4nt0r
+/tests/components/habitica/ @tr4nt0r
 /homeassistant/components/hardkernel/ @home-assistant/core
 /tests/components/hardkernel/ @home-assistant/core
 /homeassistant/components/hardware/ @home-assistant/core
@@ -617,8 +638,8 @@ build.json @home-assistant/supervisor
 /tests/components/hlk_sw16/ @jameshilliard
 /homeassistant/components/holiday/ @jrieger @gjohansson-ST
 /tests/components/holiday/ @jrieger @gjohansson-ST
-/homeassistant/components/home_connect/ @DavidMStraub @Diegorro98
-/tests/components/home_connect/ @DavidMStraub @Diegorro98
+/homeassistant/components/home_connect/ @DavidMStraub @Diegorro98 @MartinHjelmare
+/tests/components/home_connect/ @DavidMStraub @Diegorro98 @MartinHjelmare
 /homeassistant/components/homeassistant/ @home-assistant/core
 /tests/components/homeassistant/ @home-assistant/core
 /homeassistant/components/homeassistant_alerts/ @home-assistant/core
@@ -631,6 +652,8 @@ build.json @home-assistant/supervisor
 /tests/components/homeassistant_sky_connect/ @home-assistant/core
 /homeassistant/components/homeassistant_yellow/ @home-assistant/core
 /tests/components/homeassistant_yellow/ @home-assistant/core
+/homeassistant/components/homee/ @Taraman17
+/tests/components/homee/ @Taraman17
 /homeassistant/components/homekit/ @bdraco
 /tests/components/homekit/ @bdraco
 /homeassistant/components/homekit_controller/ @Jc2k @bdraco
@@ -674,12 +697,12 @@ build.json @home-assistant/supervisor
 /homeassistant/components/iammeter/ @lewei50
 /homeassistant/components/iaqualink/ @flz
 /tests/components/iaqualink/ @flz
-/homeassistant/components/ibeacon/ @bdraco
-/tests/components/ibeacon/ @bdraco
 /homeassistant/components/icloud/ @Quentame @nzapponi
 /tests/components/icloud/ @Quentame @nzapponi
 /homeassistant/components/idasen_desk/ @abmantis
 /tests/components/idasen_desk/ @abmantis
+/homeassistant/components/igloohome/ @keithle888
+/tests/components/igloohome/ @keithle888
 /homeassistant/components/ign_sismologia/ @exxamalte
 /tests/components/ign_sismologia/ @exxamalte
 /homeassistant/components/image/ @home-assistant/core
@@ -690,8 +713,12 @@ build.json @home-assistant/supervisor
 /tests/components/image_upload/ @home-assistant/core
 /homeassistant/components/imap/ @jbouwh
 /tests/components/imap/ @jbouwh
+/homeassistant/components/imeon_inverter/ @Imeon-Energy
+/tests/components/imeon_inverter/ @Imeon-Energy
 /homeassistant/components/imgw_pib/ @bieniu
 /tests/components/imgw_pib/ @bieniu
+/homeassistant/components/immich/ @mib1185
+/tests/components/immich/ @mib1185
 /homeassistant/components/improv_ble/ @emontnemery
 /tests/components/improv_ble/ @emontnemery
 /homeassistant/components/incomfort/ @jbouwh
@@ -721,12 +748,14 @@ build.json @home-assistant/supervisor
 /homeassistant/components/intent/ @home-assistant/core @synesthesiam
 /tests/components/intent/ @home-assistant/core @synesthesiam
 /homeassistant/components/intesishome/ @jnimmo
+/homeassistant/components/iometer/ @MaestroOnICe
+/tests/components/iometer/ @MaestroOnICe
 /homeassistant/components/ios/ @robbiet480
 /tests/components/ios/ @robbiet480
 /homeassistant/components/iotawatt/ @gtdiehl @jyavenard
 /tests/components/iotawatt/ @gtdiehl @jyavenard
-/homeassistant/components/iotty/ @pburgio @shapournemati-iotty
-/tests/components/iotty/ @pburgio @shapournemati-iotty
+/homeassistant/components/iotty/ @shapournemati-iotty
+/tests/components/iotty/ @shapournemati-iotty
 /homeassistant/components/iperf3/ @rohankapoorcom
 /homeassistant/components/ipma/ @dgomes
 /tests/components/ipma/ @dgomes
@@ -751,14 +780,14 @@ build.json @home-assistant/supervisor
 /tests/components/ista_ecotrend/ @tr4nt0r
 /homeassistant/components/isy994/ @bdraco @shbatm
 /tests/components/isy994/ @bdraco @shbatm
+/homeassistant/components/ituran/ @shmuelzon
+/tests/components/ituran/ @shmuelzon
 /homeassistant/components/izone/ @Swamp-Ig
 /tests/components/izone/ @Swamp-Ig
-/homeassistant/components/jellyfin/ @j-stienstra @ctalkington
-/tests/components/jellyfin/ @j-stienstra @ctalkington
+/homeassistant/components/jellyfin/ @RunC0deRun @ctalkington
+/tests/components/jellyfin/ @RunC0deRun @ctalkington
 /homeassistant/components/jewish_calendar/ @tsvi
 /tests/components/jewish_calendar/ @tsvi
-/homeassistant/components/juicenet/ @jesserockz
-/tests/components/juicenet/ @jesserockz
 /homeassistant/components/justnimbus/ @kvanzuijlen
 /tests/components/justnimbus/ @kvanzuijlen
 /homeassistant/components/jvc_projector/ @SteveEasley @msavazzi
@@ -819,6 +848,8 @@ build.json @home-assistant/supervisor
 /tests/components/led_ble/ @bdraco
 /homeassistant/components/lektrico/ @lektrico
 /tests/components/lektrico/ @lektrico
+/homeassistant/components/letpot/ @jpelgrom
+/tests/components/letpot/ @jpelgrom
 /homeassistant/components/lg_netcast/ @Drafteed @splinter98
 /tests/components/lg_netcast/ @Drafteed @splinter98
 /homeassistant/components/lg_thinq/ @LG-ThinQ-Integration
@@ -879,6 +910,10 @@ build.json @home-assistant/supervisor
 /tests/components/matrix/ @PaarthShah
 /homeassistant/components/matter/ @home-assistant/matter
 /tests/components/matter/ @home-assistant/matter
+/homeassistant/components/mcp/ @allenporter
+/tests/components/mcp/ @allenporter
+/homeassistant/components/mcp_server/ @allenporter
+/tests/components/mcp_server/ @allenporter
 /homeassistant/components/mealie/ @joostlek @andrew-codechimp
 /tests/components/mealie/ @joostlek @andrew-codechimp
 /homeassistant/components/meater/ @Sotolotl @emontnemery
@@ -911,6 +946,8 @@ build.json @home-assistant/supervisor
 /tests/components/metoffice/ @MrHarcombe @avee87
 /homeassistant/components/microbees/ @microBeesTech
 /tests/components/microbees/ @microBeesTech
+/homeassistant/components/miele/ @astrandb
+/tests/components/miele/ @astrandb
 /homeassistant/components/mikrotik/ @engrbm87
 /tests/components/mikrotik/ @engrbm87
 /homeassistant/components/mill/ @danielhiversen
@@ -947,8 +984,8 @@ build.json @home-assistant/supervisor
 /tests/components/motionblinds_ble/ @LennP @jerrybboy
 /homeassistant/components/motioneye/ @dermotduffy
 /tests/components/motioneye/ @dermotduffy
-/homeassistant/components/motionmount/ @RJPoelstra
-/tests/components/motionmount/ @RJPoelstra
+/homeassistant/components/motionmount/ @laiho-vogels
+/tests/components/motionmount/ @laiho-vogels
 /homeassistant/components/mqtt/ @emontnemery @jbouwh @bdraco
 /tests/components/mqtt/ @emontnemery @jbouwh @bdraco
 /homeassistant/components/msteams/ @peroyvind
@@ -970,8 +1007,8 @@ build.json @home-assistant/supervisor
 /tests/components/nam/ @bieniu
 /homeassistant/components/nanoleaf/ @milanmeu @joostlek
 /tests/components/nanoleaf/ @milanmeu @joostlek
-/homeassistant/components/neato/ @Santobert
-/tests/components/neato/ @Santobert
+/homeassistant/components/nasweb/ @nasWebio
+/tests/components/nasweb/ @nasWebio
 /homeassistant/components/nederlandse_spoorwegen/ @YarmoM
 /homeassistant/components/ness_alarm/ @nickw444
 /tests/components/ness_alarm/ @nickw444
|
/tests/components/ness_alarm/ @nickw444
|
||||||
@ -1002,14 +1039,17 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/nice_go/ @IceBotYT
|
/tests/components/nice_go/ @IceBotYT
|
||||||
/homeassistant/components/nightscout/ @marciogranzotto
|
/homeassistant/components/nightscout/ @marciogranzotto
|
||||||
/tests/components/nightscout/ @marciogranzotto
|
/tests/components/nightscout/ @marciogranzotto
|
||||||
|
/homeassistant/components/niko_home_control/ @VandeurenGlenn
|
||||||
|
/tests/components/niko_home_control/ @VandeurenGlenn
|
||||||
/homeassistant/components/nilu/ @hfurubotten
|
/homeassistant/components/nilu/ @hfurubotten
|
||||||
/homeassistant/components/nina/ @DeerMaximum
|
/homeassistant/components/nina/ @DeerMaximum
|
||||||
/tests/components/nina/ @DeerMaximum
|
/tests/components/nina/ @DeerMaximum
|
||||||
/homeassistant/components/nissan_leaf/ @filcole
|
/homeassistant/components/nissan_leaf/ @filcole
|
||||||
/homeassistant/components/nmbs/ @thibmaek
|
|
||||||
/homeassistant/components/noaa_tides/ @jdelaney72
|
/homeassistant/components/noaa_tides/ @jdelaney72
|
||||||
/homeassistant/components/nobo_hub/ @echoromeo @oyvindwe
|
/homeassistant/components/nobo_hub/ @echoromeo @oyvindwe
|
||||||
/tests/components/nobo_hub/ @echoromeo @oyvindwe
|
/tests/components/nobo_hub/ @echoromeo @oyvindwe
|
||||||
|
/homeassistant/components/nordpool/ @gjohansson-ST
|
||||||
|
/tests/components/nordpool/ @gjohansson-ST
|
||||||
/homeassistant/components/notify/ @home-assistant/core
|
/homeassistant/components/notify/ @home-assistant/core
|
||||||
/tests/components/notify/ @home-assistant/core
|
/tests/components/notify/ @home-assistant/core
|
||||||
/homeassistant/components/notify_events/ @matrozov @papajojo
|
/homeassistant/components/notify_events/ @matrozov @papajojo
|
||||||
@ -1020,6 +1060,8 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/nsw_fuel_station/ @nickw444
|
/tests/components/nsw_fuel_station/ @nickw444
|
||||||
/homeassistant/components/nsw_rural_fire_service_feed/ @exxamalte
|
/homeassistant/components/nsw_rural_fire_service_feed/ @exxamalte
|
||||||
/tests/components/nsw_rural_fire_service_feed/ @exxamalte
|
/tests/components/nsw_rural_fire_service_feed/ @exxamalte
|
||||||
|
/homeassistant/components/ntfy/ @tr4nt0r
|
||||||
|
/tests/components/ntfy/ @tr4nt0r
|
||||||
/homeassistant/components/nuheat/ @tstabrawa
|
/homeassistant/components/nuheat/ @tstabrawa
|
||||||
/tests/components/nuheat/ @tstabrawa
|
/tests/components/nuheat/ @tstabrawa
|
||||||
/homeassistant/components/nuki/ @pschmitt @pvizeli @pree
|
/homeassistant/components/nuki/ @pschmitt @pvizeli @pree
|
||||||
@ -1028,8 +1070,8 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/numato/ @clssn
|
/tests/components/numato/ @clssn
|
||||||
/homeassistant/components/number/ @home-assistant/core @Shulyaka
|
/homeassistant/components/number/ @home-assistant/core @Shulyaka
|
||||||
/tests/components/number/ @home-assistant/core @Shulyaka
|
/tests/components/number/ @home-assistant/core @Shulyaka
|
||||||
/homeassistant/components/nut/ @bdraco @ollo69 @pestevez
|
/homeassistant/components/nut/ @bdraco @ollo69 @pestevez @tdfountain
|
||||||
/tests/components/nut/ @bdraco @ollo69 @pestevez
|
/tests/components/nut/ @bdraco @ollo69 @pestevez @tdfountain
|
||||||
/homeassistant/components/nws/ @MatthewFlamm @kamiyo
|
/homeassistant/components/nws/ @MatthewFlamm @kamiyo
|
||||||
/tests/components/nws/ @MatthewFlamm @kamiyo
|
/tests/components/nws/ @MatthewFlamm @kamiyo
|
||||||
/homeassistant/components/nyt_games/ @joostlek
|
/homeassistant/components/nyt_games/ @joostlek
|
||||||
@ -1041,21 +1083,23 @@ build.json @home-assistant/supervisor
|
|||||||
/homeassistant/components/octoprint/ @rfleming71
|
/homeassistant/components/octoprint/ @rfleming71
|
||||||
/tests/components/octoprint/ @rfleming71
|
/tests/components/octoprint/ @rfleming71
|
||||||
/homeassistant/components/ohmconnect/ @robbiet480
|
/homeassistant/components/ohmconnect/ @robbiet480
|
||||||
|
/homeassistant/components/ohme/ @dan-r
|
||||||
|
/tests/components/ohme/ @dan-r
|
||||||
/homeassistant/components/ollama/ @synesthesiam
|
/homeassistant/components/ollama/ @synesthesiam
|
||||||
/tests/components/ollama/ @synesthesiam
|
/tests/components/ollama/ @synesthesiam
|
||||||
/homeassistant/components/ombi/ @larssont
|
/homeassistant/components/ombi/ @larssont
|
||||||
/homeassistant/components/onboarding/ @home-assistant/core
|
/homeassistant/components/onboarding/ @home-assistant/core
|
||||||
/tests/components/onboarding/ @home-assistant/core
|
/tests/components/onboarding/ @home-assistant/core
|
||||||
/homeassistant/components/oncue/ @bdraco @peterager
|
|
||||||
/tests/components/oncue/ @bdraco @peterager
|
|
||||||
/homeassistant/components/ondilo_ico/ @JeromeHXP
|
/homeassistant/components/ondilo_ico/ @JeromeHXP
|
||||||
/tests/components/ondilo_ico/ @JeromeHXP
|
/tests/components/ondilo_ico/ @JeromeHXP
|
||||||
|
/homeassistant/components/onedrive/ @zweckj
|
||||||
|
/tests/components/onedrive/ @zweckj
|
||||||
/homeassistant/components/onewire/ @garbled1 @epenet
|
/homeassistant/components/onewire/ @garbled1 @epenet
|
||||||
/tests/components/onewire/ @garbled1 @epenet
|
/tests/components/onewire/ @garbled1 @epenet
|
||||||
/homeassistant/components/onkyo/ @arturpragacz
|
/homeassistant/components/onkyo/ @arturpragacz @eclair4151
|
||||||
/tests/components/onkyo/ @arturpragacz
|
/tests/components/onkyo/ @arturpragacz @eclair4151
|
||||||
/homeassistant/components/onvif/ @hunterjm
|
/homeassistant/components/onvif/ @hunterjm @jterrace
|
||||||
/tests/components/onvif/ @hunterjm
|
/tests/components/onvif/ @hunterjm @jterrace
|
||||||
/homeassistant/components/open_meteo/ @frenck
|
/homeassistant/components/open_meteo/ @frenck
|
||||||
/tests/components/open_meteo/ @frenck
|
/tests/components/open_meteo/ @frenck
|
||||||
/homeassistant/components/openai_conversation/ @balloob
|
/homeassistant/components/openai_conversation/ @balloob
|
||||||
@ -1074,8 +1118,8 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/opentherm_gw/ @mvn23
|
/tests/components/opentherm_gw/ @mvn23
|
||||||
/homeassistant/components/openuv/ @bachya
|
/homeassistant/components/openuv/ @bachya
|
||||||
/tests/components/openuv/ @bachya
|
/tests/components/openuv/ @bachya
|
||||||
/homeassistant/components/openweathermap/ @fabaff @freekode @nzapponi
|
/homeassistant/components/openweathermap/ @fabaff @freekode @nzapponi @wittypluck
|
||||||
/tests/components/openweathermap/ @fabaff @freekode @nzapponi
|
/tests/components/openweathermap/ @fabaff @freekode @nzapponi @wittypluck
|
||||||
/homeassistant/components/opnsense/ @mtreinish
|
/homeassistant/components/opnsense/ @mtreinish
|
||||||
/tests/components/opnsense/ @mtreinish
|
/tests/components/opnsense/ @mtreinish
|
||||||
/homeassistant/components/opower/ @tronikos
|
/homeassistant/components/opower/ @tronikos
|
||||||
@ -1089,8 +1133,10 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/otbr/ @home-assistant/core
|
/tests/components/otbr/ @home-assistant/core
|
||||||
/homeassistant/components/ourgroceries/ @OnFreund
|
/homeassistant/components/ourgroceries/ @OnFreund
|
||||||
/tests/components/ourgroceries/ @OnFreund
|
/tests/components/ourgroceries/ @OnFreund
|
||||||
/homeassistant/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117 @alexfp14
|
/homeassistant/components/overkiz/ @imicknl
|
||||||
/tests/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117 @alexfp14
|
/tests/components/overkiz/ @imicknl
|
||||||
|
/homeassistant/components/overseerr/ @joostlek
|
||||||
|
/tests/components/overseerr/ @joostlek
|
||||||
/homeassistant/components/ovo_energy/ @timmo001
|
/homeassistant/components/ovo_energy/ @timmo001
|
||||||
/tests/components/ovo_energy/ @timmo001
|
/tests/components/ovo_energy/ @timmo001
|
||||||
/homeassistant/components/p1_monitor/ @klaasnicolaas
|
/homeassistant/components/p1_monitor/ @klaasnicolaas
|
||||||
@ -1099,6 +1145,10 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/palazzetti/ @dotvav
|
/tests/components/palazzetti/ @dotvav
|
||||||
/homeassistant/components/panel_custom/ @home-assistant/frontend
|
/homeassistant/components/panel_custom/ @home-assistant/frontend
|
||||||
/tests/components/panel_custom/ @home-assistant/frontend
|
/tests/components/panel_custom/ @home-assistant/frontend
|
||||||
|
/homeassistant/components/paperless_ngx/ @fvgarrel
|
||||||
|
/tests/components/paperless_ngx/ @fvgarrel
|
||||||
|
/homeassistant/components/peblar/ @frenck
|
||||||
|
/tests/components/peblar/ @frenck
|
||||||
/homeassistant/components/peco/ @IceBotYT
|
/homeassistant/components/peco/ @IceBotYT
|
||||||
/tests/components/peco/ @IceBotYT
|
/tests/components/peco/ @IceBotYT
|
||||||
/homeassistant/components/pegel_online/ @mib1185
|
/homeassistant/components/pegel_online/ @mib1185
|
||||||
@ -1107,30 +1157,38 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/permobil/ @IsakNyberg
|
/tests/components/permobil/ @IsakNyberg
|
||||||
/homeassistant/components/persistent_notification/ @home-assistant/core
|
/homeassistant/components/persistent_notification/ @home-assistant/core
|
||||||
/tests/components/persistent_notification/ @home-assistant/core
|
/tests/components/persistent_notification/ @home-assistant/core
|
||||||
|
/homeassistant/components/pglab/ @pglab-electronics
|
||||||
|
/tests/components/pglab/ @pglab-electronics
|
||||||
/homeassistant/components/philips_js/ @elupus
|
/homeassistant/components/philips_js/ @elupus
|
||||||
/tests/components/philips_js/ @elupus
|
/tests/components/philips_js/ @elupus
|
||||||
/homeassistant/components/pi_hole/ @shenxn
|
/homeassistant/components/pi_hole/ @shenxn
|
||||||
/tests/components/pi_hole/ @shenxn
|
/tests/components/pi_hole/ @shenxn
|
||||||
/homeassistant/components/picnic/ @corneyl
|
/homeassistant/components/picnic/ @corneyl @codesalatdev
|
||||||
/tests/components/picnic/ @corneyl
|
/tests/components/picnic/ @corneyl @codesalatdev
|
||||||
/homeassistant/components/ping/ @jpbede
|
/homeassistant/components/ping/ @jpbede
|
||||||
/tests/components/ping/ @jpbede
|
/tests/components/ping/ @jpbede
|
||||||
/homeassistant/components/plaato/ @JohNan
|
/homeassistant/components/plaato/ @JohNan
|
||||||
/tests/components/plaato/ @JohNan
|
/tests/components/plaato/ @JohNan
|
||||||
|
/homeassistant/components/playstation_network/ @jackjpowell @tr4nt0r
|
||||||
|
/tests/components/playstation_network/ @jackjpowell @tr4nt0r
|
||||||
/homeassistant/components/plex/ @jjlawren
|
/homeassistant/components/plex/ @jjlawren
|
||||||
/tests/components/plex/ @jjlawren
|
/tests/components/plex/ @jjlawren
|
||||||
/homeassistant/components/plugwise/ @CoMPaTech @bouwew @frenck
|
/homeassistant/components/plugwise/ @CoMPaTech @bouwew
|
||||||
/tests/components/plugwise/ @CoMPaTech @bouwew @frenck
|
/tests/components/plugwise/ @CoMPaTech @bouwew
|
||||||
/homeassistant/components/plum_lightpad/ @ColinHarrington @prystupa
|
/homeassistant/components/plum_lightpad/ @ColinHarrington @prystupa
|
||||||
/tests/components/plum_lightpad/ @ColinHarrington @prystupa
|
/tests/components/plum_lightpad/ @ColinHarrington @prystupa
|
||||||
/homeassistant/components/point/ @fredrike
|
/homeassistant/components/point/ @fredrike
|
||||||
/tests/components/point/ @fredrike
|
/tests/components/point/ @fredrike
|
||||||
/homeassistant/components/poolsense/ @haemishkyd
|
/homeassistant/components/poolsense/ @haemishkyd
|
||||||
/tests/components/poolsense/ @haemishkyd
|
/tests/components/poolsense/ @haemishkyd
|
||||||
|
/homeassistant/components/powerfox/ @klaasnicolaas
|
||||||
|
/tests/components/powerfox/ @klaasnicolaas
|
||||||
/homeassistant/components/powerwall/ @bdraco @jrester @daniel-simpson
|
/homeassistant/components/powerwall/ @bdraco @jrester @daniel-simpson
|
||||||
/tests/components/powerwall/ @bdraco @jrester @daniel-simpson
|
/tests/components/powerwall/ @bdraco @jrester @daniel-simpson
|
||||||
/homeassistant/components/private_ble_device/ @Jc2k
|
/homeassistant/components/private_ble_device/ @Jc2k
|
||||||
/tests/components/private_ble_device/ @Jc2k
|
/tests/components/private_ble_device/ @Jc2k
|
||||||
|
/homeassistant/components/probe_plus/ @pantherale0
|
||||||
|
/tests/components/probe_plus/ @pantherale0
|
||||||
/homeassistant/components/profiler/ @bdraco
|
/homeassistant/components/profiler/ @bdraco
|
||||||
/tests/components/profiler/ @bdraco
|
/tests/components/profiler/ @bdraco
|
||||||
/homeassistant/components/progettihwsw/ @ardaseremet
|
/homeassistant/components/progettihwsw/ @ardaseremet
|
||||||
@ -1146,6 +1204,8 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/prusalink/ @balloob
|
/tests/components/prusalink/ @balloob
|
||||||
/homeassistant/components/ps4/ @ktnrg45
|
/homeassistant/components/ps4/ @ktnrg45
|
||||||
/tests/components/ps4/ @ktnrg45
|
/tests/components/ps4/ @ktnrg45
|
||||||
|
/homeassistant/components/pterodactyl/ @elmurato
|
||||||
|
/tests/components/pterodactyl/ @elmurato
|
||||||
/homeassistant/components/pure_energie/ @klaasnicolaas
|
/homeassistant/components/pure_energie/ @klaasnicolaas
|
||||||
/tests/components/pure_energie/ @klaasnicolaas
|
/tests/components/pure_energie/ @klaasnicolaas
|
||||||
/homeassistant/components/purpleair/ @bachya
|
/homeassistant/components/purpleair/ @bachya
|
||||||
@ -1164,6 +1224,8 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/pyload/ @tr4nt0r
|
/tests/components/pyload/ @tr4nt0r
|
||||||
/homeassistant/components/qbittorrent/ @geoffreylagaisse @finder39
|
/homeassistant/components/qbittorrent/ @geoffreylagaisse @finder39
|
||||||
/tests/components/qbittorrent/ @geoffreylagaisse @finder39
|
/tests/components/qbittorrent/ @geoffreylagaisse @finder39
|
||||||
|
/homeassistant/components/qbus/ @Qbus-iot @thomasddn
|
||||||
|
/tests/components/qbus/ @Qbus-iot @thomasddn
|
||||||
/homeassistant/components/qingping/ @bdraco
|
/homeassistant/components/qingping/ @bdraco
|
||||||
/tests/components/qingping/ @bdraco
|
/tests/components/qingping/ @bdraco
|
||||||
/homeassistant/components/qld_bushfire/ @exxamalte
|
/homeassistant/components/qld_bushfire/ @exxamalte
|
||||||
@ -1173,6 +1235,7 @@ build.json @home-assistant/supervisor
|
|||||||
/homeassistant/components/qnap_qsw/ @Noltari
|
/homeassistant/components/qnap_qsw/ @Noltari
|
||||||
/tests/components/qnap_qsw/ @Noltari
|
/tests/components/qnap_qsw/ @Noltari
|
||||||
/homeassistant/components/quantum_gateway/ @cisasteelersfan
|
/homeassistant/components/quantum_gateway/ @cisasteelersfan
|
||||||
|
/tests/components/quantum_gateway/ @cisasteelersfan
|
||||||
/homeassistant/components/qvr_pro/ @oblogic7
|
/homeassistant/components/qvr_pro/ @oblogic7
|
||||||
/homeassistant/components/qwikswitch/ @kellerza
|
/homeassistant/components/qwikswitch/ @kellerza
|
||||||
/tests/components/qwikswitch/ @kellerza
|
/tests/components/qwikswitch/ @kellerza
|
||||||
@ -1211,8 +1274,12 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/recovery_mode/ @home-assistant/core
|
/tests/components/recovery_mode/ @home-assistant/core
|
||||||
/homeassistant/components/refoss/ @ashionky
|
/homeassistant/components/refoss/ @ashionky
|
||||||
/tests/components/refoss/ @ashionky
|
/tests/components/refoss/ @ashionky
|
||||||
|
/homeassistant/components/rehlko/ @bdraco @peterager
|
||||||
|
/tests/components/rehlko/ @bdraco @peterager
|
||||||
/homeassistant/components/remote/ @home-assistant/core
|
/homeassistant/components/remote/ @home-assistant/core
|
||||||
/tests/components/remote/ @home-assistant/core
|
/tests/components/remote/ @home-assistant/core
|
||||||
|
/homeassistant/components/remote_calendar/ @Thomas55555 @allenporter
|
||||||
|
/tests/components/remote_calendar/ @Thomas55555 @allenporter
|
||||||
/homeassistant/components/renault/ @epenet
|
/homeassistant/components/renault/ @epenet
|
||||||
/tests/components/renault/ @epenet
|
/tests/components/renault/ @epenet
|
||||||
/homeassistant/components/renson/ @jimmyd-be
|
/homeassistant/components/renson/ @jimmyd-be
|
||||||
@ -1240,8 +1307,8 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/rituals_perfume_genie/ @milanmeu @frenck
|
/tests/components/rituals_perfume_genie/ @milanmeu @frenck
|
||||||
/homeassistant/components/rmvtransport/ @cgtobi
|
/homeassistant/components/rmvtransport/ @cgtobi
|
||||||
/tests/components/rmvtransport/ @cgtobi
|
/tests/components/rmvtransport/ @cgtobi
|
||||||
/homeassistant/components/roborock/ @Lash-L
|
/homeassistant/components/roborock/ @Lash-L @allenporter
|
||||||
/tests/components/roborock/ @Lash-L
|
/tests/components/roborock/ @Lash-L @allenporter
|
||||||
/homeassistant/components/roku/ @ctalkington
|
/homeassistant/components/roku/ @ctalkington
|
||||||
/tests/components/roku/ @ctalkington
|
/tests/components/roku/ @ctalkington
|
||||||
/homeassistant/components/romy/ @xeniter
|
/homeassistant/components/romy/ @xeniter
|
||||||
@ -1254,12 +1321,11 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/rpi_power/ @shenxn @swetoast
|
/tests/components/rpi_power/ @shenxn @swetoast
|
||||||
/homeassistant/components/rss_feed_template/ @home-assistant/core
|
/homeassistant/components/rss_feed_template/ @home-assistant/core
|
||||||
/tests/components/rss_feed_template/ @home-assistant/core
|
/tests/components/rss_feed_template/ @home-assistant/core
|
||||||
/homeassistant/components/rtsp_to_webrtc/ @allenporter
|
|
||||||
/tests/components/rtsp_to_webrtc/ @allenporter
|
|
||||||
/homeassistant/components/ruckus_unleashed/ @lanrat @ms264556 @gabe565
|
/homeassistant/components/ruckus_unleashed/ @lanrat @ms264556 @gabe565
|
||||||
/tests/components/ruckus_unleashed/ @lanrat @ms264556 @gabe565
|
/tests/components/ruckus_unleashed/ @lanrat @ms264556 @gabe565
|
||||||
/homeassistant/components/russound_rio/ @noahhusby
|
/homeassistant/components/russound_rio/ @noahhusby
|
||||||
/tests/components/russound_rio/ @noahhusby
|
/tests/components/russound_rio/ @noahhusby
|
||||||
|
/homeassistant/components/russound_rnet/ @noahhusby
|
||||||
/homeassistant/components/ruuvi_gateway/ @akx
|
/homeassistant/components/ruuvi_gateway/ @akx
|
||||||
/tests/components/ruuvi_gateway/ @akx
|
/tests/components/ruuvi_gateway/ @akx
|
||||||
/homeassistant/components/ruuvitag_ble/ @akx
|
/homeassistant/components/ruuvitag_ble/ @akx
|
||||||
@ -1304,6 +1370,8 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/sensorpro/ @bdraco
|
/tests/components/sensorpro/ @bdraco
|
||||||
/homeassistant/components/sensorpush/ @bdraco
|
/homeassistant/components/sensorpush/ @bdraco
|
||||||
/tests/components/sensorpush/ @bdraco
|
/tests/components/sensorpush/ @bdraco
|
||||||
|
/homeassistant/components/sensorpush_cloud/ @sstallion
|
||||||
|
/tests/components/sensorpush_cloud/ @sstallion
|
||||||
/homeassistant/components/sensoterra/ @markruys
|
/homeassistant/components/sensoterra/ @markruys
|
||||||
/tests/components/sensoterra/ @markruys
|
/tests/components/sensoterra/ @markruys
|
||||||
/homeassistant/components/sentry/ @dcramer @frenck
|
/homeassistant/components/sentry/ @dcramer @frenck
|
||||||
@ -1339,7 +1407,8 @@ build.json @home-assistant/supervisor
|
|||||||
/homeassistant/components/siren/ @home-assistant/core @raman325
|
/homeassistant/components/siren/ @home-assistant/core @raman325
|
||||||
/tests/components/siren/ @home-assistant/core @raman325
|
/tests/components/siren/ @home-assistant/core @raman325
|
||||||
/homeassistant/components/sisyphus/ @jkeljo
|
/homeassistant/components/sisyphus/ @jkeljo
|
||||||
/homeassistant/components/sky_hub/ @rogerselwyn
|
/homeassistant/components/sky_remote/ @dunnmj @saty9
|
||||||
|
/tests/components/sky_remote/ @dunnmj @saty9
|
||||||
/homeassistant/components/skybell/ @tkdrob
|
/homeassistant/components/skybell/ @tkdrob
|
||||||
/tests/components/skybell/ @tkdrob
|
/tests/components/skybell/ @tkdrob
|
||||||
/homeassistant/components/slack/ @tkdrob @fletcherau
|
/homeassistant/components/slack/ @tkdrob @fletcherau
|
||||||
@ -1347,14 +1416,20 @@ build.json @home-assistant/supervisor
|
|||||||
/homeassistant/components/sleepiq/ @mfugate1 @kbickar
|
/homeassistant/components/sleepiq/ @mfugate1 @kbickar
|
||||||
/tests/components/sleepiq/ @mfugate1 @kbickar
|
/tests/components/sleepiq/ @mfugate1 @kbickar
|
||||||
/homeassistant/components/slide/ @ualex73
|
/homeassistant/components/slide/ @ualex73
|
||||||
|
/homeassistant/components/slide_local/ @dontinelli
|
||||||
|
/tests/components/slide_local/ @dontinelli
|
||||||
/homeassistant/components/slimproto/ @marcelveldt
|
/homeassistant/components/slimproto/ @marcelveldt
|
||||||
/tests/components/slimproto/ @marcelveldt
|
/tests/components/slimproto/ @marcelveldt
|
||||||
/homeassistant/components/sma/ @kellerza @rklomp
|
/homeassistant/components/sma/ @kellerza @rklomp @erwindouna
|
||||||
/tests/components/sma/ @kellerza @rklomp
|
/tests/components/sma/ @kellerza @rklomp @erwindouna
|
||||||
/homeassistant/components/smappee/ @bsmappee
|
/homeassistant/components/smappee/ @bsmappee
|
||||||
/tests/components/smappee/ @bsmappee
|
/tests/components/smappee/ @bsmappee
|
||||||
|
/homeassistant/components/smarla/ @explicatis @rlint-explicatis
|
||||||
|
/tests/components/smarla/ @explicatis @rlint-explicatis
|
||||||
/homeassistant/components/smart_meter_texas/ @grahamwetzler
|
/homeassistant/components/smart_meter_texas/ @grahamwetzler
|
||||||
/tests/components/smart_meter_texas/ @grahamwetzler
|
/tests/components/smart_meter_texas/ @grahamwetzler
|
||||||
|
/homeassistant/components/smartthings/ @joostlek
|
||||||
|
/tests/components/smartthings/ @joostlek
|
||||||
/homeassistant/components/smarttub/ @mdz
|
/homeassistant/components/smarttub/ @mdz
|
||||||
/tests/components/smarttub/ @mdz
|
/tests/components/smarttub/ @mdz
|
||||||
/homeassistant/components/smarty/ @z0mbieprocess
|
/homeassistant/components/smarty/ @z0mbieprocess
|
||||||
@ -1369,6 +1444,8 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/snapcast/ @luar123
|
/tests/components/snapcast/ @luar123
|
||||||
/homeassistant/components/snmp/ @nmaggioni
|
/homeassistant/components/snmp/ @nmaggioni
|
||||||
/tests/components/snmp/ @nmaggioni
|
/tests/components/snmp/ @nmaggioni
|
||||||
|
/homeassistant/components/snoo/ @Lash-L
|
||||||
|
/tests/components/snoo/ @Lash-L
|
||||||
/homeassistant/components/snooz/ @AustinBrunkhorst
|
/homeassistant/components/snooz/ @AustinBrunkhorst
|
||||||
/tests/components/snooz/ @AustinBrunkhorst
|
/tests/components/snooz/ @AustinBrunkhorst
|
||||||
/homeassistant/components/solaredge/ @frenck @bdraco
|
/homeassistant/components/solaredge/ @frenck @bdraco
|
||||||
@ -1376,10 +1453,10 @@ build.json @home-assistant/supervisor
|
|||||||
/homeassistant/components/solaredge_local/ @drobtravels @scheric
|
/homeassistant/components/solaredge_local/ @drobtravels @scheric
|
||||||
/homeassistant/components/solarlog/ @Ernst79 @dontinelli
|
/homeassistant/components/solarlog/ @Ernst79 @dontinelli
|
||||||
/tests/components/solarlog/ @Ernst79 @dontinelli
|
/tests/components/solarlog/ @Ernst79 @dontinelli
|
||||||
/homeassistant/components/solax/ @squishykid
|
/homeassistant/components/solax/ @squishykid @Darsstar
|
||||||
/tests/components/solax/ @squishykid
|
/tests/components/solax/ @squishykid @Darsstar
|
||||||
/homeassistant/components/soma/ @ratsept @sebfortier2288
|
/homeassistant/components/soma/ @ratsept
|
||||||
/tests/components/soma/ @ratsept @sebfortier2288
|
/tests/components/soma/ @ratsept
|
||||||
/homeassistant/components/sonarr/ @ctalkington
|
/homeassistant/components/sonarr/ @ctalkington
|
||||||
/tests/components/sonarr/ @ctalkington
|
/tests/components/sonarr/ @ctalkington
|
||||||
/homeassistant/components/songpal/ @rytilahti @shenxn
|
/homeassistant/components/songpal/ @rytilahti @shenxn
|
||||||
@ -1405,15 +1482,14 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/starline/ @anonym-tsk
|
/tests/components/starline/ @anonym-tsk
|
||||||
/homeassistant/components/starlink/ @boswelja
|
/homeassistant/components/starlink/ @boswelja
|
||||||
/tests/components/starlink/ @boswelja
|
/tests/components/starlink/ @boswelja
|
||||||
/homeassistant/components/statistics/ @ThomDietrich
|
/homeassistant/components/statistics/ @ThomDietrich @gjohansson-ST
|
||||||
/tests/components/statistics/ @ThomDietrich
|
/tests/components/statistics/ @ThomDietrich @gjohansson-ST
|
||||||
/homeassistant/components/steam_online/ @tkdrob
|
/homeassistant/components/steam_online/ @tkdrob
|
||||||
/tests/components/steam_online/ @tkdrob
|
/tests/components/steam_online/ @tkdrob
|
||||||
/homeassistant/components/steamist/ @bdraco
|
/homeassistant/components/steamist/ @bdraco
|
||||||
/tests/components/steamist/ @bdraco
|
/tests/components/steamist/ @bdraco
|
||||||
/homeassistant/components/stiebel_eltron/ @fucm
|
/homeassistant/components/stiebel_eltron/ @fucm @ThyMYthOS
|
||||||
/homeassistant/components/stookalert/ @fwestenberg @frenck
|
/tests/components/stiebel_eltron/ @fucm @ThyMYthOS
|
||||||
/tests/components/stookalert/ @fwestenberg @frenck
|
|
||||||
/homeassistant/components/stookwijzer/ @fwestenberg
|
/homeassistant/components/stookwijzer/ @fwestenberg
|
||||||
/tests/components/stookwijzer/ @fwestenberg
|
/tests/components/stookwijzer/ @fwestenberg
|
||||||
/homeassistant/components/stream/ @hunterjm @uvjustin @allenporter
|
/homeassistant/components/stream/ @hunterjm @uvjustin @allenporter
|
||||||
@ -1424,10 +1500,8 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/subaru/ @G-Two
|
/tests/components/subaru/ @G-Two
|
||||||
/homeassistant/components/suez_water/ @ooii @jb101010-2
|
/homeassistant/components/suez_water/ @ooii @jb101010-2
|
||||||
/tests/components/suez_water/ @ooii @jb101010-2
|
/tests/components/suez_water/ @ooii @jb101010-2
|
||||||
/homeassistant/components/sun/ @Swamp-Ig
|
/homeassistant/components/sun/ @home-assistant/core
|
||||||
/tests/components/sun/ @Swamp-Ig
|
/tests/components/sun/ @home-assistant/core
|
||||||
/homeassistant/components/sunweg/ @rokam
|
|
||||||
/tests/components/sunweg/ @rokam
|
|
||||||
/homeassistant/components/supla/ @mwegrzynek
|
/homeassistant/components/supla/ @mwegrzynek
|
||||||
/homeassistant/components/surepetcare/ @benleb @danielhiversen
|
/homeassistant/components/surepetcare/ @benleb @danielhiversen
|
||||||
/tests/components/surepetcare/ @benleb @danielhiversen
|
/tests/components/surepetcare/ @benleb @danielhiversen
|
||||||
@ -1440,8 +1514,8 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/switch_as_x/ @home-assistant/core
|
/tests/components/switch_as_x/ @home-assistant/core
|
||||||
/homeassistant/components/switchbee/ @jafar-atili
|
/homeassistant/components/switchbee/ @jafar-atili
|
||||||
/tests/components/switchbee/ @jafar-atili
|
/tests/components/switchbee/ @jafar-atili
|
||||||
/homeassistant/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski
|
/homeassistant/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski @zerzhang
|
||||||
/tests/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski
|
/tests/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski @zerzhang
|
||||||
/homeassistant/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
|
/homeassistant/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
|
||||||
/tests/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
|
/tests/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
|
||||||
/homeassistant/components/switcher_kis/ @thecode @YogevBokobza
|
/homeassistant/components/switcher_kis/ @thecode @YogevBokobza
|
||||||
@ -1458,8 +1532,8 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/system_bridge/ @timmo001
|
/tests/components/system_bridge/ @timmo001
|
||||||
/homeassistant/components/systemmonitor/ @gjohansson-ST
|
/homeassistant/components/systemmonitor/ @gjohansson-ST
|
||||||
/tests/components/systemmonitor/ @gjohansson-ST
|
/tests/components/systemmonitor/ @gjohansson-ST
|
||||||
/homeassistant/components/tado/ @chiefdragon @erwindouna
|
/homeassistant/components/tado/ @erwindouna
|
||||||
/tests/components/tado/ @chiefdragon @erwindouna
|
/tests/components/tado/ @erwindouna
|
||||||
/homeassistant/components/tag/ @balloob @dmulcahey
|
/homeassistant/components/tag/ @balloob @dmulcahey
|
||||||
/tests/components/tag/ @balloob @dmulcahey
|
/tests/components/tag/ @balloob @dmulcahey
|
||||||
/homeassistant/components/tailscale/ @frenck
|
/homeassistant/components/tailscale/ @frenck
|
||||||
@ -1479,10 +1553,12 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/technove/ @Moustachauve
|
/tests/components/technove/ @Moustachauve
|
||||||
/homeassistant/components/tedee/ @patrickhilker @zweckj
|
/homeassistant/components/tedee/ @patrickhilker @zweckj
|
||||||
/tests/components/tedee/ @patrickhilker @zweckj
|
/tests/components/tedee/ @patrickhilker @zweckj
|
||||||
|
/homeassistant/components/telegram_bot/ @hanwg
|
||||||
|
/tests/components/telegram_bot/ @hanwg
|
||||||
/homeassistant/components/tellduslive/ @fredrike
|
/homeassistant/components/tellduslive/ @fredrike
|
||||||
/tests/components/tellduslive/ @fredrike
|
/tests/components/tellduslive/ @fredrike
|
||||||
/homeassistant/components/template/ @PhracturedBlue @tetienne @home-assistant/core
|
/homeassistant/components/template/ @Petro31 @home-assistant/core
|
||||||
/tests/components/template/ @PhracturedBlue @tetienne @home-assistant/core
|
/tests/components/template/ @Petro31 @home-assistant/core
|
||||||
/homeassistant/components/tesla_fleet/ @Bre77
|
/homeassistant/components/tesla_fleet/ @Bre77
|
||||||
/tests/components/tesla_fleet/ @Bre77
|
/tests/components/tesla_fleet/ @Bre77
|
||||||
/homeassistant/components/tesla_wall_connector/ @einarhauks
|
/homeassistant/components/tesla_wall_connector/ @einarhauks
|
||||||
@ -1508,6 +1584,8 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/tile/ @bachya
|
/tests/components/tile/ @bachya
|
||||||
/homeassistant/components/tilt_ble/ @apt-itude
|
/homeassistant/components/tilt_ble/ @apt-itude
|
||||||
/tests/components/tilt_ble/ @apt-itude
|
/tests/components/tilt_ble/ @apt-itude
|
||||||
|
/homeassistant/components/tilt_pi/ @michaelheyman
|
||||||
|
/tests/components/tilt_pi/ @michaelheyman
|
||||||
/homeassistant/components/time/ @home-assistant/core
|
/homeassistant/components/time/ @home-assistant/core
|
||||||
/tests/components/time/ @home-assistant/core
|
/tests/components/time/ @home-assistant/core
|
||||||
/homeassistant/components/time_date/ @fabaff
|
/homeassistant/components/time_date/ @fabaff
|
||||||
@ -1553,8 +1631,8 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/triggercmd/ @rvmey
|
/tests/components/triggercmd/ @rvmey
|
||||||
/homeassistant/components/tts/ @home-assistant/core
|
/homeassistant/components/tts/ @home-assistant/core
|
||||||
/tests/components/tts/ @home-assistant/core
|
/tests/components/tts/ @home-assistant/core
|
||||||
/homeassistant/components/tuya/ @Tuya @zlinoliver @frenck
|
/homeassistant/components/tuya/ @Tuya @zlinoliver
|
||||||
/tests/components/tuya/ @Tuya @zlinoliver @frenck
|
/tests/components/tuya/ @Tuya @zlinoliver
|
||||||
/homeassistant/components/twentemilieu/ @frenck
|
/homeassistant/components/twentemilieu/ @frenck
|
||||||
/tests/components/twentemilieu/ @frenck
|
/tests/components/twentemilieu/ @frenck
|
||||||
/homeassistant/components/twinkly/ @dr1rrb @Robbie1221 @Olen
|
/homeassistant/components/twinkly/ @dr1rrb @Robbie1221 @Olen
|
||||||
@ -1567,6 +1645,8 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/unifi/ @Kane610
|
/tests/components/unifi/ @Kane610
|
||||||
/homeassistant/components/unifi_direct/ @tofuSCHNITZEL
|
/homeassistant/components/unifi_direct/ @tofuSCHNITZEL
|
||||||
/homeassistant/components/unifiled/ @florisvdk
|
/homeassistant/components/unifiled/ @florisvdk
|
||||||
|
/homeassistant/components/unifiprotect/ @RaHehl
|
||||||
|
/tests/components/unifiprotect/ @RaHehl
|
||||||
/homeassistant/components/upb/ @gwww
|
/homeassistant/components/upb/ @gwww
|
||||||
/tests/components/upb/ @gwww
|
/tests/components/upb/ @gwww
|
||||||
/homeassistant/components/upc_connect/ @pvizeli @fabaff
|
/homeassistant/components/upc_connect/ @pvizeli @fabaff
|
||||||
@ -1594,17 +1674,19 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/vallox/ @andre-richter @slovdahl @viiru- @yozik04
|
/tests/components/vallox/ @andre-richter @slovdahl @viiru- @yozik04
|
||||||
/homeassistant/components/valve/ @home-assistant/core
|
/homeassistant/components/valve/ @home-assistant/core
|
||||||
/tests/components/valve/ @home-assistant/core
|
/tests/components/valve/ @home-assistant/core
|
||||||
|
/homeassistant/components/vegehub/ @ghowevege
|
||||||
|
/tests/components/vegehub/ @ghowevege
|
||||||
/homeassistant/components/velbus/ @Cereal2nd @brefra
|
/homeassistant/components/velbus/ @Cereal2nd @brefra
|
||||||
/tests/components/velbus/ @Cereal2nd @brefra
|
/tests/components/velbus/ @Cereal2nd @brefra
|
||||||
/homeassistant/components/velux/ @Julius2342 @DeerMaximum
|
/homeassistant/components/velux/ @Julius2342 @DeerMaximum @pawlizio
|
||||||
/tests/components/velux/ @Julius2342 @DeerMaximum
|
/tests/components/velux/ @Julius2342 @DeerMaximum @pawlizio
|
||||||
/homeassistant/components/venstar/ @garbled1 @jhollowe
|
/homeassistant/components/venstar/ @garbled1 @jhollowe
|
||||||
/tests/components/venstar/ @garbled1 @jhollowe
|
/tests/components/venstar/ @garbled1 @jhollowe
|
||||||
/homeassistant/components/versasense/ @imstevenxyz
|
/homeassistant/components/versasense/ @imstevenxyz
|
||||||
/homeassistant/components/version/ @ludeeus
|
/homeassistant/components/version/ @ludeeus
|
||||||
/tests/components/version/ @ludeeus
|
/tests/components/version/ @ludeeus
|
||||||
/homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja
|
/homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak
|
||||||
/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja
|
/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak
|
||||||
/homeassistant/components/vicare/ @CFenner
|
/homeassistant/components/vicare/ @CFenner
|
||||||
/tests/components/vicare/ @CFenner
|
/tests/components/vicare/ @CFenner
|
||||||
/homeassistant/components/vilfo/ @ManneW
|
/homeassistant/components/vilfo/ @ManneW
|
||||||
@ -1616,8 +1698,8 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/vlc_telnet/ @rodripf @MartinHjelmare
|
/tests/components/vlc_telnet/ @rodripf @MartinHjelmare
|
||||||
/homeassistant/components/vodafone_station/ @paoloantinori @chemelli74
|
/homeassistant/components/vodafone_station/ @paoloantinori @chemelli74
|
||||||
/tests/components/vodafone_station/ @paoloantinori @chemelli74
|
/tests/components/vodafone_station/ @paoloantinori @chemelli74
|
||||||
/homeassistant/components/voip/ @balloob @synesthesiam
|
/homeassistant/components/voip/ @balloob @synesthesiam @jaminh
|
||||||
/tests/components/voip/ @balloob @synesthesiam
|
/tests/components/voip/ @balloob @synesthesiam @jaminh
|
||||||
/homeassistant/components/volumio/ @OnFreund
|
/homeassistant/components/volumio/ @OnFreund
|
||||||
/tests/components/volumio/ @OnFreund
|
/tests/components/volumio/ @OnFreund
|
||||||
/homeassistant/components/volvooncall/ @molobrakos
|
/homeassistant/components/volvooncall/ @molobrakos
|
||||||
@ -1634,6 +1716,8 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/waqi/ @joostlek
|
/tests/components/waqi/ @joostlek
|
||||||
/homeassistant/components/water_heater/ @home-assistant/core
|
/homeassistant/components/water_heater/ @home-assistant/core
|
||||||
/tests/components/water_heater/ @home-assistant/core
|
/tests/components/water_heater/ @home-assistant/core
|
||||||
|
/homeassistant/components/watergate/ @adam-the-hero
|
||||||
|
/tests/components/watergate/ @adam-the-hero
|
||||||
/homeassistant/components/watson_tts/ @rutkai
|
/homeassistant/components/watson_tts/ @rutkai
|
||||||
/homeassistant/components/watttime/ @bachya
|
/homeassistant/components/watttime/ @bachya
|
||||||
/tests/components/watttime/ @bachya
|
/tests/components/watttime/ @bachya
|
||||||
@ -1647,6 +1731,8 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/weatherflow_cloud/ @jeeftor
|
/tests/components/weatherflow_cloud/ @jeeftor
|
||||||
/homeassistant/components/weatherkit/ @tjhorner
|
/homeassistant/components/weatherkit/ @tjhorner
|
||||||
/tests/components/weatherkit/ @tjhorner
|
/tests/components/weatherkit/ @tjhorner
|
||||||
|
/homeassistant/components/webdav/ @jpbede
|
||||||
|
/tests/components/webdav/ @jpbede
|
||||||
/homeassistant/components/webhook/ @home-assistant/core
|
/homeassistant/components/webhook/ @home-assistant/core
|
||||||
/tests/components/webhook/ @home-assistant/core
|
/tests/components/webhook/ @home-assistant/core
|
||||||
/homeassistant/components/webmin/ @autinerd
|
/homeassistant/components/webmin/ @autinerd
|
||||||
@ -1718,6 +1804,7 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/youless/ @gjong
|
/tests/components/youless/ @gjong
|
||||||
/homeassistant/components/youtube/ @joostlek
|
/homeassistant/components/youtube/ @joostlek
|
||||||
/tests/components/youtube/ @joostlek
|
/tests/components/youtube/ @joostlek
|
||||||
|
/homeassistant/components/zabbix/ @kruton
|
||||||
/homeassistant/components/zamg/ @killer0071234
|
/homeassistant/components/zamg/ @killer0071234
|
||||||
/tests/components/zamg/ @killer0071234
|
/tests/components/zamg/ @killer0071234
|
||||||
/homeassistant/components/zengge/ @emontnemery
|
/homeassistant/components/zengge/ @emontnemery
|
||||||
@ -1729,6 +1816,8 @@ build.json @home-assistant/supervisor
|
|||||||
/tests/components/zeversolar/ @kvanzuijlen
|
/tests/components/zeversolar/ @kvanzuijlen
|
||||||
/homeassistant/components/zha/ @dmulcahey @adminiuga @puddly @TheJulianJES
|
/homeassistant/components/zha/ @dmulcahey @adminiuga @puddly @TheJulianJES
|
||||||
/tests/components/zha/ @dmulcahey @adminiuga @puddly @TheJulianJES
|
/tests/components/zha/ @dmulcahey @adminiuga @puddly @TheJulianJES
|
||||||
|
/homeassistant/components/zimi/ @markhannon
|
||||||
|
/tests/components/zimi/ @markhannon
|
||||||
/homeassistant/components/zodiac/ @JulienTant
|
/homeassistant/components/zodiac/ @JulienTant
|
||||||
/tests/components/zodiac/ @JulienTant
|
/tests/components/zodiac/ @JulienTant
|
||||||
/homeassistant/components/zone/ @home-assistant/core
|
/homeassistant/components/zone/ @home-assistant/core
|
||||||
|
Dockerfile (generated): 38 changes
@@ -12,8 +12,26 @@ ENV \

ARG QEMU_CPU
+
+# Home Assistant S6-Overlay
+COPY rootfs /
+
+# Needs to be redefined inside the FROM statement to be set for RUN commands
+ARG BUILD_ARCH
+# Get go2rtc binary
+RUN \
+    case "${BUILD_ARCH}" in \
+        "aarch64") go2rtc_suffix='arm64' ;; \
+        "armhf") go2rtc_suffix='armv6' ;; \
+        "armv7") go2rtc_suffix='arm' ;; \
+        *) go2rtc_suffix=${BUILD_ARCH} ;; \
+    esac \
+    && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.9/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
+    && chmod +x /bin/go2rtc \
+    # Verify go2rtc can be executed
+    && go2rtc --version
+
# Install uv
-RUN pip3 install uv==0.4.28
+RUN pip3 install uv==0.7.1

WORKDIR /usr/src

@@ -42,22 +60,4 @@ RUN \
    && python3 -m compileall \
        homeassistant/homeassistant
-
-# Home Assistant S6-Overlay
-COPY rootfs /
-
-# Needs to be redefined inside the FROM statement to be set for RUN commands
-ARG BUILD_ARCH
-# Get go2rtc binary
-RUN \
-    case "${BUILD_ARCH}" in \
-        "aarch64") go2rtc_suffix='arm64' ;; \
-        "armhf") go2rtc_suffix='armv6' ;; \
-        "armv7") go2rtc_suffix='arm' ;; \
-        *) go2rtc_suffix=${BUILD_ARCH} ;; \
-    esac \
-    && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.6/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
-    && chmod +x /bin/go2rtc \
-    # Verify go2rtc can be executed
-    && go2rtc --version

WORKDIR /config
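The case statement moved up in the hunk above maps Home Assistant's BUILD_ARCH names onto go2rtc's release-asset suffixes, with unknown architectures falling through unchanged. A hypothetical Python rendering of that mapping (illustration only, not part of the repository) makes the fallthrough explicit:

    # Hypothetical illustration of the BUILD_ARCH -> go2rtc asset suffix
    # mapping from the Dockerfile hunk above; any architecture without an
    # explicit entry is used as the suffix as-is.
    GO2RTC_SUFFIXES = {"aarch64": "arm64", "armhf": "armv6", "armv7": "arm"}


    def go2rtc_asset(build_arch: str, version: str = "1.9.9") -> str:
        """Return the download URL for the matching go2rtc release binary."""
        suffix = GO2RTC_SUFFIXES.get(build_arch, build_arch)
        return (
            "https://github.com/AlexxIT/go2rtc/releases/download/"
            f"v{version}/go2rtc_linux_{suffix}"
        )


    assert go2rtc_asset("armhf").endswith("go2rtc_linux_armv6")
    assert go2rtc_asset("amd64").endswith("go2rtc_linux_amd64")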
@@ -1,4 +1,4 @@
-FROM mcr.microsoft.com/devcontainers/python:1-3.12
+FROM mcr.microsoft.com/devcontainers/python:1-3.13

SHELL ["/bin/bash", "-o", "pipefail", "-c"]

@@ -35,6 +35,9 @@ RUN \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

+# Add go2rtc binary
+COPY --from=ghcr.io/alexxit/go2rtc:latest /usr/local/bin/go2rtc /bin/go2rtc
+
# Install uv
RUN pip3 install uv

build.yaml: 12 changes
@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
-  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.06.1
+  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.05.0
-  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.06.1
+  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.05.0
-  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.06.1
+  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.05.0
-  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.06.1
+  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.05.0
-  i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.06.1
+  i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.05.0
codenotary:
  signer: notary@home-assistant.io
  base_image: notary@home-assistant.io
@@ -19,4 +19,4 @@ labels:
  org.opencontainers.image.authors: The Home Assistant Authors
  org.opencontainers.image.url: https://www.home-assistant.io/
  org.opencontainers.image.documentation: https://www.home-assistant.io/docs/
-  org.opencontainers.image.licenses: Apache License 2.0
+  org.opencontainers.image.licenses: Apache-2.0
@@ -38,8 +38,7 @@ def validate_python() -> None:


def ensure_config_path(config_dir: str) -> None:
    """Validate the configuration directory."""
-    # pylint: disable-next=import-outside-toplevel
-    from . import config as config_util
+    from . import config as config_util  # noqa: PLC0415

    lib_dir = os.path.join(config_dir, "deps")

@@ -80,8 +79,7 @@ def ensure_config_path(config_dir: str) -> None:

def get_arguments() -> argparse.Namespace:
    """Get parsed passed in arguments."""
-    # pylint: disable-next=import-outside-toplevel
-    from . import config as config_util
+    from . import config as config_util  # noqa: PLC0415

    parser = argparse.ArgumentParser(
        description="Home Assistant: Observe, Control, Automate.",
@@ -177,8 +175,7 @@ def main() -> int:
    validate_os()

    if args.script is not None:
-        # pylint: disable-next=import-outside-toplevel
-        from . import scripts
+        from . import scripts  # noqa: PLC0415

        return scripts.run(args.script)

@@ -188,8 +185,7 @@ def main() -> int:

    ensure_config_path(config_dir)

-    # pylint: disable-next=import-outside-toplevel
-    from . import config, runner
+    from . import config, runner  # noqa: PLC0415

    safe_mode = config.safe_mode_enabled(config_dir)

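The four hunks above swap pylint's import-outside-toplevel pragma for Ruff's PLC0415 suppression while keeping the deferred imports themselves. A minimal sketch of the pattern, using a hypothetical function rather than Home Assistant code, shows why the import sits inside the function body:

    # Minimal sketch (hypothetical, not Home Assistant code): the heavy
    # dependency is imported only when the function actually runs, so plain
    # start-up never pays for it. PLC0415 is Ruff's rule against
    # function-level imports; the noqa marks the lazy import as intentional.
    import io


    def render_chart(values: dict[str, float]) -> bytes:
        """Render values to PNG bytes; matplotlib loads on first use only."""
        import matplotlib.pyplot as plt  # noqa: PLC0415

        fig, ax = plt.subplots()
        ax.bar(list(values), list(values.values()))
        buf = io.BytesIO()
        fig.savefig(buf, format="png")
        return buf.getvalue()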
@@ -115,7 +115,7 @@ class AuthManagerFlowManager(
        *,
        context: AuthFlowContext | None = None,
        data: dict[str, Any] | None = None,
-    ) -> LoginFlow:
+    ) -> LoginFlow[Any]:
        """Create a login flow."""
        auth_provider = self.auth_manager.get_auth_provider(*handler_key)
        if not auth_provider:
@@ -308,7 +308,7 @@ class AuthStore:
        credentials.data = data
        self._async_schedule_save()

-    async def async_load(self) -> None:  # noqa: C901
+    async def async_load(self) -> None:
        """Load the users."""
        if self._loaded:
            raise RuntimeError("Auth storage is already loaded")
@@ -18,7 +18,7 @@ from homeassistant.util.json import json_loads
JWT_TOKEN_CACHE_SIZE = 16
MAX_TOKEN_SIZE = 8192

-_VERIFY_KEYS = ("signature", "exp", "nbf", "iat", "aud", "iss")
+_VERIFY_KEYS = ("signature", "exp", "nbf", "iat", "aud", "iss", "sub", "jti")

_VERIFY_OPTIONS: dict[str, Any] = {f"verify_{key}": True for key in _VERIFY_KEYS} | {
    "require": []
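For context, the comprehension above expands into a plain mapping of verify_* flags; the two new keys extend claim validation to "sub" and "jti". A self-contained sketch of the expansion (the surrounding use of the options dict is not shown and is not reproduced here):

    # Standalone sketch of what the hunk above builds: every key in
    # _VERIFY_KEYS becomes a verify_<key>: True flag, and the dict-union
    # operator appends the "require" entry.
    from typing import Any

    _VERIFY_KEYS = ("signature", "exp", "nbf", "iat", "aud", "iss", "sub", "jti")
    _VERIFY_OPTIONS: dict[str, Any] = {
        f"verify_{key}": True for key in _VERIFY_KEYS
    } | {"require": []}

    assert _VERIFY_OPTIONS["verify_sub"] is True
    assert _VERIFY_OPTIONS["verify_jti"] is True
    assert _VERIFY_OPTIONS["require"] == []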
@@ -71,7 +71,7 @@ class MultiFactorAuthModule:
        """Return a voluptuous schema to define mfa auth module's input."""
        raise NotImplementedError

-    async def async_setup_flow(self, user_id: str) -> SetupFlow:
+    async def async_setup_flow(self, user_id: str) -> SetupFlow[Any]:
        """Return a data entry flow handler for setup module.

        Mfa module should extend SetupFlow
@@ -95,11 +95,16 @@ class MultiFactorAuthModule:
        raise NotImplementedError


-class SetupFlow(data_entry_flow.FlowHandler):
+class SetupFlow[_MultiFactorAuthModuleT: MultiFactorAuthModule = MultiFactorAuthModule](
+    data_entry_flow.FlowHandler
+):
    """Handler for the setup flow."""

    def __init__(
-        self, auth_module: MultiFactorAuthModule, setup_schema: vol.Schema, user_id: str
+        self,
+        auth_module: _MultiFactorAuthModuleT,
+        setup_schema: vol.Schema,
+        user_id: str,
    ) -> None:
        """Initialize the setup flow."""
        self._auth_module = auth_module
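The SetupFlow change above uses Python's inline generic-class syntax (PEP 695) with a defaulted type parameter (PEP 696, available on Python 3.13, matching the devcontainer bump earlier in this diff). That is what lets the later hunks delete the manual self._auth_module re-annotations in subclasses. A reduced sketch with hypothetical class names:

    # Reduced sketch (hypothetical classes) of the generic-flow typing
    # pattern: the flow is parametrized over its module type with a default,
    # so a bare Flow still works while NotifyFlow gets a precisely typed
    # self._module without re-annotating it.
    class Module:
        """Base module."""


    class NotifyModule(Module):
        def send(self) -> None:
            print("notified")


    class Flow[ModuleT: Module = Module]:
        def __init__(self, module: ModuleT) -> None:
            self._module = module  # typed as ModuleT


    class NotifyFlow(Flow[NotifyModule]):
        def run(self) -> None:
            self._module.send()  # type checkers know this is a NotifyModule


    NotifyFlow(NotifyModule()).run()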
@@ -52,28 +52,28 @@ _LOGGER = logging.getLogger(__name__)

def _generate_secret() -> str:
    """Generate a secret."""
-    import pyotp  # pylint: disable=import-outside-toplevel
+    import pyotp  # noqa: PLC0415

    return str(pyotp.random_base32())


def _generate_random() -> int:
    """Generate a 32 digit number."""
-    import pyotp  # pylint: disable=import-outside-toplevel
+    import pyotp  # noqa: PLC0415

    return int(pyotp.random_base32(length=32, chars=list("1234567890")))


def _generate_otp(secret: str, count: int) -> str:
    """Generate one time password."""
-    import pyotp  # pylint: disable=import-outside-toplevel
+    import pyotp  # noqa: PLC0415

    return str(pyotp.HOTP(secret).at(count))


def _verify_otp(secret: str, otp: str, count: int) -> bool:
    """Verify one time password."""
-    import pyotp  # pylint: disable=import-outside-toplevel
+    import pyotp  # noqa: PLC0415

    return bool(pyotp.HOTP(secret).verify(otp, count))

@@ -162,7 +162,7 @@ class NotifyAuthModule(MultiFactorAuthModule):

        return sorted(unordered_services)

-    async def async_setup_flow(self, user_id: str) -> SetupFlow:
+    async def async_setup_flow(self, user_id: str) -> NotifySetupFlow:
        """Return a data entry flow handler for setup module.

        Mfa module should extend SetupFlow
@@ -268,7 +268,7 @@ class NotifyAuthModule(MultiFactorAuthModule):
        await self.hass.services.async_call("notify", notify_service, data)


-class NotifySetupFlow(SetupFlow):
+class NotifySetupFlow(SetupFlow[NotifyAuthModule]):
    """Handler for the setup flow."""

    def __init__(
@@ -280,8 +280,6 @@ class NotifySetupFlow(SetupFlow):
    ) -> None:
        """Initialize the setup flow."""
        super().__init__(auth_module, setup_schema, user_id)
-        # to fix typing complaint
-        self._auth_module: NotifyAuthModule = auth_module
        self._available_notify_services = available_notify_services
        self._secret: str | None = None
        self._count: int | None = None
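The module-level helpers above are thin wrappers around pyotp's counter-based HOTP API. A short usage sketch of those same calls (assumes pyotp is installed; not part of the diff):

    # Usage sketch for the pyotp calls wrapped by the helpers above
    # (requires `pip install pyotp`). HOTP is counter-based: the same secret
    # and counter always yield the same one-time password.
    import pyotp

    secret = pyotp.random_base32()
    hotp = pyotp.HOTP(secret)

    code = hotp.at(0)                # OTP at counter value 0
    assert hotp.verify(code, 0)      # verifies at the same counter
    assert not hotp.verify(code, 1)  # fails once the counter has advanced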
@ -37,7 +37,7 @@ DUMMY_SECRET = "FPPTH34D4E3MI2HG"
|
|||||||
|
|
||||||
def _generate_qr_code(data: str) -> str:
|
def _generate_qr_code(data: str) -> str:
|
||||||
"""Generate a base64 PNG string represent QR Code image of data."""
|
"""Generate a base64 PNG string represent QR Code image of data."""
|
||||||
import pyqrcode # pylint: disable=import-outside-toplevel
|
import pyqrcode # noqa: PLC0415
|
||||||
|
|
||||||
qr_code = pyqrcode.create(data)
|
qr_code = pyqrcode.create(data)
|
||||||
|
|
||||||
@ -59,7 +59,7 @@ def _generate_qr_code(data: str) -> str:
|
|||||||
|
|
||||||
def _generate_secret_and_qr_code(username: str) -> tuple[str, str, str]:
|
def _generate_secret_and_qr_code(username: str) -> tuple[str, str, str]:
|
||||||
"""Generate a secret, url, and QR code."""
|
"""Generate a secret, url, and QR code."""
|
||||||
import pyotp # pylint: disable=import-outside-toplevel
|
import pyotp # noqa: PLC0415
|
||||||
|
|
||||||
ota_secret = pyotp.random_base32()
|
ota_secret = pyotp.random_base32()
|
||||||
url = pyotp.totp.TOTP(ota_secret).provisioning_uri(
|
url = pyotp.totp.TOTP(ota_secret).provisioning_uri(
|
||||||
@ -107,14 +107,14 @@ class TotpAuthModule(MultiFactorAuthModule):
|
|||||||
|
|
||||||
def _add_ota_secret(self, user_id: str, secret: str | None = None) -> str:
|
def _add_ota_secret(self, user_id: str, secret: str | None = None) -> str:
|
||||||
"""Create a ota_secret for user."""
|
"""Create a ota_secret for user."""
|
||||||
import pyotp # pylint: disable=import-outside-toplevel
|
import pyotp # noqa: PLC0415
|
||||||
|
|
||||||
ota_secret: str = secret or pyotp.random_base32()
|
ota_secret: str = secret or pyotp.random_base32()
|
||||||
|
|
||||||
self._users[user_id] = ota_secret # type: ignore[index]
|
self._users[user_id] = ota_secret # type: ignore[index]
|
||||||
return ota_secret
|
return ota_secret
|
||||||
|
|
||||||
async def async_setup_flow(self, user_id: str) -> SetupFlow:
|
async def async_setup_flow(self, user_id: str) -> TotpSetupFlow:
|
||||||
"""Return a data entry flow handler for setup module.
|
"""Return a data entry flow handler for setup module.
|
||||||
|
|
||||||
An MFA module should extend SetupFlow
|
An MFA module should extend SetupFlow
|
||||||
@ -163,7 +163,7 @@ class TotpAuthModule(MultiFactorAuthModule):
|
|||||||
|
|
||||||
def _validate_2fa(self, user_id: str, code: str) -> bool:
|
def _validate_2fa(self, user_id: str, code: str) -> bool:
|
||||||
"""Validate two factor authentication code."""
|
"""Validate two factor authentication code."""
|
||||||
import pyotp # pylint: disable=import-outside-toplevel
|
import pyotp # noqa: PLC0415
|
||||||
|
|
||||||
if (ota_secret := self._users.get(user_id)) is None: # type: ignore[union-attr]
|
if (ota_secret := self._users.get(user_id)) is None: # type: ignore[union-attr]
|
||||||
# even if we cannot find the user, we still do a verify
|
# even if we cannot find the user, we still do a verify
|
||||||
@ -174,20 +174,19 @@ class TotpAuthModule(MultiFactorAuthModule):
|
|||||||
return bool(pyotp.TOTP(ota_secret).verify(code, valid_window=1))
|
return bool(pyotp.TOTP(ota_secret).verify(code, valid_window=1))
|
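
For context on valid_window=1 above: pyotp then also accepts the code from one 30-second time step before or after the current one, absorbing small clock drift between server and authenticator. An illustrative sketch:

import pyotp

totp = pyotp.TOTP(pyotp.random_base32())  # illustrative secret
code = totp.now()
# Accepted for the current step, or one step on either side.
assert totp.verify(code, valid_window=1)
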
||||||
|
|
||||||
|
|
||||||
class TotpSetupFlow(SetupFlow):
|
class TotpSetupFlow(SetupFlow[TotpAuthModule]):
|
||||||
"""Handler for the setup flow."""
|
"""Handler for the setup flow."""
|
||||||
|
|
||||||
|
_ota_secret: str
|
||||||
|
_url: str
|
||||||
|
_image: str
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self, auth_module: TotpAuthModule, setup_schema: vol.Schema, user: User
|
self, auth_module: TotpAuthModule, setup_schema: vol.Schema, user: User
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Initialize the setup flow."""
|
"""Initialize the setup flow."""
|
||||||
super().__init__(auth_module, setup_schema, user.id)
|
super().__init__(auth_module, setup_schema, user.id)
|
||||||
# to fix typing complaint
|
|
||||||
self._auth_module: TotpAuthModule = auth_module
|
|
||||||
self._user = user
|
self._user = user
|
||||||
self._ota_secret: str = ""
|
|
||||||
self._url: str | None = None
|
|
||||||
self._image: str | None = None
|
|
||||||
|
|
||||||
async def async_step_init(
|
async def async_step_init(
|
||||||
self, user_input: dict[str, str] | None = None
|
self, user_input: dict[str, str] | None = None
|
||||||
@ -197,7 +196,7 @@ class TotpSetupFlow(SetupFlow):
|
|||||||
Return self.async_show_form(step_id='init') if user_input is None.
|
Return self.async_show_form(step_id='init') if user_input is None.
|
||||||
Return self.async_create_entry(data={'result': result}) if finish.
|
Return self.async_create_entry(data={'result': result}) if finish.
|
||||||
"""
|
"""
|
||||||
import pyotp # pylint: disable=import-outside-toplevel
|
import pyotp # noqa: PLC0415
|
||||||
|
|
||||||
errors: dict[str, str] = {}
|
errors: dict[str, str] = {}
|
||||||
|
|
||||||
@ -214,12 +213,11 @@ class TotpSetupFlow(SetupFlow):
|
|||||||
errors["base"] = "invalid_code"
|
errors["base"] = "invalid_code"
|
||||||
|
|
||||||
else:
|
else:
|
||||||
hass = self._auth_module.hass
|
|
||||||
(
|
(
|
||||||
self._ota_secret,
|
self._ota_secret,
|
||||||
self._url,
|
self._url,
|
||||||
self._image,
|
self._image,
|
||||||
) = await hass.async_add_executor_job(
|
) = await self._auth_module.hass.async_add_executor_job(
|
||||||
_generate_secret_and_qr_code,
|
_generate_secret_and_qr_code,
|
||||||
str(self._user.name),
|
str(self._user.name),
|
||||||
)
|
)
|
||||||
|
@ -11,7 +11,7 @@ import uuid
|
|||||||
import attr
|
import attr
|
||||||
from attr import Attribute
|
from attr import Attribute
|
||||||
from attr.setters import validate
|
from attr.setters import validate
|
||||||
from propcache import cached_property
|
from propcache.api import cached_property
|
||||||
|
|
||||||
from homeassistant.const import __version__
|
from homeassistant.const import __version__
|
||||||
from homeassistant.data_entry_flow import FlowContext, FlowResult
|
from homeassistant.data_entry_flow import FlowContext, FlowResult
|
||||||
|
@ -17,12 +17,12 @@ POLICY_SCHEMA = vol.Schema({vol.Optional(CAT_ENTITIES): ENTITY_POLICY_SCHEMA})
|
|||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
"POLICY_SCHEMA",
|
"POLICY_SCHEMA",
|
||||||
"merge_policies",
|
|
||||||
"PermissionLookup",
|
|
||||||
"PolicyType",
|
|
||||||
"AbstractPermissions",
|
"AbstractPermissions",
|
||||||
"PolicyPermissions",
|
|
||||||
"OwnerPermissions",
|
"OwnerPermissions",
|
||||||
|
"PermissionLookup",
|
||||||
|
"PolicyPermissions",
|
||||||
|
"PolicyType",
|
||||||
|
"merge_policies",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
@ -105,7 +105,7 @@ class AuthProvider:
|
|||||||
|
|
||||||
# Implement by extending class
|
# Implement by extending class
|
||||||
|
|
||||||
async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
|
async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow[Any]:
|
||||||
"""Return the data flow for logging in with auth provider.
|
"""Return the data flow for logging in with auth provider.
|
||||||
|
|
||||||
Auth provider should extend LoginFlow and return an instance.
|
Auth provider should extend LoginFlow and return an instance.
|
||||||
@ -192,12 +192,14 @@ async def load_auth_provider_module(
|
|||||||
return module
|
return module
|
||||||
|
|
||||||
|
|
||||||
class LoginFlow(FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]]):
|
class LoginFlow[_AuthProviderT: AuthProvider = AuthProvider](
|
||||||
|
FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]],
|
||||||
|
):
|
||||||
"""Handler for the login flow."""
|
"""Handler for the login flow."""
|
||||||
|
|
||||||
_flow_result = AuthFlowResult
|
_flow_result = AuthFlowResult
|
||||||
|
|
||||||
def __init__(self, auth_provider: AuthProvider) -> None:
|
def __init__(self, auth_provider: _AuthProviderT) -> None:
|
||||||
"""Initialize the login flow."""
|
"""Initialize the login flow."""
|
||||||
self._auth_provider = auth_provider
|
self._auth_provider = auth_provider
|
||||||
self._auth_module_id: str | None = None
|
self._auth_module_id: str | None = None
|
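
The new signature combines PEP 695 inline generics with a PEP 696 type-parameter default, so subclasses can pin their provider type while a bare LoginFlow stays usable unparameterized. A self-contained sketch of the pattern under illustrative names (not the Home Assistant API; type-parameter defaults need Python 3.13+):

class Provider:
    def validate(self, username: str, password: str) -> None: ...


class CommandLineProvider(Provider):
    def validate(self, username: str, password: str) -> None:
        print("validated", username)


class Flow[ProviderT: Provider = Provider]:
    def __init__(self, provider: ProviderT) -> None:
        # Typed as ProviderT, so subclasses that pin ProviderT reach
        # subclass-only members without cast().
        self.provider = provider


class CommandLineFlow(Flow[CommandLineProvider]):
    def run(self) -> None:
        self.provider.validate("user", "secret")


CommandLineFlow(CommandLineProvider()).run()

This is why the later hunks in the concrete login flows can delete their cast(...) calls: self._auth_provider is already typed as the pinned provider.
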
||||||
|
@ -6,7 +6,7 @@ import asyncio
|
|||||||
from collections.abc import Mapping
|
from collections.abc import Mapping
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
from typing import Any, cast
|
from typing import Any
|
||||||
|
|
||||||
import voluptuous as vol
|
import voluptuous as vol
|
||||||
|
|
||||||
@ -59,7 +59,9 @@ class CommandLineAuthProvider(AuthProvider):
|
|||||||
super().__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
self._user_meta: dict[str, dict[str, Any]] = {}
|
self._user_meta: dict[str, dict[str, Any]] = {}
|
||||||
|
|
||||||
async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
|
async def async_login_flow(
|
||||||
|
self, context: AuthFlowContext | None
|
||||||
|
) -> CommandLineLoginFlow:
|
||||||
"""Return a flow to login."""
|
"""Return a flow to login."""
|
||||||
return CommandLineLoginFlow(self)
|
return CommandLineLoginFlow(self)
|
||||||
|
|
||||||
@ -133,7 +135,7 @@ class CommandLineAuthProvider(AuthProvider):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class CommandLineLoginFlow(LoginFlow):
|
class CommandLineLoginFlow(LoginFlow[CommandLineAuthProvider]):
|
||||||
"""Handler for the login flow."""
|
"""Handler for the login flow."""
|
||||||
|
|
||||||
async def async_step_init(
|
async def async_step_init(
|
||||||
@ -145,9 +147,9 @@ class CommandLineLoginFlow(LoginFlow):
|
|||||||
if user_input is not None:
|
if user_input is not None:
|
||||||
user_input["username"] = user_input["username"].strip()
|
user_input["username"] = user_input["username"].strip()
|
||||||
try:
|
try:
|
||||||
await cast(
|
await self._auth_provider.async_validate_login(
|
||||||
CommandLineAuthProvider, self._auth_provider
|
user_input["username"], user_input["password"]
|
||||||
).async_validate_login(user_input["username"], user_input["password"])
|
)
|
||||||
except InvalidAuthError:
|
except InvalidAuthError:
|
||||||
errors["base"] = "invalid_auth"
|
errors["base"] = "invalid_auth"
|
||||||
|
|
||||||
|
@ -305,7 +305,7 @@ class HassAuthProvider(AuthProvider):
|
|||||||
await data.async_load()
|
await data.async_load()
|
||||||
self.data = data
|
self.data = data
|
||||||
|
|
||||||
async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
|
async def async_login_flow(self, context: AuthFlowContext | None) -> HassLoginFlow:
|
||||||
"""Return a flow to login."""
|
"""Return a flow to login."""
|
||||||
return HassLoginFlow(self)
|
return HassLoginFlow(self)
|
||||||
|
|
||||||
@ -400,7 +400,7 @@ class HassAuthProvider(AuthProvider):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class HassLoginFlow(LoginFlow):
|
class HassLoginFlow(LoginFlow[HassAuthProvider]):
|
||||||
"""Handler for the login flow."""
|
"""Handler for the login flow."""
|
||||||
|
|
||||||
async def async_step_init(
|
async def async_step_init(
|
||||||
@ -411,7 +411,7 @@ class HassLoginFlow(LoginFlow):
|
|||||||
|
|
||||||
if user_input is not None:
|
if user_input is not None:
|
||||||
try:
|
try:
|
||||||
await cast(HassAuthProvider, self._auth_provider).async_validate_login(
|
await self._auth_provider.async_validate_login(
|
||||||
user_input["username"], user_input["password"]
|
user_input["username"], user_input["password"]
|
||||||
)
|
)
|
||||||
except InvalidAuth:
|
except InvalidAuth:
|
||||||
|
@ -4,7 +4,6 @@ from __future__ import annotations
|
|||||||
|
|
||||||
from collections.abc import Mapping
|
from collections.abc import Mapping
|
||||||
import hmac
|
import hmac
|
||||||
from typing import cast
|
|
||||||
|
|
||||||
import voluptuous as vol
|
import voluptuous as vol
|
||||||
|
|
||||||
@ -36,7 +35,9 @@ class InvalidAuthError(HomeAssistantError):
|
|||||||
class ExampleAuthProvider(AuthProvider):
|
class ExampleAuthProvider(AuthProvider):
|
||||||
"""Example auth provider based on hardcoded usernames and passwords."""
|
"""Example auth provider based on hardcoded usernames and passwords."""
|
||||||
|
|
||||||
async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
|
async def async_login_flow(
|
||||||
|
self, context: AuthFlowContext | None
|
||||||
|
) -> ExampleLoginFlow:
|
||||||
"""Return a flow to login."""
|
"""Return a flow to login."""
|
||||||
return ExampleLoginFlow(self)
|
return ExampleLoginFlow(self)
|
||||||
|
|
||||||
@ -93,7 +94,7 @@ class ExampleAuthProvider(AuthProvider):
|
|||||||
return UserMeta(name=name, is_active=True)
|
return UserMeta(name=name, is_active=True)
|
||||||
|
|
||||||
|
|
||||||
class ExampleLoginFlow(LoginFlow):
|
class ExampleLoginFlow(LoginFlow[ExampleAuthProvider]):
|
||||||
"""Handler for the login flow."""
|
"""Handler for the login flow."""
|
||||||
|
|
||||||
async def async_step_init(
|
async def async_step_init(
|
||||||
@ -104,7 +105,7 @@ class ExampleLoginFlow(LoginFlow):
|
|||||||
|
|
||||||
if user_input is not None:
|
if user_input is not None:
|
||||||
try:
|
try:
|
||||||
cast(ExampleAuthProvider, self._auth_provider).async_validate_login(
|
self._auth_provider.async_validate_login(
|
||||||
user_input["username"], user_input["password"]
|
user_input["username"], user_input["password"]
|
||||||
)
|
)
|
||||||
except InvalidAuthError:
|
except InvalidAuthError:
|
||||||
|
@ -21,7 +21,7 @@ import voluptuous as vol
|
|||||||
|
|
||||||
from homeassistant.core import callback
|
from homeassistant.core import callback
|
||||||
from homeassistant.exceptions import HomeAssistantError
|
from homeassistant.exceptions import HomeAssistantError
|
||||||
import homeassistant.helpers.config_validation as cv
|
from homeassistant.helpers import config_validation as cv
|
||||||
from homeassistant.helpers.network import is_cloud_connection
|
from homeassistant.helpers.network import is_cloud_connection
|
||||||
|
|
||||||
from .. import InvalidAuthError
|
from .. import InvalidAuthError
|
||||||
@ -104,7 +104,9 @@ class TrustedNetworksAuthProvider(AuthProvider):
|
|||||||
"""Trusted Networks auth provider does not support MFA."""
|
"""Trusted Networks auth provider does not support MFA."""
|
||||||
return False
|
return False
|
||||||
|
|
||||||
async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
|
async def async_login_flow(
|
||||||
|
self, context: AuthFlowContext | None
|
||||||
|
) -> TrustedNetworksLoginFlow:
|
||||||
"""Return a flow to login."""
|
"""Return a flow to login."""
|
||||||
assert context is not None
|
assert context is not None
|
||||||
ip_addr = cast(IPAddress, context.get("ip_address"))
|
ip_addr = cast(IPAddress, context.get("ip_address"))
|
||||||
@ -214,7 +216,7 @@ class TrustedNetworksAuthProvider(AuthProvider):
|
|||||||
self.async_validate_access(ip_address(remote_ip))
|
self.async_validate_access(ip_address(remote_ip))
|
||||||
|
|
||||||
|
|
||||||
class TrustedNetworksLoginFlow(LoginFlow):
|
class TrustedNetworksLoginFlow(LoginFlow[TrustedNetworksAuthProvider]):
|
||||||
"""Handler for the login flow."""
|
"""Handler for the login flow."""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
@ -235,9 +237,7 @@ class TrustedNetworksLoginFlow(LoginFlow):
|
|||||||
) -> AuthFlowResult:
|
) -> AuthFlowResult:
|
||||||
"""Handle the step of the form."""
|
"""Handle the step of the form."""
|
||||||
try:
|
try:
|
||||||
cast(
|
self._auth_provider.async_validate_access(self._ip_address)
|
||||||
TrustedNetworksAuthProvider, self._auth_provider
|
|
||||||
).async_validate_access(self._ip_address)
|
|
||||||
|
|
||||||
except InvalidAuthError:
|
except InvalidAuthError:
|
||||||
return self.async_abort(reason="not_allowed")
|
return self.async_abort(reason="not_allowed")
|
||||||
|
@ -1,29 +0,0 @@
|
|||||||
"""Enum backports from standard lib.
|
|
||||||
|
|
||||||
This file contained the backport of the StrEnum of Python 3.11.
|
|
||||||
|
|
||||||
Since we have dropped support for Python 3.10, we can remove this backport.
|
|
||||||
This file is kept for now to avoid breaking custom components that might
|
|
||||||
import it.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from enum import StrEnum as _StrEnum
|
|
||||||
from functools import partial
|
|
||||||
|
|
||||||
from homeassistant.helpers.deprecation import (
|
|
||||||
DeprecatedAlias,
|
|
||||||
all_with_deprecated_constants,
|
|
||||||
check_if_deprecated_constant,
|
|
||||||
dir_with_deprecated_constants,
|
|
||||||
)
|
|
||||||
|
|
||||||
# StrEnum deprecated as of 2024.5 use enum.StrEnum instead.
|
|
||||||
_DEPRECATED_StrEnum = DeprecatedAlias(_StrEnum, "enum.StrEnum", "2025.5")
|
|
||||||
|
|
||||||
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
|
|
||||||
__dir__ = partial(
|
|
||||||
dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
|
|
||||||
)
|
|
||||||
__all__ = all_with_deprecated_constants(globals())
|
|
@ -1,31 +0,0 @@
|
|||||||
"""Functools backports from standard lib.
|
|
||||||
|
|
||||||
This file contained the backport of the cached_property implementation of Python 3.12.
|
|
||||||
|
|
||||||
Since we have dropped support for Python 3.11, we can remove this backport.
|
|
||||||
This file is kept for now to avoid breaking custom components that might
|
|
||||||
import it.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
# pylint: disable-next=hass-deprecated-import
|
|
||||||
from functools import cached_property as _cached_property, partial
|
|
||||||
|
|
||||||
from homeassistant.helpers.deprecation import (
|
|
||||||
DeprecatedAlias,
|
|
||||||
all_with_deprecated_constants,
|
|
||||||
check_if_deprecated_constant,
|
|
||||||
dir_with_deprecated_constants,
|
|
||||||
)
|
|
||||||
|
|
||||||
# cached_property deprecated as of 2024.5 use functools.cached_property instead.
|
|
||||||
_DEPRECATED_cached_property = DeprecatedAlias(
|
|
||||||
_cached_property, "functools.cached_property", "2025.5"
|
|
||||||
)
|
|
||||||
|
|
||||||
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
|
|
||||||
__dir__ = partial(
|
|
||||||
dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
|
|
||||||
)
|
|
||||||
__all__ = all_with_deprecated_constants(globals())
|
|
@ -1,6 +1,10 @@
|
|||||||
"""Home Assistant module to handle restoring backups."""
|
"""Home Assistant module to handle restoring backups."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from collections.abc import Iterable
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
|
import hashlib
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
@ -14,7 +18,13 @@ import securetar
|
|||||||
from .const import __version__ as HA_VERSION
|
from .const import __version__ as HA_VERSION
|
||||||
|
|
||||||
RESTORE_BACKUP_FILE = ".HA_RESTORE"
|
RESTORE_BACKUP_FILE = ".HA_RESTORE"
|
||||||
KEEP_PATHS = ("backups",)
|
RESTORE_BACKUP_RESULT_FILE = ".HA_RESTORE_RESULT"
|
||||||
|
KEEP_BACKUPS = ("backups",)
|
||||||
|
KEEP_DATABASE = (
|
||||||
|
"home-assistant_v2.db",
|
||||||
|
"home-assistant_v2.db-wal",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
@ -24,6 +34,21 @@ class RestoreBackupFileContent:
|
|||||||
"""Definition for restore backup file content."""
|
"""Definition for restore backup file content."""
|
||||||
|
|
||||||
backup_file_path: Path
|
backup_file_path: Path
|
||||||
|
password: str | None
|
||||||
|
remove_after_restore: bool
|
||||||
|
restore_database: bool
|
||||||
|
restore_homeassistant: bool
|
||||||
|
|
||||||
|
|
||||||
|
def password_to_key(password: str) -> bytes:
|
||||||
|
"""Generate a AES Key from password.
|
||||||
|
|
||||||
|
Matches the implementation in supervisor.backups.utils.password_to_key.
|
||||||
|
"""
|
||||||
|
key: bytes = password.encode()
|
||||||
|
for _ in range(100):
|
||||||
|
key = hashlib.sha256(key).digest()
|
||||||
|
return key[:16]
|
||||||
|
|
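
A usage sketch of password_to_key as defined above: the derivation is deterministic, so the same password always yields the same 16-byte AES-128 key.

import hashlib


def password_to_key(password: str) -> bytes:
    key: bytes = password.encode()
    for _ in range(100):  # 100 rounds of SHA-256 key stretching
        key = hashlib.sha256(key).digest()
    return key[:16]  # truncate to an AES-128 key


assert password_to_key("hunter2") == password_to_key("hunter2")
assert len(password_to_key("hunter2")) == 16
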
||||||
|
|
||||||
def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent | None:
|
def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent | None:
|
||||||
@ -32,20 +57,30 @@ def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent |
|
|||||||
try:
|
try:
|
||||||
instruction_content = json.loads(instruction_path.read_text(encoding="utf-8"))
|
instruction_content = json.loads(instruction_path.read_text(encoding="utf-8"))
|
||||||
return RestoreBackupFileContent(
|
return RestoreBackupFileContent(
|
||||||
backup_file_path=Path(instruction_content["path"])
|
backup_file_path=Path(instruction_content["path"]),
|
||||||
|
password=instruction_content["password"],
|
||||||
|
remove_after_restore=instruction_content["remove_after_restore"],
|
||||||
|
restore_database=instruction_content["restore_database"],
|
||||||
|
restore_homeassistant=instruction_content["restore_homeassistant"],
|
||||||
)
|
)
|
||||||
except (FileNotFoundError, json.JSONDecodeError):
|
except FileNotFoundError:
|
||||||
return None
|
return None
|
||||||
|
except (KeyError, json.JSONDecodeError) as err:
|
||||||
|
_write_restore_result_file(config_dir, False, err)
|
||||||
|
return None
|
||||||
|
finally:
|
||||||
|
# Always remove the backup instruction file to prevent a boot loop
|
||||||
|
instruction_path.unlink(missing_ok=True)
|
||||||
|
|
||||||
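
Given the keys read above, a .HA_RESTORE instruction file carries a JSON payload along these lines (all values illustrative). Note the finally branch: the instruction file is removed whether parsing succeeds or fails, so a malformed file cannot cause a restore-retry boot loop.

import json

instruction = json.dumps(
    {
        "path": "/config/backups/abc123.tar",  # hypothetical backup path
        "password": None,  # or the backup's encryption password
        "remove_after_restore": True,
        "restore_database": True,
        "restore_homeassistant": True,
    }
)
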
|
|
||||||
def _clear_configuration_directory(config_dir: Path) -> None:
|
def _clear_configuration_directory(config_dir: Path, keep: Iterable[str]) -> None:
|
||||||
"""Delete all files and directories in the config directory except for the backups directory."""
|
"""Delete all files and directories in the config directory except entries in the keep list."""
|
||||||
keep_paths = [config_dir.joinpath(path) for path in KEEP_PATHS]
|
keep_paths = [config_dir.joinpath(path) for path in keep]
|
||||||
config_contents = sorted(
|
entries_to_remove = sorted(
|
||||||
[entry for entry in config_dir.iterdir() if entry not in keep_paths]
|
entry for entry in config_dir.iterdir() if entry not in keep_paths
|
||||||
)
|
)
|
||||||
|
|
||||||
for entry in config_contents:
|
for entry in entries_to_remove:
|
||||||
entrypath = config_dir.joinpath(entry)
|
entrypath = config_dir.joinpath(entry)
|
||||||
|
|
||||||
if entrypath.is_file():
|
if entrypath.is_file():
|
||||||
@ -54,12 +89,15 @@ def _clear_configuration_directory(config_dir: Path) -> None:
|
|||||||
shutil.rmtree(entrypath)
|
shutil.rmtree(entrypath)
|
||||||
|
|
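
A self-contained sketch of the keep-list behavior against a throwaway directory; the file names follow the KEEP_* constants above:

import shutil
from pathlib import Path
from tempfile import TemporaryDirectory

with TemporaryDirectory() as tmp:
    config_dir = Path(tmp)
    (config_dir / "backups").mkdir()
    (config_dir / "home-assistant_v2.db").touch()
    (config_dir / "stale.yaml").touch()

    keep = ("backups", "home-assistant_v2.db")
    keep_paths = [config_dir.joinpath(path) for path in keep]
    for entry in sorted(e for e in config_dir.iterdir() if e not in keep_paths):
        if entry.is_file():
            entry.unlink()
        else:
            shutil.rmtree(entry)  # directories are removed recursively

    assert {p.name for p in config_dir.iterdir()} == set(keep)
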
||||||
|
|
||||||
def _extract_backup(config_dir: Path, backup_file_path: Path) -> None:
|
def _extract_backup(
|
||||||
|
config_dir: Path,
|
||||||
|
restore_content: RestoreBackupFileContent,
|
||||||
|
) -> None:
|
||||||
"""Extract the backup file to the config directory."""
|
"""Extract the backup file to the config directory."""
|
||||||
with (
|
with (
|
||||||
TemporaryDirectory() as tempdir,
|
TemporaryDirectory() as tempdir,
|
||||||
securetar.SecureTarFile(
|
securetar.SecureTarFile(
|
||||||
backup_file_path,
|
restore_content.backup_file_path,
|
||||||
gzip=False,
|
gzip=False,
|
||||||
mode="r",
|
mode="r",
|
||||||
) as ostf,
|
) as ostf,
|
||||||
@ -85,25 +123,62 @@ def _extract_backup(config_dir: Path, backup_file_path: Path) -> None:
|
|||||||
Path(
|
Path(
|
||||||
tempdir,
|
tempdir,
|
||||||
"extracted",
|
"extracted",
|
||||||
f"homeassistant.tar{'.gz' if backup_meta["compressed"] else ''}",
|
f"homeassistant.tar{'.gz' if backup_meta['compressed'] else ''}",
|
||||||
),
|
),
|
||||||
gzip=backup_meta["compressed"],
|
gzip=backup_meta["compressed"],
|
||||||
|
key=password_to_key(restore_content.password)
|
||||||
|
if restore_content.password is not None
|
||||||
|
else None,
|
||||||
mode="r",
|
mode="r",
|
||||||
) as istf:
|
) as istf:
|
||||||
for member in istf.getmembers():
|
|
||||||
if member.name == "data":
|
|
||||||
continue
|
|
||||||
member.name = member.name.replace("data/", "")
|
|
||||||
_clear_configuration_directory(config_dir)
|
|
||||||
istf.extractall(
|
istf.extractall(
|
||||||
path=config_dir,
|
path=Path(tempdir, "homeassistant"),
|
||||||
members=[
|
members=securetar.secure_path(istf),
|
||||||
member
|
|
||||||
for member in securetar.secure_path(istf)
|
|
||||||
if member.name != "data"
|
|
||||||
],
|
|
||||||
filter="fully_trusted",
|
filter="fully_trusted",
|
||||||
)
|
)
|
||||||
|
if restore_content.restore_homeassistant:
|
||||||
|
keep = list(KEEP_BACKUPS)
|
||||||
|
if not restore_content.restore_database:
|
||||||
|
keep.extend(KEEP_DATABASE)
|
||||||
|
_clear_configuration_directory(config_dir, keep)
|
||||||
|
shutil.copytree(
|
||||||
|
Path(tempdir, "homeassistant", "data"),
|
||||||
|
config_dir,
|
||||||
|
dirs_exist_ok=True,
|
||||||
|
ignore=shutil.ignore_patterns(*(keep)),
|
||||||
|
ignore_dangling_symlinks=True,
|
||||||
|
)
|
||||||
|
elif restore_content.restore_database:
|
||||||
|
for entry in KEEP_DATABASE:
|
||||||
|
entrypath = config_dir / entry
|
||||||
|
|
||||||
|
if entrypath.is_file():
|
||||||
|
entrypath.unlink()
|
||||||
|
elif entrypath.is_dir():
|
||||||
|
shutil.rmtree(entrypath)
|
||||||
|
|
||||||
|
for entry in KEEP_DATABASE:
|
||||||
|
shutil.copy(
|
||||||
|
Path(tempdir, "homeassistant", "data", entry),
|
||||||
|
config_dir,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
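
The restore path now extracts into a temporary directory and copies the data over the live config directory. A minimal sketch of the copytree merge semantics used above, with illustrative paths:

import shutil
from pathlib import Path
from tempfile import TemporaryDirectory

with TemporaryDirectory() as src_tmp, TemporaryDirectory() as dst_tmp:
    src = Path(src_tmp, "data")
    src.mkdir()
    (src / "configuration.yaml").write_text("restored")
    (src / "home-assistant_v2.db").write_text("backup db")

    config_dir = Path(dst_tmp)
    (config_dir / "home-assistant_v2.db").write_text("live db")

    keep = ["home-assistant_v2.db"]  # e.g. when restore_database is False
    shutil.copytree(
        src,
        config_dir,
        dirs_exist_ok=True,  # merge into the existing directory
        ignore=shutil.ignore_patterns(*keep),  # leave kept files untouched
        ignore_dangling_symlinks=True,
    )

    assert (config_dir / "home-assistant_v2.db").read_text() == "live db"
    assert (config_dir / "configuration.yaml").read_text() == "restored"
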
||||||
|
def _write_restore_result_file(
|
||||||
|
config_dir: Path, success: bool, error: Exception | None
|
||||||
|
) -> None:
|
||||||
|
"""Write the restore result file."""
|
||||||
|
result_path = config_dir.joinpath(RESTORE_BACKUP_RESULT_FILE)
|
||||||
|
result_path.write_text(
|
||||||
|
json.dumps(
|
||||||
|
{
|
||||||
|
"success": success,
|
||||||
|
"error": str(error) if error else None,
|
||||||
|
"error_type": str(type(error).__name__) if error else None,
|
||||||
|
}
|
||||||
|
),
|
||||||
|
encoding="utf-8",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
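
A sketch of the payload _write_restore_result_file produces, for example for the KeyError raised when an instruction key is missing; values illustrative:

import json

err = KeyError("password")
result = json.dumps(
    {
        "success": False,
        "error": str(err),
        "error_type": type(err).__name__,
    }
)
# -> {"success": false, "error": "'password'", "error_type": "KeyError"}
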
||||||
def restore_backup(config_dir_path: str) -> bool:
|
def restore_backup(config_dir_path: str) -> bool:
|
||||||
@ -119,8 +194,20 @@ def restore_backup(config_dir_path: str) -> bool:
|
|||||||
backup_file_path = restore_content.backup_file_path
|
backup_file_path = restore_content.backup_file_path
|
||||||
_LOGGER.info("Restoring %s", backup_file_path)
|
_LOGGER.info("Restoring %s", backup_file_path)
|
||||||
try:
|
try:
|
||||||
_extract_backup(config_dir, backup_file_path)
|
_extract_backup(
|
||||||
|
config_dir=config_dir,
|
||||||
|
restore_content=restore_content,
|
||||||
|
)
|
||||||
except FileNotFoundError as err:
|
except FileNotFoundError as err:
|
||||||
raise ValueError(f"Backup file {backup_file_path} does not exist") from err
|
file_not_found = ValueError(f"Backup file {backup_file_path} does not exist")
|
||||||
|
_write_restore_result_file(config_dir, False, file_not_found)
|
||||||
|
raise file_not_found from err
|
||||||
|
except Exception as err:
|
||||||
|
_write_restore_result_file(config_dir, False, err)
|
||||||
|
raise
|
||||||
|
else:
|
||||||
|
_write_restore_result_file(config_dir, True, None)
|
||||||
|
if restore_content.remove_after_restore:
|
||||||
|
backup_file_path.unlink(missing_ok=True)
|
||||||
_LOGGER.info("Restore complete, restarting")
|
_LOGGER.info("Restore complete, restarting")
|
||||||
return True
|
return True
|
||||||
|
@ -31,7 +31,7 @@ def _check_import_call_allowed(mapped_args: dict[str, Any]) -> bool:
|
|||||||
def _check_file_allowed(mapped_args: dict[str, Any]) -> bool:
|
def _check_file_allowed(mapped_args: dict[str, Any]) -> bool:
|
||||||
# If the file is in /proc we can ignore it.
|
# If the file is in /proc we can ignore it.
|
||||||
args = mapped_args["args"]
|
args = mapped_args["args"]
|
||||||
path = args[0] if type(args[0]) is str else str(args[0]) # noqa: E721
|
path = args[0] if type(args[0]) is str else str(args[0])
|
||||||
return path.startswith(ALLOWED_FILE_PREFIXES)
|
return path.startswith(ALLOWED_FILE_PREFIXES)
|
||||||
|
|
||||||
|
|
||||||
@ -50,6 +50,12 @@ def _check_sleep_call_allowed(mapped_args: dict[str, Any]) -> bool:
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def _check_load_verify_locations_call_allowed(mapped_args: dict[str, Any]) -> bool:
|
||||||
|
# If only cadata is passed, we can ignore it
|
||||||
|
kwargs = mapped_args.get("kwargs")
|
||||||
|
return bool(kwargs and len(kwargs) == 1 and "cadata" in kwargs)
|
||||||
|
|
||||||
|
|
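
The new check lets SSLContext.load_verify_locations(cadata=...) run in the event loop: in-memory PEM data involves no disk I/O, unlike cafile or capath. A standalone sketch of the predicate's behavior:

from typing import Any


def check_load_verify_locations_call_allowed(mapped_args: dict[str, Any]) -> bool:
    # Only a lone in-memory `cadata` argument avoids blocking disk I/O.
    kwargs = mapped_args.get("kwargs")
    return bool(kwargs and len(kwargs) == 1 and "cadata" in kwargs)


assert check_load_verify_locations_call_allowed({"kwargs": {"cadata": "PEM..."}})
assert not check_load_verify_locations_call_allowed(
    {"kwargs": {"cafile": "/etc/ssl/cert.pem"}}
)
assert not check_load_verify_locations_call_allowed({"kwargs": {}})
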
||||||
@dataclass(slots=True, frozen=True)
|
@dataclass(slots=True, frozen=True)
|
||||||
class BlockingCall:
|
class BlockingCall:
|
||||||
"""Class to hold information about a blocking call."""
|
"""Class to hold information about a blocking call."""
|
||||||
@ -158,7 +164,7 @@ _BLOCKING_CALLS: tuple[BlockingCall, ...] = (
|
|||||||
original_func=SSLContext.load_verify_locations,
|
original_func=SSLContext.load_verify_locations,
|
||||||
object=SSLContext,
|
object=SSLContext,
|
||||||
function="load_verify_locations",
|
function="load_verify_locations",
|
||||||
check_allowed=None,
|
check_allowed=_check_load_verify_locations_call_allowed,
|
||||||
strict=False,
|
strict=False,
|
||||||
strict_core=False,
|
strict_core=False,
|
||||||
skip_for_tests=True,
|
skip_for_tests=True,
|
||||||
@ -172,6 +178,15 @@ _BLOCKING_CALLS: tuple[BlockingCall, ...] = (
|
|||||||
strict_core=False,
|
strict_core=False,
|
||||||
skip_for_tests=True,
|
skip_for_tests=True,
|
||||||
),
|
),
|
||||||
|
BlockingCall(
|
||||||
|
original_func=SSLContext.set_default_verify_paths,
|
||||||
|
object=SSLContext,
|
||||||
|
function="set_default_verify_paths",
|
||||||
|
check_allowed=None,
|
||||||
|
strict=False,
|
||||||
|
strict_core=False,
|
||||||
|
skip_for_tests=True,
|
||||||
|
),
|
||||||
BlockingCall(
|
BlockingCall(
|
||||||
original_func=Path.open,
|
original_func=Path.open,
|
||||||
object=Path,
|
object=Path,
|
||||||
|
@ -53,6 +53,7 @@ from .components import (
|
|||||||
logbook as logbook_pre_import, # noqa: F401
|
logbook as logbook_pre_import, # noqa: F401
|
||||||
lovelace as lovelace_pre_import, # noqa: F401
|
lovelace as lovelace_pre_import, # noqa: F401
|
||||||
onboarding as onboarding_pre_import, # noqa: F401
|
onboarding as onboarding_pre_import, # noqa: F401
|
||||||
|
person as person_pre_import, # noqa: F401
|
||||||
recorder as recorder_import, # noqa: F401 - not named pre_import since it has requirements
|
recorder as recorder_import, # noqa: F401 - not named pre_import since it has requirements
|
||||||
repairs as repairs_pre_import, # noqa: F401
|
repairs as repairs_pre_import, # noqa: F401
|
||||||
search as search_pre_import, # noqa: F401
|
search as search_pre_import, # noqa: F401
|
||||||
@ -74,23 +75,27 @@ from .core_config import async_process_ha_core_config
|
|||||||
from .exceptions import HomeAssistantError
|
from .exceptions import HomeAssistantError
|
||||||
from .helpers import (
|
from .helpers import (
|
||||||
area_registry,
|
area_registry,
|
||||||
|
backup,
|
||||||
category_registry,
|
category_registry,
|
||||||
config_validation as cv,
|
config_validation as cv,
|
||||||
device_registry,
|
device_registry,
|
||||||
entity,
|
entity,
|
||||||
entity_registry,
|
entity_registry,
|
||||||
floor_registry,
|
floor_registry,
|
||||||
|
frame,
|
||||||
issue_registry,
|
issue_registry,
|
||||||
label_registry,
|
label_registry,
|
||||||
recorder,
|
recorder,
|
||||||
restore_state,
|
restore_state,
|
||||||
template,
|
template,
|
||||||
translation,
|
translation,
|
||||||
|
trigger,
|
||||||
)
|
)
|
||||||
from .helpers.dispatcher import async_dispatcher_send_internal
|
from .helpers.dispatcher import async_dispatcher_send_internal
|
||||||
from .helpers.storage import get_internal_store_manager
|
from .helpers.storage import get_internal_store_manager
|
||||||
from .helpers.system_info import async_get_system_info, is_official_image
|
from .helpers.system_info import async_get_system_info
|
||||||
from .helpers.typing import ConfigType
|
from .helpers.typing import ConfigType
|
||||||
|
from .loader import Integration
|
||||||
from .setup import (
|
from .setup import (
|
||||||
# _setup_started is marked as protected to make it clear
|
# _setup_started is marked as protected to make it clear
|
||||||
# that it is not part of the public API and should not be used
|
# that it is not part of the public API and should not be used
|
||||||
@ -106,11 +111,17 @@ from .util.async_ import create_eager_task
|
|||||||
from .util.hass_dict import HassKey
|
from .util.hass_dict import HassKey
|
||||||
from .util.logging import async_activate_log_queue_handler
|
from .util.logging import async_activate_log_queue_handler
|
||||||
from .util.package import async_get_user_site, is_docker_env, is_virtual_env
|
from .util.package import async_get_user_site, is_docker_env, is_virtual_env
|
||||||
|
from .util.system_info import is_official_image
|
||||||
|
|
||||||
with contextlib.suppress(ImportError):
|
with contextlib.suppress(ImportError):
|
||||||
# Ensure anyio backend is imported to avoid it being imported in the event loop
|
# Ensure anyio backend is imported to avoid it being imported in the event loop
|
||||||
from anyio._backends import _asyncio # noqa: F401
|
from anyio._backends import _asyncio # noqa: F401
|
||||||
|
|
||||||
|
with contextlib.suppress(ImportError):
|
||||||
|
# httpx will import trio if it is installed which does
|
||||||
|
# blocking I/O in the event loop. We want to avoid that.
|
||||||
|
import trio # noqa: F401
|
||||||
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .runner import RuntimeConfig
|
from .runner import RuntimeConfig
|
||||||
@ -128,14 +139,12 @@ DATA_REGISTRIES_LOADED: HassKey[None] = HassKey("bootstrap_registries_loaded")
|
|||||||
LOG_SLOW_STARTUP_INTERVAL = 60
|
LOG_SLOW_STARTUP_INTERVAL = 60
|
||||||
SLOW_STARTUP_CHECK_INTERVAL = 1
|
SLOW_STARTUP_CHECK_INTERVAL = 1
|
||||||
|
|
||||||
|
STAGE_0_SUBSTAGE_TIMEOUT = 60
|
||||||
STAGE_1_TIMEOUT = 120
|
STAGE_1_TIMEOUT = 120
|
||||||
STAGE_2_TIMEOUT = 300
|
STAGE_2_TIMEOUT = 300
|
||||||
WRAP_UP_TIMEOUT = 300
|
WRAP_UP_TIMEOUT = 300
|
||||||
COOLDOWN_TIME = 60
|
COOLDOWN_TIME = 60
|
||||||
|
|
||||||
|
|
||||||
DEBUGGER_INTEGRATIONS = {"debugpy"}
|
|
||||||
|
|
||||||
# Core integrations are unconditionally loaded
|
# Core integrations are unconditionally loaded
|
||||||
CORE_INTEGRATIONS = {"homeassistant", "persistent_notification"}
|
CORE_INTEGRATIONS = {"homeassistant", "persistent_notification"}
|
||||||
|
|
||||||
@ -146,6 +155,10 @@ LOGGING_AND_HTTP_DEPS_INTEGRATIONS = {
|
|||||||
"isal",
|
"isal",
|
||||||
# Set log levels
|
# Set log levels
|
||||||
"logger",
|
"logger",
|
||||||
|
# Ensure network config is available
|
||||||
|
# before hassio or any other integration is
|
||||||
|
# loaded that might create an aiohttp client session
|
||||||
|
"network",
|
||||||
# Error logging
|
# Error logging
|
||||||
"system_log",
|
"system_log",
|
||||||
"sentry",
|
"sentry",
|
||||||
@ -156,12 +169,25 @@ FRONTEND_INTEGRATIONS = {
|
|||||||
# visible in frontend
|
# visible in frontend
|
||||||
"frontend",
|
"frontend",
|
||||||
}
|
}
|
||||||
RECORDER_INTEGRATIONS = {
|
# Stage 0 is divided into substages. Each substage has a name, a set of integrations and a timeout.
|
||||||
# Setup after frontend
|
# The substage containing recorder should have no timeout, as it could cancel a database migration.
|
||||||
# To record data
|
# Recorder freezes "recorder" timeout during a migration, but it does not freeze other timeouts.
|
||||||
"recorder",
|
# If we add timeouts to the frontend substages, we should make sure they don't apply in recovery mode.
|
||||||
}
|
STAGE_0_INTEGRATIONS = (
|
||||||
DISCOVERY_INTEGRATIONS = ("bluetooth", "dhcp", "ssdp", "usb", "zeroconf")
|
# Load logging and http deps as soon as possible
|
||||||
|
("logging, http deps", LOGGING_AND_HTTP_DEPS_INTEGRATIONS, None),
|
||||||
|
# Setup frontend
|
||||||
|
("frontend", FRONTEND_INTEGRATIONS, None),
|
||||||
|
# Setup recorder
|
||||||
|
("recorder", {"recorder"}, None),
|
||||||
|
# Start up debuggers. Start these first in case they want to wait.
|
||||||
|
("debugger", {"debugpy"}, STAGE_0_SUBSTAGE_TIMEOUT),
|
||||||
|
# Zeroconf is used for mdns resolution in aiohttp client helper.
|
||||||
|
("zeroconf", {"zeroconf"}, STAGE_0_SUBSTAGE_TIMEOUT),
|
||||||
|
)
|
||||||
|
|
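
A minimal sketch of how the (name, integrations, timeout) triples drive stage 0, with None meaning the substage must never be cancelled. The real code uses hass.timeout; asyncio.timeout here is only a stand-in and assumes Python 3.11+:

import asyncio

STAGE_0 = (
    ("logging, http deps", {"logger", "http"}, None),
    ("recorder", {"recorder"}, None),  # never cancel a database migration
    ("debugger", {"debugpy"}, 60),
)


async def set_up(domains: set[str]) -> None:
    await asyncio.sleep(0)  # stand-in for the real setup work


async def run_stage_0() -> None:
    for name, domains, timeout in STAGE_0:
        if timeout is None:
            await set_up(domains)
            continue
        try:
            async with asyncio.timeout(timeout):
                await set_up(domains)
        except TimeoutError:
            print(f"Setup timed out for substage {name} - moving forward")


asyncio.run(run_stage_0())
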
||||||
|
DISCOVERY_INTEGRATIONS = ("bluetooth", "dhcp", "ssdp", "usb")
|
||||||
|
# Stage 1 integrations are not to be preimported in bootstrap.
|
||||||
STAGE_1_INTEGRATIONS = {
|
STAGE_1_INTEGRATIONS = {
|
||||||
# We need to make sure discovery integrations
|
# We need to make sure discovery integrations
|
||||||
# update their deps before stage 2 integrations
|
# update their deps before stage 2 integrations
|
||||||
@ -176,6 +202,7 @@ STAGE_1_INTEGRATIONS = {
|
|||||||
# Ensure supervisor is available
|
# Ensure supervisor is available
|
||||||
"hassio",
|
"hassio",
|
||||||
}
|
}
|
||||||
|
|
||||||
DEFAULT_INTEGRATIONS = {
|
DEFAULT_INTEGRATIONS = {
|
||||||
# These integrations are set up unless recovery mode is activated.
|
# These integrations are set up unless recovery mode is activated.
|
||||||
#
|
#
|
||||||
@ -216,22 +243,12 @@ DEFAULT_INTEGRATIONS_SUPERVISOR = {
|
|||||||
# These integrations are set up if using the Supervisor
|
# These integrations are set up if using the Supervisor
|
||||||
"hassio",
|
"hassio",
|
||||||
}
|
}
|
||||||
|
|
||||||
CRITICAL_INTEGRATIONS = {
|
CRITICAL_INTEGRATIONS = {
|
||||||
# Recovery mode is activated if these integrations fail to set up
|
# Recovery mode is activated if these integrations fail to set up
|
||||||
"frontend",
|
"frontend",
|
||||||
}
|
}
|
||||||
|
|
||||||
SETUP_ORDER = (
|
|
||||||
# Load logging and http deps as soon as possible
|
|
||||||
("logging, http deps", LOGGING_AND_HTTP_DEPS_INTEGRATIONS),
|
|
||||||
# Setup frontend
|
|
||||||
("frontend", FRONTEND_INTEGRATIONS),
|
|
||||||
# Setup recorder
|
|
||||||
("recorder", RECORDER_INTEGRATIONS),
|
|
||||||
# Start up debuggers. Start these first in case they want to wait.
|
|
||||||
("debugger", DEBUGGER_INTEGRATIONS),
|
|
||||||
)
|
|
||||||
|
|
||||||
#
|
#
|
||||||
# Storage keys we are likely to load during startup
|
# Storage keys we are likely to load during startup
|
||||||
# in order of when we expect to load them.
|
# in order of when we expect to load them.
|
||||||
@ -252,6 +269,7 @@ PRELOAD_STORAGE = [
|
|||||||
"assist_pipeline.pipelines",
|
"assist_pipeline.pipelines",
|
||||||
"core.analytics",
|
"core.analytics",
|
||||||
"auth_module.totp",
|
"auth_module.totp",
|
||||||
|
"backup",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
@ -282,14 +300,6 @@ async def async_setup_hass(
|
|||||||
|
|
||||||
return hass
|
return hass
|
||||||
|
|
||||||
async def stop_hass(hass: core.HomeAssistant) -> None:
|
|
||||||
"""Stop hass."""
|
|
||||||
# Ask integrations to shut down. It's messy but we can't
|
|
||||||
# do a clean stop without knowing what is broken
|
|
||||||
with contextlib.suppress(TimeoutError):
|
|
||||||
async with hass.timeout.async_timeout(10):
|
|
||||||
await hass.async_stop()
|
|
||||||
|
|
||||||
hass = await create_hass()
|
hass = await create_hass()
|
||||||
|
|
||||||
if runtime_config.skip_pip or runtime_config.skip_pip_packages:
|
if runtime_config.skip_pip or runtime_config.skip_pip_packages:
|
||||||
@ -305,10 +315,10 @@ async def async_setup_hass(
|
|||||||
|
|
||||||
block_async_io.enable()
|
block_async_io.enable()
|
||||||
|
|
||||||
config_dict = None
|
|
||||||
basic_setup_success = False
|
|
||||||
|
|
||||||
if not (recovery_mode := runtime_config.recovery_mode):
|
if not (recovery_mode := runtime_config.recovery_mode):
|
||||||
|
config_dict = None
|
||||||
|
basic_setup_success = False
|
||||||
|
|
||||||
await hass.async_add_executor_job(conf_util.process_ha_config_upgrade, hass)
|
await hass.async_add_executor_job(conf_util.process_ha_config_upgrade, hass)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@ -326,39 +336,43 @@ async def async_setup_hass(
|
|||||||
await async_from_config_dict(config_dict, hass) is not None
|
await async_from_config_dict(config_dict, hass) is not None
|
||||||
)
|
)
|
||||||
|
|
||||||
if config_dict is None:
|
if config_dict is None:
|
||||||
recovery_mode = True
|
recovery_mode = True
|
||||||
await stop_hass(hass)
|
await hass.async_stop(force=True)
|
||||||
hass = await create_hass()
|
hass = await create_hass()
|
||||||
|
|
||||||
elif not basic_setup_success:
|
elif not basic_setup_success:
|
||||||
_LOGGER.warning("Unable to set up core integrations. Activating recovery mode")
|
_LOGGER.warning(
|
||||||
recovery_mode = True
|
"Unable to set up core integrations. Activating recovery mode"
|
||||||
await stop_hass(hass)
|
)
|
||||||
hass = await create_hass()
|
recovery_mode = True
|
||||||
|
await hass.async_stop(force=True)
|
||||||
|
hass = await create_hass()
|
||||||
|
|
||||||
elif any(domain not in hass.config.components for domain in CRITICAL_INTEGRATIONS):
|
elif any(
|
||||||
_LOGGER.warning(
|
domain not in hass.config.components for domain in CRITICAL_INTEGRATIONS
|
||||||
"Detected that %s did not load. Activating recovery mode",
|
):
|
||||||
",".join(CRITICAL_INTEGRATIONS),
|
_LOGGER.warning(
|
||||||
)
|
"Detected that %s did not load. Activating recovery mode",
|
||||||
|
",".join(CRITICAL_INTEGRATIONS),
|
||||||
|
)
|
||||||
|
|
||||||
old_config = hass.config
|
old_config = hass.config
|
||||||
old_logging = hass.data.get(DATA_LOGGING)
|
old_logging = hass.data.get(DATA_LOGGING)
|
||||||
|
|
||||||
recovery_mode = True
|
recovery_mode = True
|
||||||
await stop_hass(hass)
|
await hass.async_stop(force=True)
|
||||||
hass = await create_hass()
|
hass = await create_hass()
|
||||||
|
|
||||||
if old_logging:
|
if old_logging:
|
||||||
hass.data[DATA_LOGGING] = old_logging
|
hass.data[DATA_LOGGING] = old_logging
|
||||||
hass.config.debug = old_config.debug
|
hass.config.debug = old_config.debug
|
||||||
hass.config.skip_pip = old_config.skip_pip
|
hass.config.skip_pip = old_config.skip_pip
|
||||||
hass.config.skip_pip_packages = old_config.skip_pip_packages
|
hass.config.skip_pip_packages = old_config.skip_pip_packages
|
||||||
hass.config.internal_url = old_config.internal_url
|
hass.config.internal_url = old_config.internal_url
|
||||||
hass.config.external_url = old_config.external_url
|
hass.config.external_url = old_config.external_url
|
||||||
# Setup loader cache after the config dir has been set
|
# Setup loader cache after the config dir has been set
|
||||||
loader.async_setup(hass)
|
loader.async_setup(hass)
|
||||||
|
|
||||||
if recovery_mode:
|
if recovery_mode:
|
||||||
_LOGGER.info("Starting in recovery mode")
|
_LOGGER.info("Starting in recovery mode")
|
||||||
@ -381,7 +395,7 @@ async def async_setup_hass(
|
|||||||
|
|
||||||
def open_hass_ui(hass: core.HomeAssistant) -> None:
|
def open_hass_ui(hass: core.HomeAssistant) -> None:
|
||||||
"""Open the UI."""
|
"""Open the UI."""
|
||||||
import webbrowser # pylint: disable=import-outside-toplevel
|
import webbrowser # noqa: PLC0415
|
||||||
|
|
||||||
if hass.config.api is None or "frontend" not in hass.config.components:
|
if hass.config.api is None or "frontend" not in hass.config.components:
|
||||||
_LOGGER.warning("Cannot launch the UI because frontend not loaded")
|
_LOGGER.warning("Cannot launch the UI because frontend not loaded")
|
||||||
@ -421,9 +435,10 @@ async def async_load_base_functionality(hass: core.HomeAssistant) -> None:
|
|||||||
if DATA_REGISTRIES_LOADED in hass.data:
|
if DATA_REGISTRIES_LOADED in hass.data:
|
||||||
return
|
return
|
||||||
hass.data[DATA_REGISTRIES_LOADED] = None
|
hass.data[DATA_REGISTRIES_LOADED] = None
|
||||||
translation.async_setup(hass)
|
|
||||||
entity.async_setup(hass)
|
entity.async_setup(hass)
|
||||||
|
frame.async_setup(hass)
|
||||||
template.async_setup(hass)
|
template.async_setup(hass)
|
||||||
|
translation.async_setup(hass)
|
||||||
await asyncio.gather(
|
await asyncio.gather(
|
||||||
create_eager_task(get_internal_store_manager(hass).async_initialize()),
|
create_eager_task(get_internal_store_manager(hass).async_initialize()),
|
||||||
create_eager_task(area_registry.async_load(hass)),
|
create_eager_task(area_registry.async_load(hass)),
|
||||||
@ -438,6 +453,7 @@ async def async_load_base_functionality(hass: core.HomeAssistant) -> None:
|
|||||||
create_eager_task(restore_state.async_load(hass)),
|
create_eager_task(restore_state.async_load(hass)),
|
||||||
create_eager_task(hass.config_entries.async_initialize()),
|
create_eager_task(hass.config_entries.async_initialize()),
|
||||||
create_eager_task(async_get_system_info(hass)),
|
create_eager_task(async_get_system_info(hass)),
|
||||||
|
create_eager_task(trigger.async_setup(hass)),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -515,7 +531,7 @@ async def async_from_config_dict(
|
|||||||
issue_registry.async_create_issue(
|
issue_registry.async_create_issue(
|
||||||
hass,
|
hass,
|
||||||
core.DOMAIN,
|
core.DOMAIN,
|
||||||
"python_version",
|
f"python_version_{required_python_version}",
|
||||||
is_fixable=False,
|
is_fixable=False,
|
||||||
severity=issue_registry.IssueSeverity.WARNING,
|
severity=issue_registry.IssueSeverity.WARNING,
|
||||||
breaks_in_ha_version=REQUIRED_NEXT_PYTHON_HA_RELEASE,
|
breaks_in_ha_version=REQUIRED_NEXT_PYTHON_HA_RELEASE,
|
||||||
@ -547,8 +563,7 @@ async def async_enable_logging(
|
|||||||
|
|
||||||
if not log_no_color:
|
if not log_no_color:
|
||||||
try:
|
try:
|
||||||
# pylint: disable-next=import-outside-toplevel
|
from colorlog import ColoredFormatter # noqa: PLC0415
|
||||||
from colorlog import ColoredFormatter
|
|
||||||
|
|
||||||
# basicConfig must be called after importing colorlog in order to
|
# basicConfig must be called after importing colorlog in order to
|
||||||
# ensure that the handlers it sets up wrap the correct streams.
|
# ensure that the handlers it sets up wrap the correct streams.
|
||||||
@ -644,11 +659,10 @@ def _create_log_file(
|
|||||||
err_handler = _RotatingFileHandlerWithoutShouldRollOver(
|
err_handler = _RotatingFileHandlerWithoutShouldRollOver(
|
||||||
err_log_path, backupCount=1
|
err_log_path, backupCount=1
|
||||||
)
|
)
|
||||||
|
try:
|
||||||
try:
|
err_handler.doRollover()
|
||||||
err_handler.doRollover()
|
except OSError as err:
|
||||||
except OSError as err:
|
_LOGGER.error("Error rolling over log file: %s", err)
|
||||||
_LOGGER.error("Error rolling over log file: %s", err)
|
|
||||||
|
|
||||||
return err_handler
|
return err_handler
|
||||||
|
|
||||||
@ -677,7 +691,6 @@ async def async_mount_local_lib_path(config_dir: str) -> str:
|
|||||||
return deps_dir
|
return deps_dir
|
||||||
|
|
||||||
|
|
||||||
@core.callback
|
|
||||||
def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]:
|
def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]:
|
||||||
"""Get domains of components to set up."""
|
"""Get domains of components to set up."""
|
||||||
# Filter out the repeating and common config section [homeassistant]
|
# Filter out the repeating and common config section [homeassistant]
|
||||||
@ -699,6 +712,260 @@ def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]:
|
|||||||
return domains
|
return domains
|
||||||
|
|
||||||
|
|
||||||
|
async def _async_resolve_domains_and_preload(
|
||||||
|
hass: core.HomeAssistant, config: dict[str, Any]
|
||||||
|
) -> tuple[dict[str, Integration], dict[str, Integration]]:
|
||||||
|
"""Resolve all dependencies and return integrations to set up.
|
||||||
|
|
||||||
|
The return value is a tuple of two dictionaries:
|
||||||
|
- The first dictionary contains integrations
|
||||||
|
specified by the configuration (including config entries).
|
||||||
|
- The second dictionary contains the same integrations as the first dictionary
|
||||||
|
together with all their dependencies.
|
||||||
|
"""
|
||||||
|
domains_to_setup = _get_domains(hass, config)
|
||||||
|
platform_integrations = conf_util.extract_platform_integrations(
|
||||||
|
config, BASE_PLATFORMS
|
||||||
|
)
|
||||||
|
# Ensure base platforms that have platform integrations are added to `domains`,
|
||||||
|
# so they can be setup first instead of discovering them later when a config
|
||||||
|
# entry setup task notices that it's needed and there is already a long line
|
||||||
|
# to use the import executor.
|
||||||
|
#
|
||||||
|
# For example if we have
|
||||||
|
# sensor:
|
||||||
|
# - platform: template
|
||||||
|
#
|
||||||
|
# `template` has to be loaded to validate the config for sensor
|
||||||
|
# so we want to start loading `sensor` as soon as we know
|
||||||
|
# it will be needed. The more platforms under `sensor:`, the longer
|
||||||
|
# it will take to finish setup for `sensor` because each of these
|
||||||
|
# platforms has to be imported before we can validate the config.
|
||||||
|
#
|
||||||
|
# Thankfully we are migrating away from the platform pattern
|
||||||
|
# so this will be less of a problem in the future.
|
||||||
|
domains_to_setup.update(platform_integrations)
|
||||||
|
|
||||||
|
# Additionally process base platforms since we do not require the manifest
|
||||||
|
# to list them as dependencies.
|
||||||
|
# We want to later avoid lock contention when multiple integrations try to load
|
||||||
|
# their manifests at once.
|
||||||
|
# Also process integrations that are defined under base platforms
|
||||||
|
# to speed things up.
|
||||||
|
additional_domains_to_process = {
|
||||||
|
*BASE_PLATFORMS,
|
||||||
|
*chain.from_iterable(platform_integrations.values()),
|
||||||
|
}
|
||||||
|
|
||||||
|
# Resolve all dependencies so we know all integrations
|
||||||
|
# that will have to be loaded and start right-away
|
||||||
|
integrations_or_excs = await loader.async_get_integrations(
|
||||||
|
hass, {*domains_to_setup, *additional_domains_to_process}
|
||||||
|
)
|
||||||
|
# Eliminate those missing or with invalid manifest
|
||||||
|
integrations_to_process = {
|
||||||
|
domain: itg
|
||||||
|
for domain, itg in integrations_or_excs.items()
|
||||||
|
if isinstance(itg, Integration)
|
||||||
|
}
|
||||||
|
integrations_dependencies = await loader.resolve_integrations_dependencies(
|
||||||
|
hass, integrations_to_process.values()
|
||||||
|
)
|
||||||
|
# Eliminate those without valid dependencies
|
||||||
|
integrations_to_process = {
|
||||||
|
domain: integrations_to_process[domain] for domain in integrations_dependencies
|
||||||
|
}
|
||||||
|
|
||||||
|
integrations_to_setup = {
|
||||||
|
domain: itg
|
||||||
|
for domain, itg in integrations_to_process.items()
|
||||||
|
if domain in domains_to_setup
|
||||||
|
}
|
||||||
|
all_integrations_to_setup = integrations_to_setup.copy()
|
||||||
|
all_integrations_to_setup.update(
|
||||||
|
(dep, loader.async_get_loaded_integration(hass, dep))
|
||||||
|
for domain in integrations_to_setup
|
||||||
|
for dep in integrations_dependencies[domain].difference(
|
||||||
|
all_integrations_to_setup
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Gather requirements for all integrations,
|
||||||
|
# their dependencies and after dependencies.
|
||||||
|
# To gather all the requirements we must ignore exceptions here.
|
||||||
|
# The exceptions will be detected and handled later in the bootstrap process.
|
||||||
|
integrations_after_dependencies = (
|
||||||
|
await loader.resolve_integrations_after_dependencies(
|
||||||
|
hass, integrations_to_process.values(), ignore_exceptions=True
|
||||||
|
)
|
||||||
|
)
|
||||||
|
integrations_requirements = {
|
||||||
|
domain: itg.requirements for domain, itg in integrations_to_process.items()
|
||||||
|
}
|
||||||
|
integrations_requirements.update(
|
||||||
|
(dep, loader.async_get_loaded_integration(hass, dep).requirements)
|
||||||
|
for deps in integrations_after_dependencies.values()
|
||||||
|
for dep in deps.difference(integrations_requirements)
|
||||||
|
)
|
||||||
|
all_requirements = set(chain.from_iterable(integrations_requirements.values()))
|
||||||
|
|
||||||
|
# Optimistically check if requirements are already installed
|
||||||
|
# ahead of setting up the integrations so we can prime the cache
|
||||||
|
# We do not wait for this since it's an optimization only
|
||||||
|
hass.async_create_background_task(
|
||||||
|
requirements.async_load_installed_versions(hass, all_requirements),
|
||||||
|
"check installed requirements",
|
||||||
|
eager_start=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Start loading translations for all integrations we are going to set up
|
||||||
|
# in the background so they are ready when we need them. This avoids a
|
||||||
|
# lot of waiting for the translation load lock and a thundering herd of
|
||||||
|
# tasks trying to load the same translations at the same time as each
|
||||||
|
# integration is loaded.
|
||||||
|
#
|
||||||
|
# We do not wait for this since as soon as the task runs it will
|
||||||
|
# hold the translation load lock and if anything is fast enough to
|
||||||
|
# wait for the translation load lock, loading will be done by the
|
||||||
|
# time it gets to it.
|
||||||
|
translations_to_load = {*all_integrations_to_setup, *additional_domains_to_process}
|
||||||
|
hass.async_create_background_task(
|
||||||
|
translation.async_load_integrations(hass, translations_to_load),
|
||||||
|
"load translations",
|
||||||
|
eager_start=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Preload storage for all integrations we are going to set up
|
||||||
|
# so we do not have to wait for it to be loaded when we need it
|
||||||
|
# in the setup process.
|
||||||
|
hass.async_create_background_task(
|
||||||
|
get_internal_store_manager(hass).async_preload(
|
||||||
|
[*PRELOAD_STORAGE, *all_integrations_to_setup]
|
||||||
|
),
|
||||||
|
"preload storage",
|
||||||
|
eager_start=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
return integrations_to_setup, all_integrations_to_setup
|
||||||
|
|
||||||
|
|
||||||
|
async def _async_set_up_integrations(
|
||||||
|
hass: core.HomeAssistant, config: dict[str, Any]
|
||||||
|
) -> None:
|
||||||
|
"""Set up all the integrations."""
|
||||||
|
watcher = _WatchPendingSetups(hass, _setup_started(hass))
|
||||||
|
watcher.async_start()
|
||||||
|
|
||||||
|
integrations, all_integrations = await _async_resolve_domains_and_preload(
|
||||||
|
hass, config
|
||||||
|
)
|
||||||
|
# Detect all cycles
|
||||||
|
integrations_after_dependencies = (
|
||||||
|
+        await loader.resolve_integrations_after_dependencies(
+            hass, all_integrations.values(), set(all_integrations)
+        )
+    )
+    all_domains = set(integrations_after_dependencies)
+    domains = set(integrations) & all_domains
+
+    _LOGGER.info(
+        "Domains to be set up: %s | %s",
+        domains,
+        all_domains - domains,
+    )
+
+    async_set_domains_to_be_loaded(hass, all_domains)
+
+    # Initialize recorder
+    if "recorder" in all_domains:
+        recorder.async_initialize_recorder(hass)
+
+    # Initialize backup
+    if "backup" in all_domains:
+        backup.async_initialize_backup(hass)
+
+    stages: list[tuple[str, set[str], int | None]] = [
+        *(
+            (name, domain_group, timeout)
+            for name, domain_group, timeout in STAGE_0_INTEGRATIONS
+        ),
+        ("1", STAGE_1_INTEGRATIONS, STAGE_1_TIMEOUT),
+        ("2", domains, STAGE_2_TIMEOUT),
+    ]
+
+    _LOGGER.info("Setting up stage 0")
+    for name, domain_group, timeout in stages:
+        stage_domains_unfiltered = domain_group & all_domains
+        if not stage_domains_unfiltered:
+            _LOGGER.info("Nothing to set up in stage %s: %s", name, domain_group)
+            continue
+
+        stage_domains = stage_domains_unfiltered - hass.config.components
+        if not stage_domains:
+            _LOGGER.info("Already set up stage %s: %s", name, stage_domains_unfiltered)
+            continue
+
+        stage_dep_domains_unfiltered = {
+            dep
+            for domain in stage_domains
+            for dep in integrations_after_dependencies[domain]
+            if dep not in stage_domains
+        }
+        stage_dep_domains = stage_dep_domains_unfiltered - hass.config.components
+
+        stage_all_domains = stage_domains | stage_dep_domains
+
+        _LOGGER.info(
+            "Setting up stage %s: %s | %s\nDependencies: %s | %s",
+            name,
+            stage_domains,
+            stage_domains_unfiltered - stage_domains,
+            stage_dep_domains,
+            stage_dep_domains_unfiltered - stage_dep_domains,
+        )
+
+        if timeout is None:
+            await _async_setup_multi_components(hass, stage_all_domains, config)
+            continue
+        try:
+            async with hass.timeout.async_timeout(
+                timeout,
+                cool_down=COOLDOWN_TIME,
+                cancel_message=f"Bootstrap stage {name} timeout",
+            ):
+                await _async_setup_multi_components(hass, stage_all_domains, config)
+        except TimeoutError:
+            _LOGGER.warning(
+                "Setup timed out for stage %s waiting on %s - moving forward",
+                name,
+                hass._active_tasks,  # noqa: SLF001
+            )
+
+    # Wrap up startup
+    _LOGGER.debug("Waiting for startup to wrap up")
+    try:
+        async with hass.timeout.async_timeout(
+            WRAP_UP_TIMEOUT,
+            cool_down=COOLDOWN_TIME,
+            cancel_message="Bootstrap startup wrap up timeout",
+        ):
+            await hass.async_block_till_done()
+    except TimeoutError:
+        _LOGGER.warning(
+            "Setup timed out for bootstrap waiting on %s - moving forward",
+            hass._active_tasks,  # noqa: SLF001
+        )
+
+    watcher.async_stop()
+
+    if _LOGGER.isEnabledFor(logging.DEBUG):
+        setup_time = async_get_setup_timings(hass)
+        _LOGGER.debug(
+            "Integration setup times: %s",
+            dict(sorted(setup_time.items(), key=itemgetter(1), reverse=True)),
+        )
+
+
||||||
 class _WatchPendingSetups:
     """Periodic log and dispatch of setups that are pending."""
@@ -770,14 +1037,12 @@ class _WatchPendingSetups:
         self._handle = None
 
 
-async def async_setup_multi_components(
+async def _async_setup_multi_components(
     hass: core.HomeAssistant,
     domains: set[str],
     config: dict[str, Any],
 ) -> None:
     """Set up multiple domains. Log on failure."""
-    # Avoid creating tasks for domains that were setup in a previous stage
-    domains_not_yet_setup = domains - hass.config.components
     # Create setup tasks for base platforms first since everything will have
     # to wait to be imported, and the sooner we can get the base platforms
     # loaded the sooner we can start loading the rest of the integrations.
@@ -787,9 +1052,7 @@ async def async_setup_multi_components(
             f"setup component {domain}",
             eager_start=True,
         )
-        for domain in sorted(
-            domains_not_yet_setup, key=SETUP_ORDER_SORT_KEY, reverse=True
-        )
+        for domain in sorted(domains, key=SETUP_ORDER_SORT_KEY, reverse=True)
     }
     results = await asyncio.gather(*futures.values(), return_exceptions=True)
     for idx, domain in enumerate(futures):
@@ -800,278 +1063,3 @@ async def async_setup_multi_components(
             domain,
             exc_info=(type(result), result, result.__traceback__),
         )
-
-
-async def _async_resolve_domains_to_setup(
-    hass: core.HomeAssistant, config: dict[str, Any]
-) -> tuple[set[str], dict[str, loader.Integration]]:
-    """Resolve all dependencies and return list of domains to set up."""
-    domains_to_setup = _get_domains(hass, config)
-    needed_requirements: set[str] = set()
-    platform_integrations = conf_util.extract_platform_integrations(
-        config, BASE_PLATFORMS
-    )
-    # Ensure base platforms that have platform integrations are added to
-    # to `domains_to_setup so they can be setup first instead of
-    # discovering them when later when a config entry setup task
-    # notices its needed and there is already a long line to use
-    # the import executor.
-    #
-    # For example if we have
-    # sensor:
-    #   - platform: template
-    #
-    # `template` has to be loaded to validate the config for sensor
-    # so we want to start loading `sensor` as soon as we know
-    # it will be needed. The more platforms under `sensor:`, the longer
-    # it will take to finish setup for `sensor` because each of these
-    # platforms has to be imported before we can validate the config.
-    #
-    # Thankfully we are migrating away from the platform pattern
-    # so this will be less of a problem in the future.
-    domains_to_setup.update(platform_integrations)
-
-    # Load manifests for base platforms and platform based integrations
-    # that are defined under base platforms right away since we do not require
-    # the manifest to list them as dependencies and we want to avoid the lock
-    # contention when multiple integrations try to load them at once
-    additional_manifests_to_load = {
-        *BASE_PLATFORMS,
-        *chain.from_iterable(platform_integrations.values()),
-    }
-
-    translations_to_load = additional_manifests_to_load.copy()
-
-    # Resolve all dependencies so we know all integrations
-    # that will have to be loaded and start right-away
-    integration_cache: dict[str, loader.Integration] = {}
-    to_resolve: set[str] = domains_to_setup
-    while to_resolve or additional_manifests_to_load:
-        old_to_resolve: set[str] = to_resolve
-        to_resolve = set()
-
-        if additional_manifests_to_load:
-            to_get = {*old_to_resolve, *additional_manifests_to_load}
-            additional_manifests_to_load.clear()
-        else:
-            to_get = old_to_resolve
-
-        manifest_deps: set[str] = set()
-        resolve_dependencies_tasks: list[asyncio.Task[bool]] = []
-        integrations_to_process: list[loader.Integration] = []
-
-        for domain, itg in (await loader.async_get_integrations(hass, to_get)).items():
-            if not isinstance(itg, loader.Integration):
-                continue
-            integration_cache[domain] = itg
-            needed_requirements.update(itg.requirements)
-
-            # Make sure manifests for dependencies are loaded in the next
-            # loop to try to group as many as manifest loads in a single
-            # call to avoid the creating one-off executor jobs later in
-            # the setup process
-            additional_manifests_to_load.update(
-                dep
-                for dep in chain(itg.dependencies, itg.after_dependencies)
-                if dep not in integration_cache
-            )
-
-            if domain not in old_to_resolve:
-                continue
-
-            integrations_to_process.append(itg)
-            manifest_deps.update(itg.dependencies)
-            manifest_deps.update(itg.after_dependencies)
-            if not itg.all_dependencies_resolved:
-                resolve_dependencies_tasks.append(
-                    create_eager_task(
-                        itg.resolve_dependencies(),
-                        name=f"resolve dependencies {domain}",
-                        loop=hass.loop,
-                    )
-                )
-
-        if unseen_deps := manifest_deps - integration_cache.keys():
-            # If there are dependencies, try to preload all
-            # the integrations manifest at once and add them
-            # to the list of requirements we need to install
-            # so we can try to check if they are already installed
-            # in a single call below which avoids each integration
-            # having to wait for the lock to do it individually
-            deps = await loader.async_get_integrations(hass, unseen_deps)
-            for dependant_domain, dependant_itg in deps.items():
-                if isinstance(dependant_itg, loader.Integration):
-                    integration_cache[dependant_domain] = dependant_itg
-                    needed_requirements.update(dependant_itg.requirements)
-
-        if resolve_dependencies_tasks:
-            await asyncio.gather(*resolve_dependencies_tasks)
-
-        for itg in integrations_to_process:
-            try:
-                all_deps = itg.all_dependencies
-            except RuntimeError:
-                # Integration.all_dependencies raises RuntimeError if
-                # dependencies could not be resolved
-                continue
-            for dep in all_deps:
-                if dep in domains_to_setup:
-                    continue
-                domains_to_setup.add(dep)
-                to_resolve.add(dep)
-
-    _LOGGER.info("Domains to be set up: %s", domains_to_setup)
-
-    # Optimistically check if requirements are already installed
-    # ahead of setting up the integrations so we can prime the cache
-    # We do not wait for this since its an optimization only
-    hass.async_create_background_task(
-        requirements.async_load_installed_versions(hass, needed_requirements),
-        "check installed requirements",
-        eager_start=True,
-    )
-
-    #
-    # Only add the domains_to_setup after we finish resolving
-    # as new domains are likely to added in the process
-    #
-    translations_to_load.update(domains_to_setup)
-    # Start loading translations for all integrations we are going to set up
-    # in the background so they are ready when we need them. This avoids a
-    # lot of waiting for the translation load lock and a thundering herd of
-    # tasks trying to load the same translations at the same time as each
-    # integration is loaded.
-    #
-    # We do not wait for this since as soon as the task runs it will
-    # hold the translation load lock and if anything is fast enough to
-    # wait for the translation load lock, loading will be done by the
-    # time it gets to it.
-    hass.async_create_background_task(
-        translation.async_load_integrations(hass, translations_to_load),
-        "load translations",
-        eager_start=True,
-    )
-
-    # Preload storage for all integrations we are going to set up
-    # so we do not have to wait for it to be loaded when we need it
-    # in the setup process.
-    hass.async_create_background_task(
-        get_internal_store_manager(hass).async_preload(
-            [*PRELOAD_STORAGE, *domains_to_setup]
-        ),
-        "preload storage",
-        eager_start=True,
-    )
-
-    return domains_to_setup, integration_cache
-
-
-async def _async_set_up_integrations(
-    hass: core.HomeAssistant, config: dict[str, Any]
-) -> None:
-    """Set up all the integrations."""
-    watcher = _WatchPendingSetups(hass, _setup_started(hass))
-    watcher.async_start()
-
-    domains_to_setup, integration_cache = await _async_resolve_domains_to_setup(
-        hass, config
-    )
-
-    # Initialize recorder
-    if "recorder" in domains_to_setup:
-        recorder.async_initialize_recorder(hass)
-
-    pre_stage_domains = [
-        (name, domains_to_setup & domain_group) for name, domain_group in SETUP_ORDER
-    ]
-
-    # calculate what components to setup in what stage
-    stage_1_domains: set[str] = set()
-
-    # Find all dependencies of any dependency of any stage 1 integration that
-    # we plan on loading and promote them to stage 1. This is done only to not
-    # get misleading log messages
-    deps_promotion: set[str] = STAGE_1_INTEGRATIONS
-    while deps_promotion:
-        old_deps_promotion = deps_promotion
-        deps_promotion = set()
-
-        for domain in old_deps_promotion:
-            if domain not in domains_to_setup or domain in stage_1_domains:
-                continue
-
-            stage_1_domains.add(domain)
-
-            if (dep_itg := integration_cache.get(domain)) is None:
-                continue
-
-            deps_promotion.update(dep_itg.all_dependencies)
-
-    stage_2_domains = domains_to_setup - stage_1_domains
-
-    for name, domain_group in pre_stage_domains:
-        if domain_group:
-            stage_2_domains -= domain_group
-            _LOGGER.info("Setting up %s: %s", name, domain_group)
-            to_be_loaded = domain_group.copy()
-            to_be_loaded.update(
-                dep
-                for domain in domain_group
-                if (integration := integration_cache.get(domain)) is not None
-                for dep in integration.all_dependencies
-            )
-            async_set_domains_to_be_loaded(hass, to_be_loaded)
-            await async_setup_multi_components(hass, domain_group, config)
-
-    # Enables after dependencies when setting up stage 1 domains
-    async_set_domains_to_be_loaded(hass, stage_1_domains)
-
-    # Start setup
-    if stage_1_domains:
-        _LOGGER.info("Setting up stage 1: %s", stage_1_domains)
-        try:
-            async with hass.timeout.async_timeout(
-                STAGE_1_TIMEOUT, cool_down=COOLDOWN_TIME
-            ):
-                await async_setup_multi_components(hass, stage_1_domains, config)
-        except TimeoutError:
-            _LOGGER.warning(
-                "Setup timed out for stage 1 waiting on %s - moving forward",
-                hass._active_tasks,  # noqa: SLF001
-            )
-
-    # Add after dependencies when setting up stage 2 domains
-    async_set_domains_to_be_loaded(hass, stage_2_domains)
-
-    if stage_2_domains:
-        _LOGGER.info("Setting up stage 2: %s", stage_2_domains)
-        try:
-            async with hass.timeout.async_timeout(
-                STAGE_2_TIMEOUT, cool_down=COOLDOWN_TIME
-            ):
-                await async_setup_multi_components(hass, stage_2_domains, config)
-        except TimeoutError:
-            _LOGGER.warning(
-                "Setup timed out for stage 2 waiting on %s - moving forward",
-                hass._active_tasks,  # noqa: SLF001
-            )
-
-    # Wrap up startup
-    _LOGGER.debug("Waiting for startup to wrap up")
-    try:
-        async with hass.timeout.async_timeout(WRAP_UP_TIMEOUT, cool_down=COOLDOWN_TIME):
-            await hass.async_block_till_done()
-    except TimeoutError:
-        _LOGGER.warning(
-            "Setup timed out for bootstrap waiting on %s - moving forward",
-            hass._active_tasks,  # noqa: SLF001
-        )
-
-    watcher.async_stop()
-
-    if _LOGGER.isEnabledFor(logging.DEBUG):
-        setup_time = async_get_setup_timings(hass)
-        _LOGGER.debug(
-            "Integration setup times: %s",
-            dict(sorted(setup_time.items(), key=itemgetter(1), reverse=True)),
-        )
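
The staged setup added above reduces to a simple control flow: iterate over (name, domains, timeout) tuples and apply an optional deadline per stage, logging and moving forward instead of aborting startup. A minimal, self-contained sketch of that shape using plain asyncio (Python 3.11+; an illustration only, not Home Assistant's actual timeout helper, and the stage contents here are hypothetical):

import asyncio


async def set_up_domain(domain: str) -> None:
    """Stand-in for setting up one integration."""
    await asyncio.sleep(0.01)
    print(f"set up {domain}")


async def set_up_stages() -> None:
    # (name, domains, timeout); None mirrors stages that run without a deadline.
    stages: list[tuple[str, set[str], float | None]] = [
        ("0", {"recorder", "frontend"}, None),
        ("1", {"mqtt", "zha"}, 30.0),
        ("2", {"light", "sensor"}, 60.0),
    ]
    for name, domains, timeout in stages:
        coros = [set_up_domain(d) for d in sorted(domains)]
        if timeout is None:
            await asyncio.gather(*coros)
            continue
        try:
            async with asyncio.timeout(timeout):
                await asyncio.gather(*coros)
        except TimeoutError:
            # Log and move forward instead of failing bootstrap.
            print(f"stage {name} timed out - moving forward")


asyncio.run(set_up_stages())
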
@@ -1,5 +1,13 @@
 {
   "domain": "amazon",
   "name": "Amazon",
-  "integrations": ["alexa", "amazon_polly", "aws", "fire_tv", "route53"]
+  "integrations": [
+    "alexa",
+    "alexa_devices",
+    "amazon_polly",
+    "aws",
+    "aws_s3",
+    "fire_tv",
+    "route53"
+  ]
 }
homeassistant/brands/bosch.json (new file)
@@ -0,0 +1,5 @@
+{
+  "domain": "bosch",
+  "name": "Bosch",
+  "integrations": ["bosch_alarm", "bosch_shc", "home_connect"]
+}
homeassistant/brands/eve.json (new file)
@@ -0,0 +1,5 @@
+{
+  "domain": "eve",
+  "name": "Eve",
+  "iot_standards": ["matter"]
+}
@@ -5,6 +5,8 @@
     "google_assistant",
     "google_assistant_sdk",
     "google_cloud",
+    "google_drive",
+    "google_gemini",
     "google_generative_ai_conversation",
     "google_mail",
     "google_maps",
@@ -2,14 +2,17 @@
   "domain": "microsoft",
   "name": "Microsoft",
   "integrations": [
+    "azure_data_explorer",
     "azure_devops",
     "azure_event_hub",
     "azure_service_bus",
+    "azure_storage",
    "microsoft_face_detect",
     "microsoft_face_identify",
     "microsoft_face",
     "microsoft",
     "msteams",
+    "onedrive",
     "xbox"
   ]
 }
@@ -1,5 +1,6 @@
 {
   "domain": "motionblinds",
   "name": "Motionblinds",
-  "integrations": ["motion_blinds", "motionblinds_ble"]
+  "integrations": ["motion_blinds", "motionblinds_ble"],
+  "iot_standards": ["matter"]
 }
homeassistant/brands/nuki.json (new file)
@@ -0,0 +1,6 @@
+{
+  "domain": "nuki",
+  "name": "Nuki",
+  "integrations": ["nuki"],
+  "iot_standards": ["matter"]
+}
homeassistant/brands/sensorpush.json (new file)
@@ -0,0 +1,5 @@
+{
+  "domain": "sensorpush",
+  "name": "SensorPush",
+  "integrations": ["sensorpush", "sensorpush_cloud"]
+}
homeassistant/brands/shelly.json (new file)
@@ -0,0 +1,6 @@
+{
+  "domain": "shelly",
+  "name": "shelly",
+  "integrations": ["shelly"],
+  "iot_standards": ["zwave"]
+}
homeassistant/brands/sky.json (new file)
@@ -0,0 +1,5 @@
+{
+  "domain": "sky",
+  "name": "Sky",
+  "integrations": ["sky_hub", "sky_remote"]
+}
homeassistant/brands/slide.json (new file)
@@ -0,0 +1,5 @@
+{
+  "domain": "slide",
+  "name": "Slide",
+  "integrations": ["slide", "slide_local"]
+}
@@ -1,5 +1,11 @@
 {
   "domain": "sony",
   "name": "Sony",
-  "integrations": ["braviatv", "ps4", "sony_projector", "songpal"]
+  "integrations": [
+    "braviatv",
+    "ps4",
+    "sony_projector",
+    "songpal",
+    "playstation_network"
+  ]
 }
@@ -1,5 +1,6 @@
 {
   "domain": "switchbot",
   "name": "SwitchBot",
-  "integrations": ["switchbot", "switchbot_cloud"]
+  "integrations": ["switchbot", "switchbot_cloud"],
+  "iot_standards": ["matter"]
 }
homeassistant/brands/tilt.json (new file)
@@ -0,0 +1,5 @@
+{
+  "domain": "tilt",
+  "name": "Tilt",
+  "integrations": ["tilt_ble", "tilt_pi"]
+}
@@ -14,30 +14,24 @@ from jaraco.abode.exceptions import (
 )
 from jaraco.abode.helpers.timeline import Groups as GROUPS
 from requests.exceptions import ConnectTimeout, HTTPError
-import voluptuous as vol
 
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import (
     ATTR_DATE,
     ATTR_DEVICE_ID,
-    ATTR_ENTITY_ID,
     ATTR_TIME,
     CONF_PASSWORD,
     CONF_USERNAME,
     EVENT_HOMEASSISTANT_STOP,
     Platform,
 )
-from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant, ServiceCall
+from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
 from homeassistant.helpers import config_validation as cv
-from homeassistant.helpers.dispatcher import dispatcher_send
 from homeassistant.helpers.typing import ConfigType
 
 from .const import CONF_POLLING, DOMAIN, LOGGER
+from .services import async_setup_services
 
-SERVICE_SETTINGS = "change_setting"
-SERVICE_CAPTURE_IMAGE = "capture_image"
-SERVICE_TRIGGER_AUTOMATION = "trigger_automation"
-
 ATTR_DEVICE_NAME = "device_name"
 ATTR_DEVICE_TYPE = "device_type"
@@ -45,22 +39,12 @@ ATTR_EVENT_CODE = "event_code"
 ATTR_EVENT_NAME = "event_name"
 ATTR_EVENT_TYPE = "event_type"
 ATTR_EVENT_UTC = "event_utc"
-ATTR_SETTING = "setting"
 ATTR_USER_NAME = "user_name"
 ATTR_APP_TYPE = "app_type"
 ATTR_EVENT_BY = "event_by"
-ATTR_VALUE = "value"
 
 CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
 
-CHANGE_SETTING_SCHEMA = vol.Schema(
-    {vol.Required(ATTR_SETTING): cv.string, vol.Required(ATTR_VALUE): cv.string}
-)
-
-CAPTURE_IMAGE_SCHEMA = vol.Schema({ATTR_ENTITY_ID: cv.entity_ids})
-
-AUTOMATION_SCHEMA = vol.Schema({ATTR_ENTITY_ID: cv.entity_ids})
-
 PLATFORMS = [
     Platform.ALARM_CONTROL_PANEL,
     Platform.BINARY_SENSOR,
@@ -85,7 +69,7 @@ class AbodeSystem:
 
 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     """Set up the Abode component."""
-    setup_hass_services(hass)
+    async_setup_services(hass)
     return True
 
 
@@ -138,60 +122,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     return unload_ok
 
 
-def setup_hass_services(hass: HomeAssistant) -> None:
-    """Home Assistant services."""
-
-    def change_setting(call: ServiceCall) -> None:
-        """Change an Abode system setting."""
-        setting = call.data[ATTR_SETTING]
-        value = call.data[ATTR_VALUE]
-
-        try:
-            hass.data[DOMAIN].abode.set_setting(setting, value)
-        except AbodeException as ex:
-            LOGGER.warning(ex)
-
-    def capture_image(call: ServiceCall) -> None:
-        """Capture a new image."""
-        entity_ids = call.data[ATTR_ENTITY_ID]
-
-        target_entities = [
-            entity_id
-            for entity_id in hass.data[DOMAIN].entity_ids
-            if entity_id in entity_ids
-        ]
-
-        for entity_id in target_entities:
-            signal = f"abode_camera_capture_{entity_id}"
-            dispatcher_send(hass, signal)
-
-    def trigger_automation(call: ServiceCall) -> None:
-        """Trigger an Abode automation."""
-        entity_ids = call.data[ATTR_ENTITY_ID]
-
-        target_entities = [
-            entity_id
-            for entity_id in hass.data[DOMAIN].entity_ids
-            if entity_id in entity_ids
-        ]
-
-        for entity_id in target_entities:
-            signal = f"abode_trigger_automation_{entity_id}"
-            dispatcher_send(hass, signal)
-
-    hass.services.async_register(
-        DOMAIN, SERVICE_SETTINGS, change_setting, schema=CHANGE_SETTING_SCHEMA
-    )
-
-    hass.services.async_register(
-        DOMAIN, SERVICE_CAPTURE_IMAGE, capture_image, schema=CAPTURE_IMAGE_SCHEMA
-    )
-
-    hass.services.async_register(
-        DOMAIN, SERVICE_TRIGGER_AUTOMATION, trigger_automation, schema=AUTOMATION_SCHEMA
-    )
-
-
 async def setup_hass_events(hass: HomeAssistant) -> None:
     """Home Assistant start and stop callbacks."""
@@ -11,7 +11,7 @@ from homeassistant.components.alarm_control_panel import (
 )
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 
 from . import AbodeSystem
 from .const import DOMAIN
@@ -19,7 +19,9 @@ from .entity import AbodeDevice
 
 
 async def async_setup_entry(
-    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
+    hass: HomeAssistant,
+    entry: ConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up Abode alarm control panel device."""
     data: AbodeSystem = hass.data[DOMAIN]
@@ -12,7 +12,7 @@ from homeassistant.components.binary_sensor import (
 )
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.util.enum import try_parse_enum
 
 from . import AbodeSystem
@@ -21,7 +21,9 @@ from .entity import AbodeDevice
 
 
 async def async_setup_entry(
-    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
+    hass: HomeAssistant,
+    entry: ConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up Abode binary sensor devices."""
     data: AbodeSystem = hass.data[DOMAIN]
@@ -15,7 +15,7 @@ from homeassistant.components.camera import Camera
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import Event, HomeAssistant
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
-from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.util import Throttle
 
 from . import AbodeSystem
@@ -26,7 +26,9 @@ MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=90)
 
 
 async def async_setup_entry(
-    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
+    hass: HomeAssistant,
+    entry: ConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up Abode camera devices."""
     data: AbodeSystem = hass.data[DOMAIN]
@@ -112,9 +112,6 @@ class AbodeFlowHandler(ConfigFlow, domain=DOMAIN):
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
         """Handle a flow initialized by the user."""
-        if self._async_current_entries():
-            return self.async_abort(reason="single_instance_allowed")
-
         if user_input is None:
             return self.async_show_form(
                 step_id="user", data_schema=vol.Schema(self.data_schema)
@@ -7,7 +7,7 @@ from jaraco.abode.devices.cover import Cover
 from homeassistant.components.cover import CoverEntity
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 
 from . import AbodeSystem
 from .const import DOMAIN
@@ -15,7 +15,9 @@ from .entity import AbodeDevice
 
 
 async def async_setup_entry(
-    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
+    hass: HomeAssistant,
+    entry: ConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up Abode cover devices."""
     data: AbodeSystem = hass.data[DOMAIN]
@@ -9,18 +9,16 @@ from jaraco.abode.devices.light import Light
 
 from homeassistant.components.light import (
     ATTR_BRIGHTNESS,
-    ATTR_COLOR_TEMP,
+    ATTR_COLOR_TEMP_KELVIN,
     ATTR_HS_COLOR,
+    DEFAULT_MAX_KELVIN,
+    DEFAULT_MIN_KELVIN,
     ColorMode,
     LightEntity,
 )
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers.entity_platform import AddEntitiesCallback
-from homeassistant.util.color import (
-    color_temperature_kelvin_to_mired,
-    color_temperature_mired_to_kelvin,
-)
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 
 from . import AbodeSystem
 from .const import DOMAIN
@@ -28,7 +26,9 @@ from .entity import AbodeDevice
 
 
 async def async_setup_entry(
-    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
+    hass: HomeAssistant,
+    entry: ConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up Abode light devices."""
     data: AbodeSystem = hass.data[DOMAIN]
@@ -44,13 +44,13 @@ class AbodeLight(AbodeDevice, LightEntity):
 
     _device: Light
     _attr_name = None
+    _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN
+    _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN
 
     def turn_on(self, **kwargs: Any) -> None:
         """Turn on the light."""
-        if ATTR_COLOR_TEMP in kwargs and self._device.is_color_capable:
-            self._device.set_color_temp(
-                int(color_temperature_mired_to_kelvin(kwargs[ATTR_COLOR_TEMP]))
-            )
+        if ATTR_COLOR_TEMP_KELVIN in kwargs and self._device.is_color_capable:
+            self._device.set_color_temp(kwargs[ATTR_COLOR_TEMP_KELVIN])
             return
 
         if ATTR_HS_COLOR in kwargs and self._device.is_color_capable:
@@ -85,10 +85,10 @@ class AbodeLight(AbodeDevice, LightEntity):
         return None
 
     @property
-    def color_temp(self) -> int | None:
+    def color_temp_kelvin(self) -> int | None:
         """Return the color temp of the light."""
        if self._device.has_color:
-            return color_temperature_kelvin_to_mired(self._device.color_temp)
+            return int(self._device.color_temp)
         return None
 
     @property
@@ -7,7 +7,7 @@ from jaraco.abode.devices.lock import Lock
 from homeassistant.components.lock import LockEntity
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 
 from . import AbodeSystem
 from .const import DOMAIN
@@ -15,7 +15,9 @@ from .entity import AbodeDevice
 
 
 async def async_setup_entry(
-    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
+    hass: HomeAssistant,
+    entry: ConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up Abode lock devices."""
     data: AbodeSystem = hass.data[DOMAIN]
@@ -9,5 +9,6 @@
   },
   "iot_class": "cloud_push",
   "loggers": ["jaraco.abode", "lomond"],
-  "requirements": ["jaraco.abode==6.2.1"]
+  "requirements": ["jaraco.abode==6.2.1"],
+  "single_config_entry": true
 }
@@ -16,7 +16,7 @@ from homeassistant.components.sensor import (
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import LIGHT_LUX, PERCENTAGE, UnitOfTemperature
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 
 from . import AbodeSystem
 from .const import DOMAIN
@@ -61,7 +61,9 @@ SENSOR_TYPES: tuple[AbodeSensorDescription, ...] = (
 
 
 async def async_setup_entry(
-    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
+    hass: HomeAssistant,
+    entry: ConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up Abode sensor devices."""
     data: AbodeSystem = hass.data[DOMAIN]
homeassistant/components/abode/services.py (new file)
@@ -0,0 +1,90 @@
+"""Support for the Abode Security System."""
+
+from __future__ import annotations
+
+from jaraco.abode.exceptions import Exception as AbodeException
+import voluptuous as vol
+
+from homeassistant.const import ATTR_ENTITY_ID
+from homeassistant.core import HomeAssistant, ServiceCall, callback
+from homeassistant.helpers import config_validation as cv
+from homeassistant.helpers.dispatcher import dispatcher_send
+
+from .const import DOMAIN, LOGGER
+
+SERVICE_SETTINGS = "change_setting"
+SERVICE_CAPTURE_IMAGE = "capture_image"
+SERVICE_TRIGGER_AUTOMATION = "trigger_automation"
+
+ATTR_SETTING = "setting"
+ATTR_VALUE = "value"
+
+
+CHANGE_SETTING_SCHEMA = vol.Schema(
+    {vol.Required(ATTR_SETTING): cv.string, vol.Required(ATTR_VALUE): cv.string}
+)
+
+CAPTURE_IMAGE_SCHEMA = vol.Schema({ATTR_ENTITY_ID: cv.entity_ids})
+
+AUTOMATION_SCHEMA = vol.Schema({ATTR_ENTITY_ID: cv.entity_ids})
+
+
+def _change_setting(call: ServiceCall) -> None:
+    """Change an Abode system setting."""
+    setting = call.data[ATTR_SETTING]
+    value = call.data[ATTR_VALUE]
+
+    try:
+        call.hass.data[DOMAIN].abode.set_setting(setting, value)
+    except AbodeException as ex:
+        LOGGER.warning(ex)
+
+
+def _capture_image(call: ServiceCall) -> None:
+    """Capture a new image."""
+    entity_ids = call.data[ATTR_ENTITY_ID]
+
+    target_entities = [
+        entity_id
+        for entity_id in call.hass.data[DOMAIN].entity_ids
+        if entity_id in entity_ids
+    ]
+
+    for entity_id in target_entities:
+        signal = f"abode_camera_capture_{entity_id}"
+        dispatcher_send(call.hass, signal)
+
+
+def _trigger_automation(call: ServiceCall) -> None:
+    """Trigger an Abode automation."""
+    entity_ids = call.data[ATTR_ENTITY_ID]
+
+    target_entities = [
+        entity_id
+        for entity_id in call.hass.data[DOMAIN].entity_ids
+        if entity_id in entity_ids
+    ]
+
+    for entity_id in target_entities:
+        signal = f"abode_trigger_automation_{entity_id}"
+        dispatcher_send(call.hass, signal)
+
+
+@callback
+def async_setup_services(hass: HomeAssistant) -> None:
+    """Home Assistant services."""
+
+    hass.services.async_register(
+        DOMAIN, SERVICE_SETTINGS, _change_setting, schema=CHANGE_SETTING_SCHEMA
+    )
+
+    hass.services.async_register(
+        DOMAIN, SERVICE_CAPTURE_IMAGE, _capture_image, schema=CAPTURE_IMAGE_SCHEMA
+    )
+
+    hass.services.async_register(
+        DOMAIN,
+        SERVICE_TRIGGER_AUTOMATION,
+        _trigger_automation,
+        schema=AUTOMATION_SCHEMA,
+    )
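
The new services.py above follows the pattern of registering all of an integration's services in a single async_setup_services callback, with module-level handlers that reach Home Assistant through call.hass instead of closing over hass. A minimal sketch of the same pattern for a hypothetical domain (the domain and service names here are illustrative, not part of the Abode code):

from homeassistant.core import HomeAssistant, ServiceCall, callback

DOMAIN = "example"  # hypothetical domain


def _say_hello(call: ServiceCall) -> None:
    """Handle the call; hass comes from the ServiceCall, not a closure."""
    name = call.data.get("name", "world")
    call.hass.bus.fire(f"{DOMAIN}_greeted", {"name": name})


@callback
def async_setup_services(hass: HomeAssistant) -> None:
    """Register services once, during component setup."""
    hass.services.async_register(DOMAIN, "say_hello", _say_hello)
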
@@ -28,24 +28,23 @@
       "invalid_mfa_code": "Invalid MFA code"
     },
     "abort": {
-      "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]",
       "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
     }
   },
   "services": {
     "capture_image": {
       "name": "Capture image",
-      "description": "Request a new image capture from a camera device.",
+      "description": "Requests a new image capture from a camera device.",
       "fields": {
         "entity_id": {
           "name": "Entity",
-          "description": "Entity id of the camera to request an image."
+          "description": "Entity ID of the camera to request an image from."
         }
       }
     },
     "change_setting": {
       "name": "Change setting",
-      "description": "Change an Abode system setting.",
+      "description": "Changes an Abode system setting.",
       "fields": {
         "setting": {
           "name": "Setting",
@@ -59,11 +58,11 @@
     },
     "trigger_automation": {
       "name": "Trigger automation",
-      "description": "Trigger an Abode automation.",
+      "description": "Triggers an Abode automation.",
       "fields": {
         "entity_id": {
           "name": "Entity",
-          "description": "Entity id of the automation to trigger."
+          "description": "Entity ID of the automation to trigger."
         }
       }
     }
@@ -10,7 +10,7 @@ from homeassistant.components.switch import SwitchEntity
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
-from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 
 from . import AbodeSystem
 from .const import DOMAIN
@@ -20,7 +20,9 @@ DEVICE_TYPES = ["switch", "valve"]
 
 
 async def async_setup_entry(
-    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
+    hass: HomeAssistant,
+    entry: ConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up Abode switch devices."""
     data: AbodeSystem = hass.data[DOMAIN]
homeassistant/components/acaia/__init__.py (new file)
@@ -0,0 +1,31 @@
+"""Initialize the Acaia component."""
+
+from homeassistant.const import Platform
+from homeassistant.core import HomeAssistant
+
+from .coordinator import AcaiaConfigEntry, AcaiaCoordinator
+
+PLATFORMS = [
+    Platform.BINARY_SENSOR,
+    Platform.BUTTON,
+    Platform.SENSOR,
+]
+
+
+async def async_setup_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool:
+    """Set up acaia as config entry."""
+
+    coordinator = AcaiaCoordinator(hass, entry)
+    await coordinator.async_config_entry_first_refresh()
+
+    entry.runtime_data = coordinator
+
+    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
+
+    return True
+
+
+async def async_unload_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool:
+    """Unload a config entry."""
+
+    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
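
AcaiaConfigEntry above is a typed alias (ConfigEntry[AcaiaCoordinator], defined in coordinator.py below), so entry.runtime_data carries the coordinator with full typing and no hass.data bookkeeping. A minimal sketch of the same pattern for a hypothetical integration (names are illustrative):

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


class ExampleClient:
    """Hypothetical client object owned by the entry."""


type ExampleConfigEntry = ConfigEntry[ExampleClient]


async def async_setup_entry(hass: HomeAssistant, entry: ExampleConfigEntry) -> bool:
    """Store the client on the entry instead of in hass.data."""
    entry.runtime_data = ExampleClient()
    return True
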
homeassistant/components/acaia/binary_sensor.py (new file)
@@ -0,0 +1,61 @@
+"""Binary sensor platform for Acaia scales."""
+
+from collections.abc import Callable
+from dataclasses import dataclass
+
+from aioacaia.acaiascale import AcaiaScale
+
+from homeassistant.components.binary_sensor import (
+    BinarySensorDeviceClass,
+    BinarySensorEntity,
+    BinarySensorEntityDescription,
+)
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+
+from .coordinator import AcaiaConfigEntry
+from .entity import AcaiaEntity
+
+# Coordinator is used to centralize the data updates
+PARALLEL_UPDATES = 0
+
+
+@dataclass(kw_only=True, frozen=True)
+class AcaiaBinarySensorEntityDescription(BinarySensorEntityDescription):
+    """Description for Acaia binary sensor entities."""
+
+    is_on_fn: Callable[[AcaiaScale], bool]
+
+
+BINARY_SENSORS: tuple[AcaiaBinarySensorEntityDescription, ...] = (
+    AcaiaBinarySensorEntityDescription(
+        key="timer_running",
+        translation_key="timer_running",
+        device_class=BinarySensorDeviceClass.RUNNING,
+        is_on_fn=lambda scale: scale.timer_running,
+    ),
+)
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    entry: AcaiaConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
+) -> None:
+    """Set up binary sensors."""
+
+    coordinator = entry.runtime_data
+    async_add_entities(
+        AcaiaBinarySensor(coordinator, description) for description in BINARY_SENSORS
+    )
+
+
+class AcaiaBinarySensor(AcaiaEntity, BinarySensorEntity):
+    """Representation of an Acaia binary sensor."""
+
+    entity_description: AcaiaBinarySensorEntityDescription
+
+    @property
+    def is_on(self) -> bool:
+        """Return true if the binary sensor is on."""
+        return self.entity_description.is_on_fn(self._scale)
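
The binary sensor above and the button below share one idiom: a frozen, kw_only dataclass extends the stock EntityDescription with a callable, so the entity class stays generic and all per-entity behavior lives in data. A stripped-down sketch of the idiom outside Home Assistant (plain Python, illustrative names):

from collections.abc import Callable
from dataclasses import dataclass
from typing import Any


@dataclass(kw_only=True, frozen=True)
class ValueDescription:
    """Static metadata plus a getter for one entity."""

    key: str
    value_fn: Callable[[Any], Any]


DESCRIPTIONS = (
    ValueDescription(key="weight", value_fn=lambda dev: dev.weight),
    ValueDescription(key="timer", value_fn=lambda dev: dev.timer),
)


class GenericEntity:
    """One instance per description; behavior comes from the description."""

    def __init__(self, device: Any, description: ValueDescription) -> None:
        self._device = device
        self.entity_description = description

    @property
    def native_value(self) -> Any:
        return self.entity_description.value_fn(self._device)
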
homeassistant/components/acaia/button.py (new file)
@@ -0,0 +1,63 @@
+"""Button entities for Acaia scales."""
+
+from collections.abc import Callable, Coroutine
+from dataclasses import dataclass
+from typing import Any
+
+from aioacaia.acaiascale import AcaiaScale
+
+from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+
+from .coordinator import AcaiaConfigEntry
+from .entity import AcaiaEntity
+
+PARALLEL_UPDATES = 0
+
+
+@dataclass(kw_only=True, frozen=True)
+class AcaiaButtonEntityDescription(ButtonEntityDescription):
+    """Description for acaia button entities."""
+
+    press_fn: Callable[[AcaiaScale], Coroutine[Any, Any, None]]
+
+
+BUTTONS: tuple[AcaiaButtonEntityDescription, ...] = (
+    AcaiaButtonEntityDescription(
+        key="tare",
+        translation_key="tare",
+        press_fn=lambda scale: scale.tare(),
+    ),
+    AcaiaButtonEntityDescription(
+        key="reset_timer",
+        translation_key="reset_timer",
+        press_fn=lambda scale: scale.reset_timer(),
+    ),
+    AcaiaButtonEntityDescription(
+        key="start_stop",
+        translation_key="start_stop",
+        press_fn=lambda scale: scale.start_stop_timer(),
+    ),
+)
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    entry: AcaiaConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
+) -> None:
+    """Set up button entities and services."""
+
+    coordinator = entry.runtime_data
+    async_add_entities(AcaiaButton(coordinator, description) for description in BUTTONS)
+
+
+class AcaiaButton(AcaiaEntity, ButtonEntity):
+    """Representation of an Acaia button."""
+
+    entity_description: AcaiaButtonEntityDescription
+
+    async def async_press(self) -> None:
+        """Handle the button press."""
+        await self.entity_description.press_fn(self._scale)
homeassistant/components/acaia/config_flow.py (new file)
@@ -0,0 +1,149 @@
+"""Config flow for Acaia integration."""
+
+import logging
+from typing import Any
+
+from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError, AcaiaUnknownDevice
+from aioacaia.helpers import is_new_scale
+import voluptuous as vol
+
+from homeassistant.components.bluetooth import (
+    BluetoothServiceInfoBleak,
+    async_discovered_service_info,
+)
+from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
+from homeassistant.const import CONF_ADDRESS, CONF_NAME
+from homeassistant.helpers.device_registry import format_mac
+from homeassistant.helpers.selector import (
+    SelectOptionDict,
+    SelectSelector,
+    SelectSelectorConfig,
+    SelectSelectorMode,
+)
+
+from .const import CONF_IS_NEW_STYLE_SCALE, DOMAIN
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class AcaiaConfigFlow(ConfigFlow, domain=DOMAIN):
+    """Handle a config flow for acaia."""
+
+    def __init__(self) -> None:
+        """Initialize the config flow."""
+        self._discovered: dict[str, Any] = {}
+        self._discovered_devices: dict[str, str] = {}
+
+    async def async_step_user(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Handle a flow initialized by the user."""
+
+        errors: dict[str, str] = {}
+
+        if user_input is not None:
+            mac = user_input[CONF_ADDRESS]
+            try:
+                is_new_style_scale = await is_new_scale(mac)
+            except AcaiaDeviceNotFound:
+                errors["base"] = "device_not_found"
+            except AcaiaError:
+                _LOGGER.exception("Error occurred while connecting to the scale")
+                errors["base"] = "unknown"
+            except AcaiaUnknownDevice:
+                return self.async_abort(reason="unsupported_device")
+            else:
+                await self.async_set_unique_id(format_mac(mac))
+                self._abort_if_unique_id_configured()
+
+            if not errors:
+                return self.async_create_entry(
+                    title=self._discovered_devices[mac],
+                    data={
+                        CONF_ADDRESS: mac,
+                        CONF_IS_NEW_STYLE_SCALE: is_new_style_scale,
+                    },
+                )
+
+        for device in async_discovered_service_info(self.hass):
+            self._discovered_devices[device.address] = device.name
+
+        if not self._discovered_devices:
+            return self.async_abort(reason="no_devices_found")
+
+        options = [
+            SelectOptionDict(
+                value=device_mac,
+                label=f"{device_name} ({device_mac})",
+            )
+            for device_mac, device_name in self._discovered_devices.items()
+        ]
+
+        return self.async_show_form(
+            step_id="user",
+            data_schema=vol.Schema(
+                {
+                    vol.Required(CONF_ADDRESS): SelectSelector(
+                        SelectSelectorConfig(
+                            options=options,
+                            mode=SelectSelectorMode.DROPDOWN,
+                        )
+                    )
+                }
+            ),
+            errors=errors,
+        )
+
+    async def async_step_bluetooth(
+        self, discovery_info: BluetoothServiceInfoBleak
+    ) -> ConfigFlowResult:
+        """Handle a discovered Bluetooth device."""
+
+        self._discovered[CONF_ADDRESS] = discovery_info.address
+        self._discovered[CONF_NAME] = discovery_info.name
+
+        await self.async_set_unique_id(format_mac(discovery_info.address))
+        self._abort_if_unique_id_configured()
+
+        try:
+            self._discovered[CONF_IS_NEW_STYLE_SCALE] = await is_new_scale(
+                discovery_info.address
+            )
+        except AcaiaDeviceNotFound:
+            _LOGGER.debug("Device not found during discovery")
+            return self.async_abort(reason="device_not_found")
+        except AcaiaError:
+            _LOGGER.debug(
+                "Error occurred while connecting to the scale during discovery",
+                exc_info=True,
+            )
+            return self.async_abort(reason="unknown")
+        except AcaiaUnknownDevice:
+            _LOGGER.debug("Unsupported device during discovery")
+            return self.async_abort(reason="unsupported_device")
+
+        return await self.async_step_bluetooth_confirm()
+
+    async def async_step_bluetooth_confirm(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Handle confirmation of Bluetooth discovery."""
+
+        if user_input is not None:
+            return self.async_create_entry(
+                title=self._discovered[CONF_NAME],
+                data={
+                    CONF_ADDRESS: self._discovered[CONF_ADDRESS],
+                    CONF_IS_NEW_STYLE_SCALE: self._discovered[CONF_IS_NEW_STYLE_SCALE],
+                },
+            )
+
+        self.context["title_placeholders"] = placeholders = {
+            CONF_NAME: self._discovered[CONF_NAME]
+        }
+
+        self._set_confirm_only()
+        return self.async_show_form(
+            step_id="bluetooth_confirm",
+            description_placeholders=placeholders,
+        )
homeassistant/components/acaia/const.py (new file)
@@ -0,0 +1,4 @@
+"""Constants for component."""
+
+DOMAIN = "acaia"
+CONF_IS_NEW_STYLE_SCALE = "is_new_style_scale"
homeassistant/components/acaia/coordinator.py (new file, 86 lines)
@@ -0,0 +1,86 @@
"""Coordinator for Acaia integration."""

from __future__ import annotations

from datetime import timedelta
import logging

from aioacaia.acaiascale import AcaiaScale
from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ADDRESS
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from .const import CONF_IS_NEW_STYLE_SCALE

SCAN_INTERVAL = timedelta(seconds=15)

_LOGGER = logging.getLogger(__name__)

type AcaiaConfigEntry = ConfigEntry[AcaiaCoordinator]


class AcaiaCoordinator(DataUpdateCoordinator[None]):
    """Class to handle fetching data from the scale."""

    config_entry: AcaiaConfigEntry

    def __init__(self, hass: HomeAssistant, entry: AcaiaConfigEntry) -> None:
        """Initialize coordinator."""
        super().__init__(
            hass,
            _LOGGER,
            name="acaia coordinator",
            update_interval=SCAN_INTERVAL,
            config_entry=entry,
        )

        self._scale = AcaiaScale(
            address_or_ble_device=entry.data[CONF_ADDRESS],
            name=entry.title,
            is_new_style_scale=entry.data[CONF_IS_NEW_STYLE_SCALE],
            notify_callback=self.async_update_listeners,
        )

    @property
    def scale(self) -> AcaiaScale:
        """Return the scale object."""
        return self._scale

    async def _async_update_data(self) -> None:
        """Fetch data."""

        # scale is already connected, return
        if self._scale.connected:
            return

        # scale is not connected, try to connect
        try:
            await self._scale.connect(setup_tasks=False)
        except (AcaiaDeviceNotFound, AcaiaError, TimeoutError) as ex:
            _LOGGER.debug(
                "Could not connect to scale: %s, Error: %s",
                self.config_entry.data[CONF_ADDRESS],
                ex,
            )
            self._scale.device_disconnected_handler(notify=False)
            return

        # connected, set up background tasks
        if not self._scale.heartbeat_task or self._scale.heartbeat_task.done():
            self._scale.heartbeat_task = self.config_entry.async_create_background_task(
                hass=self.hass,
                target=self._scale.send_heartbeats(),
                name="acaia_heartbeat_task",
            )

        if not self._scale.process_queue_task or self._scale.process_queue_task.done():
            self._scale.process_queue_task = (
                self.config_entry.async_create_background_task(
                    hass=self.hass,
                    target=self._scale.process_queue(),
                    name="acaia_process_queue_task",
                )
            )
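The coordinator is handed to the platforms through entry.runtime_data (diagnostics.py and sensor.py below read it back that way). The integration's __init__.py is not shown in this diff; a minimal sketch of the wiring it implies follows, where the PLATFORMS list is an assumption based on the binary_sensor, button, and sensor entities declared further down:

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

from .coordinator import AcaiaConfigEntry, AcaiaCoordinator

# Assumed platform list, inferred from the entities in strings.json below.
PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.SENSOR]


async def async_setup_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool:
    """Set up the scale and store the coordinator on the entry."""
    coordinator = AcaiaCoordinator(hass, entry)
    # First refresh cannot fail hard: the coordinator above swallows
    # connection errors, matching the test-before-setup exemption.
    await coordinator.async_config_entry_first_refresh()

    # Platforms read the coordinator back via entry.runtime_data.
    entry.runtime_data = coordinator
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True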
homeassistant/components/acaia/diagnostics.py (new file, 31 lines)
@@ -0,0 +1,31 @@
"""Diagnostics support for Acaia."""

from __future__ import annotations

from dataclasses import asdict
from typing import Any

from homeassistant.core import HomeAssistant

from . import AcaiaConfigEntry


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant,
    entry: AcaiaConfigEntry,
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""
    coordinator = entry.runtime_data
    scale = coordinator.scale

    # collect all data sources
    return {
        "model": scale.model,
        "device_state": (
            asdict(scale.device_state) if scale.device_state is not None else ""
        ),
        "mac": scale.mac,
        "last_disconnect_time": scale.last_disconnect_time,
        "timer": scale.timer,
        "weight": scale.weight,
    }
homeassistant/components/acaia/entity.py (new file, 46 lines)
@@ -0,0 +1,46 @@
"""Base class for Acaia entities."""

from dataclasses import dataclass

from homeassistant.helpers.device_registry import (
    CONNECTION_BLUETOOTH,
    DeviceInfo,
    format_mac,
)
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import AcaiaCoordinator


@dataclass
class AcaiaEntity(CoordinatorEntity[AcaiaCoordinator]):
    """Common elements for all entities."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: AcaiaCoordinator,
        entity_description: EntityDescription,
    ) -> None:
        """Initialize the entity."""
        super().__init__(coordinator)
        self.entity_description = entity_description
        self._scale = coordinator.scale
        formatted_mac = format_mac(self._scale.mac)
        self._attr_unique_id = f"{formatted_mac}_{entity_description.key}"

        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, formatted_mac)},
            manufacturer="Acaia",
            model=self._scale.model,
            suggested_area="Kitchen",
            connections={(CONNECTION_BLUETOOTH, self._scale.mac)},
        )

    @property
    def available(self) -> bool:
        """Return whether the entity is available."""
        return super().available and self._scale.connected
homeassistant/components/acaia/icons.json (new file, 24 lines)
@@ -0,0 +1,24 @@
{
  "entity": {
    "binary_sensor": {
      "timer_running": {
        "default": "mdi:timer",
        "state": {
          "on": "mdi:timer-play",
          "off": "mdi:timer-off"
        }
      }
    },
    "button": {
      "tare": {
        "default": "mdi:scale-balance"
      },
      "reset_timer": {
        "default": "mdi:timer-refresh"
      },
      "start_stop": {
        "default": "mdi:timer-play"
      }
    }
  }
}
homeassistant/components/acaia/manifest.json (new file, 30 lines)
@@ -0,0 +1,30 @@
{
  "domain": "acaia",
  "name": "Acaia",
  "bluetooth": [
    {
      "manufacturer_id": 16962
    },
    {
      "local_name": "ACAIA*"
    },
    {
      "local_name": "PYXIS-*"
    },
    {
      "local_name": "LUNAR-*"
    },
    {
      "local_name": "PROCHBT001"
    }
  ],
  "codeowners": ["@zweckj"],
  "config_flow": true,
  "dependencies": ["bluetooth_adapters"],
  "documentation": "https://www.home-assistant.io/integrations/acaia",
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["aioacaia"],
  "quality_scale": "platinum",
  "requirements": ["aioacaia==0.1.14"]
}
homeassistant/components/acaia/quality_scale.yaml (new file, 106 lines)
@@ -0,0 +1,106 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: |
      No custom actions are defined.
  appropriate-polling: done
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: |
      No custom actions are defined.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: |
      No explicit event subscriptions.
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup:
    status: exempt
    comment: |
      Device is expected to be offline most of the time, but needs to connect quickly once available.
  unique-config-entry: done
  # Silver
  action-exceptions:
    status: exempt
    comment: |
      No custom actions are defined.
  config-entry-unloading: done
  docs-configuration-parameters: done
  docs-installation-parameters: done
  entity-unavailable: done
  integration-owner: done
  log-when-unavailable:
    status: done
    comment: |
      Handled by coordinator.
  parallel-updates: done
  reauthentication-flow:
    status: exempt
    comment: |
      No authentication required.
  test-coverage: done
  # Gold
  devices: done
  diagnostics: done
  discovery-update-info:
    status: exempt
    comment: |
      No IP discovery.
  discovery:
    status: done
    comment: |
      Bluetooth discovery.
  docs-data-update: done
  docs-examples: done
  docs-known-limitations: done
  docs-supported-devices: done
  docs-supported-functions: done
  docs-troubleshooting: done
  docs-use-cases: done
  dynamic-devices:
    status: exempt
    comment: |
      Device type integration.
  entity-category: done
  entity-device-class: done
  entity-disabled-by-default:
    status: exempt
    comment: |
      No noisy/non-essential entities.
  entity-translations: done
  exception-translations:
    status: exempt
    comment: |
      No custom exceptions.
  icon-translations: done
  reconfiguration-flow:
    status: exempt
    comment: |
      Only parameter that could be changed (MAC = unique_id) would force a new config entry.
  repair-issues:
    status: exempt
    comment: |
      No repairs/issues.
  stale-devices:
    status: exempt
    comment: |
      Device type integration.

  # Platinum
  async-dependency: done
  inject-websession:
    status: exempt
    comment: |
      Bluetooth connection.
  strict-typing: done
homeassistant/components/acaia/sensor.py (new file, 146 lines)
@@ -0,0 +1,146 @@
"""Sensor platform for Acaia."""

from collections.abc import Callable
from dataclasses import dataclass

from aioacaia.acaiascale import AcaiaDeviceState, AcaiaScale
from aioacaia.const import UnitMass as AcaiaUnitOfMass

from homeassistant.components.sensor import (
    RestoreSensor,
    SensorDeviceClass,
    SensorEntity,
    SensorEntityDescription,
    SensorExtraStoredData,
    SensorStateClass,
)
from homeassistant.const import PERCENTAGE, UnitOfMass, UnitOfVolumeFlowRate
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import AcaiaConfigEntry
from .entity import AcaiaEntity

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0


@dataclass(kw_only=True, frozen=True)
class AcaiaSensorEntityDescription(SensorEntityDescription):
    """Description for Acaia sensor entities."""

    value_fn: Callable[[AcaiaScale], int | float | None]


@dataclass(kw_only=True, frozen=True)
class AcaiaDynamicUnitSensorEntityDescription(AcaiaSensorEntityDescription):
    """Description for Acaia sensor entities with dynamic units."""

    unit_fn: Callable[[AcaiaDeviceState], str] | None = None


SENSORS: tuple[AcaiaSensorEntityDescription, ...] = (
    AcaiaDynamicUnitSensorEntityDescription(
        key="weight",
        device_class=SensorDeviceClass.WEIGHT,
        native_unit_of_measurement=UnitOfMass.GRAMS,
        state_class=SensorStateClass.MEASUREMENT,
        unit_fn=lambda data: (
            UnitOfMass.OUNCES
            if data.units == AcaiaUnitOfMass.OUNCES
            else UnitOfMass.GRAMS
        ),
        value_fn=lambda scale: scale.weight,
    ),
    AcaiaDynamicUnitSensorEntityDescription(
        key="flow_rate",
        device_class=SensorDeviceClass.VOLUME_FLOW_RATE,
        native_unit_of_measurement=UnitOfVolumeFlowRate.MILLILITERS_PER_SECOND,
        suggested_display_precision=1,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda scale: scale.flow_rate,
    ),
)
RESTORE_SENSORS: tuple[AcaiaSensorEntityDescription, ...] = (
    AcaiaSensorEntityDescription(
        key="battery",
        device_class=SensorDeviceClass.BATTERY,
        native_unit_of_measurement=PERCENTAGE,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda scale: (
            scale.device_state.battery_level if scale.device_state else None
        ),
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: AcaiaConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up sensors."""

    coordinator = entry.runtime_data
    entities: list[SensorEntity] = [
        AcaiaSensor(coordinator, entity_description) for entity_description in SENSORS
    ]
    entities.extend(
        AcaiaRestoreSensor(coordinator, entity_description)
        for entity_description in RESTORE_SENSORS
    )
    async_add_entities(entities)


class AcaiaSensor(AcaiaEntity, SensorEntity):
    """Representation of an Acaia sensor."""

    entity_description: AcaiaDynamicUnitSensorEntityDescription

    @property
    def native_unit_of_measurement(self) -> str | None:
        """Return the unit of measurement of this entity."""
        if (
            self._scale.device_state is not None
            and self.entity_description.unit_fn is not None
        ):
            return self.entity_description.unit_fn(self._scale.device_state)
        return self.entity_description.native_unit_of_measurement

    @property
    def native_value(self) -> int | float | None:
        """Return the state of the entity."""
        return self.entity_description.value_fn(self._scale)


class AcaiaRestoreSensor(AcaiaEntity, RestoreSensor):
    """Representation of an Acaia sensor with restore capabilities."""

    entity_description: AcaiaSensorEntityDescription
    _restored_data: SensorExtraStoredData | None = None

    async def async_added_to_hass(self) -> None:
        """Handle entity which will be added."""
        await super().async_added_to_hass()

        self._restored_data = await self.async_get_last_sensor_data()
        if self._restored_data is not None:
            self._attr_native_value = self._restored_data.native_value
            self._attr_native_unit_of_measurement = (
                self._restored_data.native_unit_of_measurement
            )

        if self._scale.device_state is not None:
            self._attr_native_value = self.entity_description.value_fn(self._scale)

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        if self._scale.device_state is not None:
            self._attr_native_value = self.entity_description.value_fn(self._scale)
        self._async_write_ha_state()

    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        return super().available or self._restored_data is not None
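The description pattern above keeps the entity classes generic: adding another measurement is just another entry in SENSORS, with no changes to the classes. A hypothetical sketch (not part of this diff) that assumes the numeric scale.timer attribute already read by diagnostics.py:

from homeassistant.const import UnitOfTime  # extra import this sketch would need

AcaiaSensorEntityDescription(
    key="timer",
    native_unit_of_measurement=UnitOfTime.SECONDS,
    state_class=SensorStateClass.MEASUREMENT,
    # scale.timer is read the same way diagnostics.py reads it.
    value_fn=lambda scale: scale.timer,
)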
homeassistant/components/acaia/strings.json (new file, 46 lines)
@@ -0,0 +1,46 @@
{
  "config": {
    "flow_title": "{name}",
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
      "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]",
      "unsupported_device": "This device is not supported."
    },
    "error": {
      "device_not_found": "Device could not be found.",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "step": {
      "bluetooth_confirm": {
        "description": "[%key:component::bluetooth::config::step::bluetooth_confirm::description%]"
      },
      "user": {
        "description": "[%key:component::bluetooth::config::step::user::description%]",
        "data": {
          "address": "[%key:common::config_flow::data::device%]"
        },
        "data_description": {
          "address": "Select the Acaia scale you want to set up"
        }
      }
    }
  },
  "entity": {
    "binary_sensor": {
      "timer_running": {
        "name": "Timer running"
      }
    },
    "button": {
      "tare": {
        "name": "Tare"
      },
      "reset_timer": {
        "name": "Reset timer"
      },
      "start_stop": {
        "name": "Start/stop timer"
      }
    }
  }
}
@@ -12,8 +12,8 @@ import voluptuous as vol
 
 from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
 from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME
+from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
-import homeassistant.helpers.config_validation as cv
 
 from .const import DOMAIN
 
@@ -24,7 +24,7 @@ from homeassistant.components.weather import (
 
 API_METRIC: Final = "Metric"
 ATTRIBUTION: Final = "Data provided by AccuWeather"
-ATTR_CATEGORY: Final = "Category"
+ATTR_CATEGORY_VALUE = "CategoryValue"
 ATTR_DIRECTION: Final = "Direction"
 ATTR_ENGLISH: Final = "English"
 ATTR_LEVEL: Final = "level"
@@ -55,5 +55,19 @@ CONDITION_MAP = {
     for cond_ha, cond_codes in CONDITION_CLASSES.items()
     for cond_code in cond_codes
 }
+AIR_QUALITY_CATEGORY_MAP = {
+    1: "good",
+    2: "moderate",
+    3: "unhealthy",
+    4: "very_unhealthy",
+    5: "hazardous",
+}
+POLLEN_CATEGORY_MAP = {
+    1: "low",
+    2: "moderate",
+    3: "high",
+    4: "very_high",
+    5: "extreme",
+}
 UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=40)
 UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
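The two new maps translate AccuWeather's numeric CategoryValue (1-5) into the stable translation keys used by the ENUM sensors further down, instead of relying on the English "Category" text the old code read. For example:

# CategoryValue from the API is an int between 1 and 5.
assert AIR_QUALITY_CATEGORY_MAP[3] == "unhealthy"
assert POLLEN_CATEGORY_MAP[4] == "very_high"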
@@ -75,7 +75,11 @@ class AccuWeatherObservationDataUpdateCoordinator(
             async with timeout(10):
                 result = await self.accuweather.async_get_current_conditions()
         except EXCEPTIONS as error:
-            raise UpdateFailed(error) from error
+            raise UpdateFailed(
+                translation_domain=DOMAIN,
+                translation_key="current_conditions_update_error",
+                translation_placeholders={"error": repr(error)},
+            ) from error
 
         _LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)
 
@@ -117,9 +121,15 @@ class AccuWeatherDailyForecastDataUpdateCoordinator(
         """Update data via library."""
         try:
             async with timeout(10):
-                result = await self.accuweather.async_get_daily_forecast()
+                result = await self.accuweather.async_get_daily_forecast(
+                    language=self.hass.config.language
+                )
         except EXCEPTIONS as error:
-            raise UpdateFailed(error) from error
+            raise UpdateFailed(
+                translation_domain=DOMAIN,
+                translation_key="forecast_update_error",
+                translation_placeholders={"error": repr(error)},
+            ) from error
 
         _LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)
 
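Both hunks switch the coordinators from an opaque UpdateFailed(error) to a translatable failure: the frontend resolves translation_key against the "exceptions" block added to strings.json later in this diff, and translation_placeholders fills the {error} slot in that message. A generic sketch of the pattern, where self.client.fetch() is a hypothetical stand-in for the library call:

from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

DOMAIN = "accuweather"  # stand-in for the integration's const import


class ExampleCoordinator(DataUpdateCoordinator[None]):
    """Sketch of the translated-failure pattern used above."""

    async def _async_update_data(self) -> None:
        try:
            await self.client.fetch()  # hypothetical client call
        except TimeoutError as error:
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="forecast_update_error",  # key under "exceptions" in strings.json
                translation_placeholders={"error": repr(error)},  # fills the {error} slot
            ) from error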
@@ -7,7 +7,6 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["accuweather"],
-  "quality_scale": "platinum",
-  "requirements": ["accuweather==3.0.0"],
+  "requirements": ["accuweather==4.2.0"],
   "single_config_entry": true
 }
@@ -25,12 +25,13 @@ from homeassistant.const import (
     UnitOfVolumetricFlux,
 )
 from homeassistant.core import HomeAssistant, callback
-from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.update_coordinator import CoordinatorEntity
 
 from .const import (
+    AIR_QUALITY_CATEGORY_MAP,
     API_METRIC,
-    ATTR_CATEGORY,
+    ATTR_CATEGORY_VALUE,
     ATTR_DIRECTION,
     ATTR_ENGLISH,
     ATTR_LEVEL,
@@ -38,6 +39,7 @@ from .const import (
     ATTR_VALUE,
     ATTRIBUTION,
     MAX_FORECAST_DAYS,
+    POLLEN_CATEGORY_MAP,
 )
 from .coordinator import (
     AccuWeatherConfigEntry,
@@ -59,9 +61,9 @@ class AccuWeatherSensorDescription(SensorEntityDescription):
 FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
     AccuWeatherSensorDescription(
         key="AirQuality",
-        value_fn=lambda data: cast(str, data[ATTR_CATEGORY]),
+        value_fn=lambda data: AIR_QUALITY_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]],
         device_class=SensorDeviceClass.ENUM,
-        options=["good", "hazardous", "high", "low", "moderate", "unhealthy"],
+        options=list(AIR_QUALITY_CATEGORY_MAP.values()),
         translation_key="air_quality",
     ),
     AccuWeatherSensorDescription(
@@ -83,7 +85,9 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
         entity_registry_enabled_default=False,
         native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
         value_fn=lambda data: cast(int, data[ATTR_VALUE]),
-        attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
+        attr_fn=lambda data: {
+            ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
+        },
         translation_key="grass_pollen",
     ),
     AccuWeatherSensorDescription(
@@ -107,7 +111,9 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
         entity_registry_enabled_default=False,
         native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
         value_fn=lambda data: cast(int, data[ATTR_VALUE]),
-        attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
+        attr_fn=lambda data: {
+            ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
+        },
         translation_key="mold_pollen",
     ),
     AccuWeatherSensorDescription(
@@ -115,7 +121,9 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
         native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
         entity_registry_enabled_default=False,
         value_fn=lambda data: cast(int, data[ATTR_VALUE]),
-        attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
+        attr_fn=lambda data: {
+            ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
+        },
         translation_key="ragweed_pollen",
     ),
     AccuWeatherSensorDescription(
@@ -181,14 +189,18 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
         native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER,
         entity_registry_enabled_default=False,
         value_fn=lambda data: cast(int, data[ATTR_VALUE]),
-        attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
+        attr_fn=lambda data: {
+            ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
+        },
         translation_key="tree_pollen",
     ),
     AccuWeatherSensorDescription(
         key="UVIndex",
         native_unit_of_measurement=UV_INDEX,
         value_fn=lambda data: cast(int, data[ATTR_VALUE]),
-        attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]},
+        attr_fn=lambda data: {
+            ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]]
+        },
         translation_key="uv_index_forecast",
     ),
     AccuWeatherSensorDescription(
@@ -375,7 +387,7 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
 async def async_setup_entry(
     hass: HomeAssistant,
     entry: AccuWeatherConfigEntry,
-    async_add_entities: AddEntitiesCallback,
+    async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Add AccuWeather entities from a config_entry."""
     observation_coordinator: AccuWeatherObservationDataUpdateCoordinator = (
@@ -16,7 +16,7 @@
     "error": {
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
       "invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]",
-      "requests_exceeded": "The allowed number of requests to Accuweather API has been exceeded. You have to wait or change API Key."
+      "requests_exceeded": "The allowed number of requests to the AccuWeather API has been exceeded. You have to wait or change the API key."
     }
   },
   "entity": {
@@ -26,10 +26,20 @@
         "state": {
           "good": "Good",
           "hazardous": "Hazardous",
-          "high": "High",
-          "low": "Low",
           "moderate": "Moderate",
-          "unhealthy": "Unhealthy"
+          "unhealthy": "Unhealthy",
+          "very_unhealthy": "Very unhealthy"
+        },
+        "state_attributes": {
+          "options": {
+            "state": {
+              "good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
+              "hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
+              "moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
+              "unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]",
+              "very_unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::very_unhealthy%]"
+            }
+          }
         }
       },
       "apparent_temperature": {
@@ -62,12 +72,11 @@
         "level": {
           "name": "Level",
           "state": {
-            "good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
-            "hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
-            "high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
-            "low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
-            "moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
-            "unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
+            "extreme": "Extreme",
+            "high": "[%key:common::state::high%]",
+            "low": "[%key:common::state::low%]",
+            "moderate": "Moderate",
+            "very_high": "[%key:common::state::very_high%]"
           }
         }
       }
@@ -81,12 +90,11 @@
         "level": {
           "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
           "state": {
-            "good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
-            "hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
-            "high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
-            "low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
-            "moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
-            "unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
+            "extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
+            "high": "[%key:common::state::high%]",
+            "low": "[%key:common::state::low%]",
+            "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
+            "very_high": "[%key:common::state::very_high%]"
          }
         }
       }
@@ -100,6 +108,15 @@
           "steady": "Steady",
           "rising": "Rising",
           "falling": "Falling"
+        },
+        "state_attributes": {
+          "options": {
+            "state": {
+              "falling": "[%key:component::accuweather::entity::sensor::pressure_tendency::state::falling%]",
+              "rising": "[%key:component::accuweather::entity::sensor::pressure_tendency::state::rising%]",
+              "steady": "[%key:component::accuweather::entity::sensor::pressure_tendency::state::steady%]"
+            }
+          }
         }
       },
       "ragweed_pollen": {
@@ -108,12 +125,11 @@
         "level": {
           "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
           "state": {
-            "good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
-            "hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
-            "high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
-            "low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
-            "moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
-            "unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
+            "extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
+            "high": "[%key:common::state::high%]",
+            "low": "[%key:common::state::low%]",
+            "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
+            "very_high": "[%key:common::state::very_high%]"
           }
         }
       }
@@ -154,12 +170,11 @@
         "level": {
           "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
           "state": {
-            "good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
-            "hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
-            "high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
-            "low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
-            "moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
-            "unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
+            "extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
+            "high": "[%key:common::state::high%]",
+            "low": "[%key:common::state::low%]",
+            "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
+            "very_high": "[%key:common::state::very_high%]"
           }
         }
       }
@@ -170,12 +185,11 @@
         "level": {
           "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
           "state": {
-            "good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
-            "hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
-            "high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
-            "low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
-            "moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
-            "unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
+            "extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
+            "high": "[%key:common::state::high%]",
+            "low": "[%key:common::state::low%]",
+            "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
+            "very_high": "[%key:common::state::very_high%]"
           }
         }
       }
@@ -186,12 +200,11 @@
         "level": {
           "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
           "state": {
-            "good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]",
-            "hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]",
-            "high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]",
-            "low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]",
-            "moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]",
-            "unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]"
+            "extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
+            "high": "[%key:common::state::high%]",
+            "low": "[%key:common::state::low%]",
+            "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
+            "very_high": "[%key:common::state::very_high%]"
           }
         }
       }
@@ -222,6 +235,14 @@
       }
     }
   },
+  "exceptions": {
+    "current_conditions_update_error": {
+      "message": "An error occurred while retrieving weather current conditions data from the AccuWeather API: {error}"
+    },
+    "forecast_update_error": {
+      "message": "An error occurred while retrieving weather forecast data from the AccuWeather API: {error}"
+    }
+  },
   "system_health": {
     "info": {
       "can_reach_server": "Reach AccuWeather server",
@@ -30,7 +30,7 @@ from homeassistant.const import (
     UnitOfTemperature,
 )
 from homeassistant.core import HomeAssistant, callback
-from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.util.dt import utc_from_timestamp
 
 from .const import (
@@ -54,7 +54,7 @@ PARALLEL_UPDATES = 1
 async def async_setup_entry(
     hass: HomeAssistant,
     entry: AccuWeatherConfigEntry,
-    async_add_entities: AddEntitiesCallback,
+    async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Add an AccuWeather weather entity from a config_entry."""
     async_add_entities([AccuWeatherEntity(entry.runtime_data)])
@@ -4,5 +4,6 @@
   "codeowners": [],
   "documentation": "https://www.home-assistant.io/integrations/acer_projector",
   "iot_class": "local_polling",
+  "quality_scale": "legacy",
   "requirements": ["pyserial==3.5"]
 }
@@ -22,7 +22,7 @@ from homeassistant.const import (
     STATE_UNKNOWN,
 )
 from homeassistant.core import HomeAssistant
-import homeassistant.helpers.config_validation as cv
+from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 
@@ -3,7 +3,7 @@
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import Platform
 from homeassistant.core import HomeAssistant
-import homeassistant.helpers.entity_registry as er
+from homeassistant.helpers import entity_registry as er
 
 from .hub import PulseHub
 
@@ -40,9 +40,10 @@ class AcmedaFlowHandler(ConfigFlow, domain=DOMAIN):
             entry.unique_id for entry in self._async_current_entries()
         }
 
+        hubs: list[aiopulse.Hub] = []
         with suppress(TimeoutError):
             async with timeout(5):
-                hubs: list[aiopulse.Hub] = [
+                hubs = [
                     hub
                     async for hub in aiopulse.Hub.discover()
                     if hub.id not in already_configured
@@ -11,7 +11,7 @@ from homeassistant.components.cover import (
 )
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
-from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 
 from . import AcmedaConfigEntry
 from .const import ACMEDA_HUB_UPDATE
@@ -22,7 +22,7 @@ from .helpers import async_add_acmeda_entities
 async def async_setup_entry(
     hass: HomeAssistant,
     config_entry: AcmedaConfigEntry,
-    async_add_entities: AddEntitiesCallback,
+    async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up the Acmeda Rollers from a config entry."""
     hub = config_entry.runtime_data
@@ -9,7 +9,7 @@ from aiopulse import Roller
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers import device_registry as dr
-from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 
 from .const import DOMAIN, LOGGER
 
@@ -23,7 +23,7 @@ def async_add_acmeda_entities(
     entity_class: type,
     config_entry: AcmedaConfigEntry,
     current: set[int],
-    async_add_entities: AddEntitiesCallback,
+    async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Add any new entities."""
     hub = config_entry.runtime_data
@@ -70,7 +70,7 @@ class PulseHub:
 
     async def async_notify_update(self, update_type: aiopulse.UpdateType) -> None:
         """Evaluate entities when hub reports that update has occurred."""
-        LOGGER.debug("Hub {update_type.name} updated")
+        LOGGER.debug("Hub %s updated", update_type.name)
 
         if update_type == aiopulse.UpdateType.rollers:
             await update_devices(self.hass, self.config_entry, self.api.rollers)
Some files were not shown because too many files have changed in this diff.