Merge branch 'dev' into request_host_fix (commit 4d7cb77eca)

@@ -79,6 +79,7 @@ components: &components
 - homeassistant/components/group/**
 - homeassistant/components/hassio/**
 - homeassistant/components/homeassistant/**
+- homeassistant/components/homeassistant_hardware/**
 - homeassistant/components/http/**
 - homeassistant/components/image/**
 - homeassistant/components/input_boolean/**
@@ -127,6 +128,7 @@ tests: &tests
 - tests/*.py
 - tests/auth/**
 - tests/backports/**
+- tests/components/conftest.py
 - tests/components/diagnostics/**
 - tests/components/history/**
 - tests/components/logbook/**

@@ -2,7 +2,7 @@
 "name": "Home Assistant Dev",
 "context": "..",
 "dockerFile": "../Dockerfile.dev",
-"postCreateCommand": "script/setup",
+"postCreateCommand": "git config --global --add safe.directory ${containerWorkspaceFolder} && script/setup",
 "postStartCommand": "script/bootstrap",
 "containerEnv": {
 "PYTHONASYNCIODEBUG": "1"
@@ -12,7 +12,12 @@
 },
 // Port 5683 udp is used by Shelly integration
 "appPort": ["8123:8123", "5683:5683/udp"],
-"runArgs": ["-e", "GIT_EDITOR=code --wait"],
+"runArgs": [
+"-e",
+"GIT_EDITOR=code --wait",
+"--security-opt",
+"label=disable"
+],
 "customizations": {
 "vscode": {
 "extensions": [
@@ -53,7 +58,13 @@
 ],
 "[python]": {
 "editor.defaultFormatter": "charliermarsh.ruff"
-}
+},
+"json.schemas": [
+{
+"fileMatch": ["homeassistant/components/*/manifest.json"],
+"url": "./script/json_schemas/manifest_schema.json"
+}
+]
 }
 }
 }

@@ -7,6 +7,7 @@ docs
 # Development
 .devcontainer
 .vscode
+.tool-versions
 
 # Test related files
 tests

.github/FUNDING.yml (3 changes, vendored)
@@ -1,2 +1 @@
-custom: https://www.nabucasa.com
-github: balloob
+custom: https://www.openhomefoundation.org

.github/workflows/builder.yml (32 changes, vendored)
@@ -10,7 +10,7 @@ on:
 
 env:
 BUILD_TYPE: core
-DEFAULT_PYTHON: "3.12"
+DEFAULT_PYTHON: "3.13"
 PIP_TIMEOUT: 60
 UV_HTTP_TIMEOUT: 60
 UV_SYSTEM_PYTHON: "true"
@@ -27,12 +27,12 @@ jobs:
 publish: ${{ steps.version.outputs.publish }}
 steps:
 - name: Checkout the repository
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 with:
 fetch-depth: 0
 
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-uses: actions/setup-python@v5.2.0
+uses: actions/setup-python@v5.3.0
 with:
 python-version: ${{ env.DEFAULT_PYTHON }}
 
@@ -69,7 +69,7 @@ jobs:
 run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -
 
 - name: Upload translations
-uses: actions/upload-artifact@v4.4.0
+uses: actions/upload-artifact@v4.4.3
 with:
 name: translations
 path: translations.tar.gz
@@ -90,7 +90,7 @@ jobs:
 arch: ${{ fromJson(needs.init.outputs.architectures) }}
 steps:
 - name: Checkout the repository
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 
 - name: Download nightly wheels of frontend
 if: needs.init.outputs.channel == 'dev'
@@ -116,7 +116,7 @@ jobs:
 
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 if: needs.init.outputs.channel == 'dev'
-uses: actions/setup-python@v5.2.0
+uses: actions/setup-python@v5.3.0
 with:
 python-version: ${{ env.DEFAULT_PYTHON }}
 
@@ -242,7 +242,7 @@ jobs:
 - green
 steps:
 - name: Checkout the repository
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 
 - name: Set build additional args
 run: |
@@ -279,7 +279,7 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - name: Checkout the repository
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 
 - name: Initialize git
 uses: home-assistant/actions/helpers/git-init@master
@@ -321,10 +321,10 @@ jobs:
 registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
 steps:
 - name: Checkout the repository
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 
 - name: Install Cosign
-uses: sigstore/cosign-installer@v3.6.0
+uses: sigstore/cosign-installer@v3.7.0
 with:
 cosign-release: "v2.2.3"
 
@@ -451,10 +451,10 @@ jobs:
 if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
 steps:
 - name: Checkout the repository
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-uses: actions/setup-python@v5.2.0
+uses: actions/setup-python@v5.3.0
 with:
 python-version: ${{ env.DEFAULT_PYTHON }}
 
@@ -499,7 +499,7 @@ jobs:
 HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
 steps:
 - name: Checkout repository
-uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
+uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 
 - name: Login to GitHub Container Registry
 uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
@@ -509,7 +509,7 @@ jobs:
 password: ${{ secrets.GITHUB_TOKEN }}
 
 - name: Build Docker image
-uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0
+uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6.9.0
 with:
 context: . # So action will not pull the repository again
 file: ./script/hassfest/docker/Dockerfile
@@ -522,7 +522,7 @@ jobs:
 - name: Push Docker image
 if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
 id: push
-uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0
+uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6.9.0
 with:
 context: . # So action will not pull the repository again
 file: ./script/hassfest/docker/Dockerfile
@@ -531,7 +531,7 @@ jobs:
 
 - name: Generate artifact attestation
 if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
-uses: actions/attest-build-provenance@1c608d11d69870c2092266b3f9a6f3abbf17002c # v1.4.3
+uses: actions/attest-build-provenance@ef244123eb79f2f7a7e75d99086184180e6d0018 # v1.4.4
 with:
 subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
 subject-digest: ${{ steps.push.outputs.digest }}

.github/workflows/ci.yaml (188 changes, vendored)
@@ -37,12 +37,12 @@ on:
 type: boolean
 
 env:
-CACHE_VERSION: 10
+CACHE_VERSION: 11
 UV_CACHE_VERSION: 1
 MYPY_CACHE_VERSION: 9
-HA_SHORT_VERSION: "2024.11"
+HA_SHORT_VERSION: "2024.12"
 DEFAULT_PYTHON: "3.12"
-ALL_PYTHON_VERSIONS: "['3.12']"
+ALL_PYTHON_VERSIONS: "['3.12', '3.13']"
 # 10.3 is the oldest supported version
 # - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022)
 # 10.6 is the current long-term-support
@@ -93,7 +93,7 @@ jobs:
 runs-on: ubuntu-24.04
 steps:
 - name: Check out code from GitHub
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 - name: Generate partial Python venv restore key
 id: generate_python_cache_key
 run: |
@@ -231,16 +231,16 @@ jobs:
 - info
 steps:
 - name: Check out code from GitHub
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 id: python
-uses: actions/setup-python@v5.2.0
+uses: actions/setup-python@v5.3.0
 with:
 python-version: ${{ env.DEFAULT_PYTHON }}
 check-latest: true
 - name: Restore base Python virtual environment
 id: cache-venv
-uses: actions/cache@v4.0.2
+uses: actions/cache@v4.1.2
 with:
 path: venv
 key: >-
@@ -256,7 +256,7 @@ jobs:
 uv pip install "$(cat requirements_test.txt | grep pre-commit)"
 - name: Restore pre-commit environment from cache
 id: cache-precommit
-uses: actions/cache@v4.0.2
+uses: actions/cache@v4.1.2
 with:
 path: ${{ env.PRE_COMMIT_CACHE }}
 lookup-only: true
@@ -277,16 +277,16 @@ jobs:
 - pre-commit
 steps:
 - name: Check out code from GitHub
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-uses: actions/setup-python@v5.2.0
+uses: actions/setup-python@v5.3.0
 id: python
 with:
 python-version: ${{ env.DEFAULT_PYTHON }}
 check-latest: true
 - name: Restore base Python virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.0.2
+uses: actions/cache/restore@v4.1.2
 with:
 path: venv
 fail-on-cache-miss: true
@@ -295,7 +295,7 @@ jobs:
 needs.info.outputs.pre-commit_cache_key }}
 - name: Restore pre-commit environment from cache
 id: cache-precommit
-uses: actions/cache/restore@v4.0.2
+uses: actions/cache/restore@v4.1.2
 with:
 path: ${{ env.PRE_COMMIT_CACHE }}
 fail-on-cache-miss: true
@@ -317,16 +317,16 @@ jobs:
 - pre-commit
 steps:
 - name: Check out code from GitHub
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-uses: actions/setup-python@v5.2.0
+uses: actions/setup-python@v5.3.0
 id: python
 with:
 python-version: ${{ env.DEFAULT_PYTHON }}
 check-latest: true
 - name: Restore base Python virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.0.2
+uses: actions/cache/restore@v4.1.2
 with:
 path: venv
 fail-on-cache-miss: true
@@ -335,7 +335,7 @@ jobs:
 needs.info.outputs.pre-commit_cache_key }}
 - name: Restore pre-commit environment from cache
 id: cache-precommit
-uses: actions/cache/restore@v4.0.2
+uses: actions/cache/restore@v4.1.2
 with:
 path: ${{ env.PRE_COMMIT_CACHE }}
 fail-on-cache-miss: true
@@ -357,16 +357,16 @@ jobs:
 - pre-commit
 steps:
 - name: Check out code from GitHub
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-uses: actions/setup-python@v5.2.0
+uses: actions/setup-python@v5.3.0
 id: python
 with:
 python-version: ${{ env.DEFAULT_PYTHON }}
 check-latest: true
 - name: Restore base Python virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.0.2
+uses: actions/cache/restore@v4.1.2
 with:
 path: venv
 fail-on-cache-miss: true
@@ -375,7 +375,7 @@ jobs:
 needs.info.outputs.pre-commit_cache_key }}
 - name: Restore pre-commit environment from cache
 id: cache-precommit
-uses: actions/cache/restore@v4.0.2
+uses: actions/cache/restore@v4.1.2
 with:
 path: ${{ env.PRE_COMMIT_CACHE }}
 fail-on-cache-miss: true
@@ -447,7 +447,7 @@ jobs:
 - script/hassfest/docker/Dockerfile
 steps:
 - name: Check out code from GitHub
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 - name: Register hadolint problem matcher
 run: |
 echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -466,10 +466,10 @@ jobs:
 python-version: ${{ fromJSON(needs.info.outputs.python_versions) }}
 steps:
 - name: Check out code from GitHub
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 - name: Set up Python ${{ matrix.python-version }}
 id: python
-uses: actions/setup-python@v5.2.0
+uses: actions/setup-python@v5.3.0
 with:
 python-version: ${{ matrix.python-version }}
 check-latest: true
@@ -482,7 +482,7 @@ jobs:
 env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
 - name: Restore base Python virtual environment
 id: cache-venv
-uses: actions/cache@v4.0.2
+uses: actions/cache@v4.1.2
 with:
 path: venv
 lookup-only: true
@@ -491,7 +491,7 @@ jobs:
 needs.info.outputs.python_cache_key }}
 - name: Restore uv wheel cache
 if: steps.cache-venv.outputs.cache-hit != 'true'
-uses: actions/cache@v4.0.2
+uses: actions/cache@v4.1.2
 with:
 path: ${{ env.UV_CACHE_DIR }}
 key: >-
@@ -550,16 +550,16 @@ jobs:
 sudo apt-get -y install \
 libturbojpeg
 - name: Check out code from GitHub
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 id: python
-uses: actions/setup-python@v5.2.0
+uses: actions/setup-python@v5.3.0
 with:
 python-version: ${{ env.DEFAULT_PYTHON }}
 check-latest: true
 - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.0.2
+uses: actions/cache/restore@v4.1.2
 with:
 path: venv
 fail-on-cache-miss: true
@@ -583,16 +583,16 @@ jobs:
 - base
 steps:
 - name: Check out code from GitHub
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 id: python
-uses: actions/setup-python@v5.2.0
+uses: actions/setup-python@v5.3.0
 with:
 python-version: ${{ env.DEFAULT_PYTHON }}
 check-latest: true
 - name: Restore base Python virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.0.2
+uses: actions/cache/restore@v4.1.2
 with:
 path: venv
 fail-on-cache-miss: true
@@ -615,37 +615,41 @@ jobs:
 && github.event.inputs.mypy-only != 'true'
 || github.event.inputs.audit-licenses-only == 'true')
 && needs.info.outputs.requirements == 'true'
+strategy:
+fail-fast: false
+matrix:
+python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
 steps:
 - name: Check out code from GitHub
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
-- name: Set up Python ${{ env.DEFAULT_PYTHON }}
+- name: Set up Python ${{ matrix.python-version }}
 id: python
-uses: actions/setup-python@v5.2.0
+uses: actions/setup-python@v5.3.0
 with:
-python-version: ${{ env.DEFAULT_PYTHON }}
+python-version: ${{ matrix.python-version }}
 check-latest: true
-- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
+- name: Restore full Python ${{ matrix.python-version }} virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.0.2
+uses: actions/cache/restore@v4.1.2
 with:
 path: venv
 fail-on-cache-miss: true
 key: >-
 ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
 needs.info.outputs.python_cache_key }}
-- name: Run pip-licenses
+- name: Extract license data
 run: |
 . venv/bin/activate
-pip-licenses --format=json --output-file=licenses.json
+python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
 - name: Upload licenses
-uses: actions/upload-artifact@v4.4.0
+uses: actions/upload-artifact@v4.4.3
 with:
-name: licenses
+name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
-path: licenses.json
+path: licenses-${{ matrix.python-version }}.json
-- name: Process licenses
+- name: Check licenses
 run: |
 . venv/bin/activate
-python -m script.licenses
+python -m script.licenses check licenses-${{ matrix.python-version }}.json
 
 pylint:
 name: Check pylint
@@ -660,16 +664,16 @@ jobs:
 - base
 steps:
 - name: Check out code from GitHub
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 id: python
-uses: actions/setup-python@v5.2.0
+uses: actions/setup-python@v5.3.0
 with:
 python-version: ${{ env.DEFAULT_PYTHON }}
 check-latest: true
 - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.0.2
+uses: actions/cache/restore@v4.1.2
 with:
 path: venv
 fail-on-cache-miss: true
@@ -707,16 +711,16 @@ jobs:
 - base
 steps:
 - name: Check out code from GitHub
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 id: python
-uses: actions/setup-python@v5.2.0
+uses: actions/setup-python@v5.3.0
 with:
 python-version: ${{ env.DEFAULT_PYTHON }}
 check-latest: true
 - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.0.2
+uses: actions/cache/restore@v4.1.2
 with:
 path: venv
 fail-on-cache-miss: true
@@ -752,10 +756,10 @@ jobs:
 - base
 steps:
 - name: Check out code from GitHub
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 id: python
-uses: actions/setup-python@v5.2.0
+uses: actions/setup-python@v5.3.0
 with:
 python-version: ${{ env.DEFAULT_PYTHON }}
 check-latest: true
@@ -768,7 +772,7 @@ jobs:
 env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
 - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.0.2
+uses: actions/cache/restore@v4.1.2
 with:
 path: venv
 fail-on-cache-miss: true
@@ -776,7 +780,7 @@ jobs:
 ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
 needs.info.outputs.python_cache_key }}
 - name: Restore mypy cache
-uses: actions/cache@v4.0.2
+uses: actions/cache@v4.1.2
 with:
 path: .mypy_cache
 key: >-
@@ -827,16 +831,16 @@ jobs:
 libturbojpeg \
 libgammu-dev
 - name: Check out code from GitHub
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 id: python
-uses: actions/setup-python@v5.2.0
+uses: actions/setup-python@v5.3.0
 with:
 python-version: ${{ env.DEFAULT_PYTHON }}
 check-latest: true
 - name: Restore base Python virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.0.2
+uses: actions/cache/restore@v4.1.2
 with:
 path: venv
 fail-on-cache-miss: true
@@ -848,7 +852,7 @@ jobs:
 . venv/bin/activate
 python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
 - name: Upload pytest_buckets
-uses: actions/upload-artifact@v4.4.0
+uses: actions/upload-artifact@v4.4.3
 with:
 name: pytest_buckets
 path: pytest_buckets.txt
@@ -891,16 +895,16 @@ jobs:
 libturbojpeg \
 libgammu-dev
 - name: Check out code from GitHub
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 - name: Set up Python ${{ matrix.python-version }}
 id: python
-uses: actions/setup-python@v5.2.0
+uses: actions/setup-python@v5.3.0
 with:
 python-version: ${{ matrix.python-version }}
 check-latest: true
 - name: Restore full Python ${{ matrix.python-version }} virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.0.2
+uses: actions/cache/restore@v4.1.2
 with:
 path: venv
 fail-on-cache-miss: true
@@ -940,7 +944,8 @@ jobs:
 -qq \
 --timeout=9 \
 --durations=10 \
--n auto \
+--numprocesses auto \
+--snapshot-details \
 --dist=loadfile \
 ${cov_params[@]} \
 -o console_output_style=count \
@@ -949,14 +954,14 @@ jobs:
 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
 - name: Upload pytest output
 if: success() || failure() && steps.pytest-full.conclusion == 'failure'
-uses: actions/upload-artifact@v4.4.0
+uses: actions/upload-artifact@v4.4.3
 with:
 name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
 path: pytest-*.txt
 overwrite: true
 - name: Upload coverage artifact
 if: needs.info.outputs.skip_coverage != 'true'
-uses: actions/upload-artifact@v4.4.0
+uses: actions/upload-artifact@v4.4.3
 with:
 name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
 path: coverage.xml
@@ -1011,16 +1016,16 @@ jobs:
 libturbojpeg \
 libmariadb-dev-compat
 - name: Check out code from GitHub
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 - name: Set up Python ${{ matrix.python-version }}
 id: python
-uses: actions/setup-python@v5.2.0
+uses: actions/setup-python@v5.3.0
 with:
 python-version: ${{ matrix.python-version }}
 check-latest: true
 - name: Restore full Python ${{ matrix.python-version }} virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.0.2
+uses: actions/cache/restore@v4.1.2
 with:
 path: venv
 fail-on-cache-miss: true
@@ -1062,7 +1067,8 @@ jobs:
 python3 -b -X dev -m pytest \
 -qq \
 --timeout=20 \
--n 1 \
+--numprocesses 1 \
+--snapshot-details \
 ${cov_params[@]} \
 -o console_output_style=count \
 --durations=10 \
@@ -1075,7 +1081,7 @@ jobs:
 2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
 - name: Upload pytest output
 if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-uses: actions/upload-artifact@v4.4.0
+uses: actions/upload-artifact@v4.4.3
 with:
 name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
 steps.pytest-partial.outputs.mariadb }}
@@ -1083,7 +1089,7 @@ jobs:
 overwrite: true
 - name: Upload coverage artifact
 if: needs.info.outputs.skip_coverage != 'true'
-uses: actions/upload-artifact@v4.4.0
+uses: actions/upload-artifact@v4.4.3
 with:
 name: coverage-${{ matrix.python-version }}-${{
 steps.pytest-partial.outputs.mariadb }}
@@ -1094,7 +1100,7 @@ jobs:
 ./script/check_dirty
 
 pytest-postgres:
-runs-on: ubuntu-22.04
+runs-on: ubuntu-24.04
 services:
 postgres:
 image: ${{ matrix.postgresql-group }}
@@ -1134,19 +1140,21 @@ jobs:
 sudo apt-get -y install \
 bluez \
 ffmpeg \
-libturbojpeg \
+libturbojpeg
+sudo /usr/share/postgresql-common/pgdg/apt.postgresql.org.sh -y
+sudo apt-get -y install \
 postgresql-server-dev-14
 - name: Check out code from GitHub
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 - name: Set up Python ${{ matrix.python-version }}
 id: python
-uses: actions/setup-python@v5.2.0
+uses: actions/setup-python@v5.3.0
 with:
 python-version: ${{ matrix.python-version }}
 check-latest: true
 - name: Restore full Python ${{ matrix.python-version }} virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.0.2
+uses: actions/cache/restore@v4.1.2
 with:
 path: venv
 fail-on-cache-miss: true
@@ -1188,7 +1196,8 @@ jobs:
 python3 -b -X dev -m pytest \
 -qq \
 --timeout=9 \
--n 1 \
+--numprocesses 1 \
+--snapshot-details \
 ${cov_params[@]} \
 -o console_output_style=count \
 --durations=0 \
@@ -1202,7 +1211,7 @@ jobs:
 2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
 - name: Upload pytest output
 if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-uses: actions/upload-artifact@v4.4.0
+uses: actions/upload-artifact@v4.4.3
 with:
 name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
 steps.pytest-partial.outputs.postgresql }}
@@ -1210,7 +1219,7 @@ jobs:
 overwrite: true
 - name: Upload coverage artifact
 if: needs.info.outputs.skip_coverage != 'true'
-uses: actions/upload-artifact@v4.4.0
+uses: actions/upload-artifact@v4.4.3
 with:
 name: coverage-${{ matrix.python-version }}-${{
 steps.pytest-partial.outputs.postgresql }}
@@ -1232,19 +1241,18 @@ jobs:
 timeout-minutes: 10
 steps:
 - name: Check out code from GitHub
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 - name: Download all coverage artifacts
 uses: actions/download-artifact@v4.1.8
 with:
 pattern: coverage-*
 - name: Upload coverage to Codecov
 if: needs.info.outputs.test_full_suite == 'true'
-uses: codecov/codecov-action@v4.5.0
+uses: codecov/codecov-action@v5.0.7
 with:
 fail_ci_if_error: true
 flags: full-suite
 token: ${{ secrets.CODECOV_TOKEN }}
-version: v0.6.0
 
 pytest-partial:
 runs-on: ubuntu-24.04
@@ -1283,16 +1291,16 @@ jobs:
 libturbojpeg \
 libgammu-dev
 - name: Check out code from GitHub
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 - name: Set up Python ${{ matrix.python-version }}
 id: python
-uses: actions/setup-python@v5.2.0
+uses: actions/setup-python@v5.3.0
 with:
 python-version: ${{ matrix.python-version }}
 check-latest: true
 - name: Restore full Python ${{ matrix.python-version }} virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.0.2
+uses: actions/cache/restore@v4.1.2
 with:
 path: venv
 fail-on-cache-miss: true
@@ -1334,7 +1342,8 @@ jobs:
 python3 -b -X dev -m pytest \
 -qq \
 --timeout=9 \
--n auto \
+--numprocesses auto \
+--snapshot-details \
 ${cov_params[@]} \
 -o console_output_style=count \
 --durations=0 \
@@ -1344,14 +1353,14 @@ jobs:
 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
 - name: Upload pytest output
 if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-uses: actions/upload-artifact@v4.4.0
+uses: actions/upload-artifact@v4.4.3
 with:
 name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
 path: pytest-*.txt
 overwrite: true
 - name: Upload coverage artifact
 if: needs.info.outputs.skip_coverage != 'true'
-uses: actions/upload-artifact@v4.4.0
+uses: actions/upload-artifact@v4.4.3
 with:
 name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
 path: coverage.xml
@@ -1370,15 +1379,14 @@ jobs:
 timeout-minutes: 10
 steps:
 - name: Check out code from GitHub
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 - name: Download all coverage artifacts
 uses: actions/download-artifact@v4.1.8
 with:
 pattern: coverage-*
 - name: Upload coverage to Codecov
 if: needs.info.outputs.test_full_suite == 'false'
-uses: codecov/codecov-action@v4.5.0
+uses: codecov/codecov-action@v5.0.7
 with:
 fail_ci_if_error: true
 token: ${{ secrets.CODECOV_TOKEN }}
-version: v0.6.0

.github/workflows/codeql.yml (6 changes, vendored)
@@ -21,14 +21,14 @@ jobs:
 
 steps:
 - name: Check out code from GitHub
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 
 - name: Initialize CodeQL
-uses: github/codeql-action/init@v3.26.9
+uses: github/codeql-action/init@v3.27.5
 with:
 languages: python
 
 - name: Perform CodeQL Analysis
-uses: github/codeql-action/analyze@v3.26.9
+uses: github/codeql-action/analyze@v3.27.5
 with:
 category: "/language:python"

.github/workflows/translations.yml (4 changes, vendored)
@@ -19,10 +19,10 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - name: Checkout the repository
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-uses: actions/setup-python@v5.2.0
+uses: actions/setup-python@v5.3.0
 with:
 python-version: ${{ env.DEFAULT_PYTHON }}
 

.github/workflows/wheels.yml (51 changes, vendored)
@@ -32,11 +32,11 @@ jobs:
 architectures: ${{ steps.info.outputs.architectures }}
 steps:
 - name: Checkout the repository
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 id: python
-uses: actions/setup-python@v5.2.0
+uses: actions/setup-python@v5.3.0
 with:
 python-version: ${{ env.DEFAULT_PYTHON }}
 check-latest: true
@@ -64,11 +64,8 @@ jobs:
 - name: Write env-file
 run: |
 (
-echo "GRPC_BUILD_WITH_BORING_SSL_ASM=false"
 echo "GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=true"
 echo "GRPC_PYTHON_BUILD_WITH_CYTHON=true"
-echo "GRPC_PYTHON_DISABLE_LIBC_COMPATIBILITY=true"
-echo "GRPC_PYTHON_LDFLAGS=-lpthread -Wl,-wrap,memcpy -static-libgcc"
 
 # Fix out of memory issues with rust
 echo "CARGO_NET_GIT_FETCH_WITH_CLI=true"
@@ -82,7 +79,7 @@ jobs:
 ) > .env_file
 
 - name: Upload env_file
-uses: actions/upload-artifact@v4.4.0
+uses: actions/upload-artifact@v4.4.3
 with:
 name: env_file
 path: ./.env_file
@@ -90,7 +87,7 @@ jobs:
 overwrite: true
 
 - name: Upload requirements_diff
-uses: actions/upload-artifact@v4.4.0
+uses: actions/upload-artifact@v4.4.3
 with:
 name: requirements_diff
 path: ./requirements_diff.txt
@@ -102,7 +99,7 @@ jobs:
 python -m script.gen_requirements_all ci
 
 - name: Upload requirements_all_wheels
-uses: actions/upload-artifact@v4.4.0
+uses: actions/upload-artifact@v4.4.3
 with:
 name: requirements_all_wheels
 path: ./requirements_all_wheels_*.txt
@@ -115,11 +112,11 @@ jobs:
 strategy:
 fail-fast: false
 matrix:
-abi: ["cp312"]
+abi: ["cp312", "cp313"]
 arch: ${{ fromJson(needs.init.outputs.architectures) }}
 steps:
 - name: Checkout the repository
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 
 - name: Download env_file
 uses: actions/download-artifact@v4.1.8
@@ -138,14 +135,14 @@ jobs:
 sed -i "/uv/d" requirements_diff.txt
 
 - name: Build wheels
-uses: home-assistant/wheels@2024.07.1
+uses: home-assistant/wheels@2024.11.0
 with:
 abi: ${{ matrix.abi }}
 tag: musllinux_1_2
 arch: ${{ matrix.arch }}
 wheels-key: ${{ secrets.WHEELS_KEY }}
 env-file: true
-apk: "libffi-dev;openssl-dev;yaml-dev;nasm"
+apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-dev"
 skip-binary: aiohttp;multidict;yarl
 constraints: "homeassistant/package_constraints.txt"
 requirements-diff: "requirements_diff.txt"
@@ -159,11 +156,11 @@ jobs:
 strategy:
 fail-fast: false
 matrix:
-abi: ["cp312"]
+abi: ["cp312", "cp313"]
 arch: ${{ fromJson(needs.init.outputs.architectures) }}
 steps:
 - name: Checkout the repository
-uses: actions/checkout@v4.2.0
+uses: actions/checkout@v4.2.2
 
 - name: Download env_file
 uses: actions/download-artifact@v4.1.8
@@ -201,19 +198,19 @@ jobs:
 split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt
 
 - name: Create requirements for cython<3
+if: matrix.abi == 'cp312'
 run: |
 # Some dependencies still require 'cython<3'
 # and don't yet use isolated build environments.
 # Build these first.
-# grpcio: https://github.com/grpc/grpc/issues/33918
 # pydantic: https://github.com/pydantic/pydantic/issues/7689
 
 touch requirements_old-cython.txt
-cat homeassistant/package_constraints.txt | grep 'grpcio==' >> requirements_old-cython.txt
 cat homeassistant/package_constraints.txt | grep 'pydantic==' >> requirements_old-cython.txt
 
 - name: Build wheels (old cython)
-uses: home-assistant/wheels@2024.07.1
+uses: home-assistant/wheels@2024.11.0
+if: matrix.abi == 'cp312'
 with:
 abi: ${{ matrix.abi }}
 tag: musllinux_1_2
@@ -221,50 +218,50 @@ jobs:
 wheels-key: ${{ secrets.WHEELS_KEY }}
 env-file: true
 apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev"
-skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl
+skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
 constraints: "homeassistant/package_constraints.txt"
 requirements-diff: "requirements_diff.txt"
 requirements: "requirements_old-cython.txt"
 pip: "'cython<3'"
 
 - name: Build wheels (part 1)
-uses: home-assistant/wheels@2024.07.1
+uses: home-assistant/wheels@2024.11.0
 with:
 abi: ${{ matrix.abi }}
 tag: musllinux_1_2
 arch: ${{ matrix.arch }}
 wheels-key: ${{ secrets.WHEELS_KEY }}
 env-file: true
-apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
+apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
-skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl
+skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
 constraints: "homeassistant/package_constraints.txt"
 requirements-diff: "requirements_diff.txt"
 requirements: "requirements_all.txtaa"
 
 - name: Build wheels (part 2)
-uses: home-assistant/wheels@2024.07.1
+uses: home-assistant/wheels@2024.11.0
 with:
 abi: ${{ matrix.abi }}
 tag: musllinux_1_2
 arch: ${{ matrix.arch }}
 wheels-key: ${{ secrets.WHEELS_KEY }}
 env-file: true
-apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
+apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
-skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl
+skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
 constraints: "homeassistant/package_constraints.txt"
 requirements-diff: "requirements_diff.txt"
 requirements: "requirements_all.txtab"
 
 - name: Build wheels (part 3)
-uses: home-assistant/wheels@2024.07.1
+uses: home-assistant/wheels@2024.11.0
 with:
 abi: ${{ matrix.abi }}
 tag: musllinux_1_2
 arch: ${{ matrix.arch }}
 wheels-key: ${{ secrets.WHEELS_KEY }}
 env-file: true
-apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
+apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
-skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl
+skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
 constraints: "homeassistant/package_constraints.txt"
 requirements-diff: "requirements_diff.txt"
 requirements: "requirements_all.txtac"
.gitignore (vendored, 1 changed line)

@@ -79,6 +79,7 @@ pytest-*.txt
 .pydevproject

 .python-version
+.tool-versions

 # emacs auto backups
 *~
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.6.8
+    rev: v0.8.0
     hooks:
       - id: ruff
         args:
@@ -18,7 +18,7 @@ repos:
         exclude_types: [csv, json, html]
        exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.4.0
+    rev: v5.0.0
     hooks:
       - id: check-executables-have-shebangs
        stages: [manual]
@@ -83,14 +83,14 @@ repos:
        pass_filenames: false
        language: script
        types: [text]
-        files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements\.txt)$
+        files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/.+/(quality_scale)\.yaml|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$
      - id: hassfest-metadata
        name: hassfest-metadata
-        entry: script/run-in-env.sh python3 -m script.hassfest -p metadata
+        entry: script/run-in-env.sh python3 -m script.hassfest -p metadata,docker
        pass_filenames: false
        language: script
        types: [text]
-        files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml)$
+        files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml|homeassistant/components/go2rtc/const\.py)$
      - id: hassfest-mypy-config
        name: hassfest-mypy-config
        entry: script/run-in-env.sh python3 -m script.hassfest -p mypy_config
@@ -124,6 +124,7 @@ homeassistant.components.bryant_evolution.*
 homeassistant.components.bthome.*
 homeassistant.components.button.*
 homeassistant.components.calendar.*
+homeassistant.components.cambridge_audio.*
 homeassistant.components.camera.*
 homeassistant.components.canary.*
 homeassistant.components.cert_expiry.*
@@ -208,12 +209,14 @@ homeassistant.components.geo_location.*
 homeassistant.components.geocaching.*
 homeassistant.components.gios.*
 homeassistant.components.glances.*
+homeassistant.components.go2rtc.*
 homeassistant.components.goalzero.*
 homeassistant.components.google.*
 homeassistant.components.google_assistant_sdk.*
 homeassistant.components.google_cloud.*
 homeassistant.components.google_photos.*
 homeassistant.components.google_sheets.*
+homeassistant.components.govee_ble.*
 homeassistant.components.gpsd.*
 homeassistant.components.greeneye_monitor.*
 homeassistant.components.group.*
@@ -301,7 +304,6 @@ homeassistant.components.lookin.*
 homeassistant.components.luftdaten.*
 homeassistant.components.madvr.*
 homeassistant.components.manual.*
-homeassistant.components.map.*
 homeassistant.components.mastodon.*
 homeassistant.components.matrix.*
 homeassistant.components.matter.*
@@ -322,11 +324,13 @@ homeassistant.components.moon.*
 homeassistant.components.mopeka.*
 homeassistant.components.motionmount.*
 homeassistant.components.mqtt.*
+homeassistant.components.music_assistant.*
 homeassistant.components.my.*
 homeassistant.components.mysensors.*
 homeassistant.components.myuplink.*
 homeassistant.components.nam.*
 homeassistant.components.nanoleaf.*
+homeassistant.components.nasweb.*
 homeassistant.components.neato.*
 homeassistant.components.nest.*
 homeassistant.components.netatmo.*
@@ -336,6 +340,7 @@ homeassistant.components.nfandroidtv.*
 homeassistant.components.nightscout.*
 homeassistant.components.nissan_leaf.*
 homeassistant.components.no_ip.*
+homeassistant.components.nordpool.*
 homeassistant.components.notify.*
 homeassistant.components.notion.*
 homeassistant.components.number.*
@@ -345,6 +350,7 @@ homeassistant.components.oncue.*
 homeassistant.components.onewire.*
 homeassistant.components.onkyo.*
 homeassistant.components.open_meteo.*
+homeassistant.components.openai_conversation.*
 homeassistant.components.openexchangerates.*
 homeassistant.components.opensky.*
 homeassistant.components.openuv.*
@@ -352,6 +358,7 @@ homeassistant.components.oralb.*
 homeassistant.components.otbr.*
 homeassistant.components.overkiz.*
 homeassistant.components.p1_monitor.*
+homeassistant.components.panel_custom.*
 homeassistant.components.peco.*
 homeassistant.components.persistent_notification.*
 homeassistant.components.pi_hole.*
@@ -369,6 +376,7 @@ homeassistant.components.pvoutput.*
 homeassistant.components.qnap_qsw.*
 homeassistant.components.rabbitair.*
 homeassistant.components.radarr.*
+homeassistant.components.radio_browser.*
 homeassistant.components.rainforest_raven.*
 homeassistant.components.rainmachine.*
 homeassistant.components.raspberry_pi.*
@@ -377,6 +385,7 @@ homeassistant.components.recollect_waste.*
 homeassistant.components.recorder.*
 homeassistant.components.remote.*
 homeassistant.components.renault.*
+homeassistant.components.reolink.*
 homeassistant.components.repairs.*
 homeassistant.components.rest.*
 homeassistant.components.rest_command.*
@@ -406,6 +415,7 @@ homeassistant.components.sensor.*
 homeassistant.components.sensoterra.*
 homeassistant.components.senz.*
 homeassistant.components.sfr_box.*
+homeassistant.components.shell_command.*
 homeassistant.components.shelly.*
 homeassistant.components.shopping_list.*
 homeassistant.components.simplepush.*
@@ -420,6 +430,7 @@ homeassistant.components.snooz.*
 homeassistant.components.solarlog.*
 homeassistant.components.sonarr.*
 homeassistant.components.speedtestdotnet.*
+homeassistant.components.spotify.*
 homeassistant.components.sql.*
 homeassistant.components.squeezebox.*
 homeassistant.components.ssdp.*
@@ -427,6 +438,7 @@ homeassistant.components.starlink.*
 homeassistant.components.statistics.*
 homeassistant.components.steamist.*
 homeassistant.components.stookalert.*
+homeassistant.components.stookwijzer.*
 homeassistant.components.stream.*
 homeassistant.components.streamlabswater.*
 homeassistant.components.stt.*
@@ -434,6 +446,7 @@ homeassistant.components.suez_water.*
 homeassistant.components.sun.*
 homeassistant.components.surepetcare.*
 homeassistant.components.switch.*
+homeassistant.components.switch_as_x.*
 homeassistant.components.switchbee.*
 homeassistant.components.switchbot_cloud.*
 homeassistant.components.switcher_kis.*
@@ -502,6 +515,7 @@ homeassistant.components.whois.*
 homeassistant.components.withings.*
 homeassistant.components.wiz.*
 homeassistant.components.wled.*
+homeassistant.components.workday.*
 homeassistant.components.worldclock.*
 homeassistant.components.xiaomi_ble.*
 homeassistant.components.yale_smart_alarm.*
.vscode/settings.default.json (vendored, 10 changed lines)

@@ -6,5 +6,13 @@
   // https://code.visualstudio.com/docs/python/testing#_pytest-configuration-settings
   "python.testing.pytestEnabled": false,
   // https://code.visualstudio.com/docs/python/linting#_general-settings
-  "pylint.importStrategy": "fromEnvironment"
+  "pylint.importStrategy": "fromEnvironment",
+  "json.schemas": [
+    {
+      "fileMatch": [
+        "homeassistant/components/*/manifest.json"
+      ],
+      "url": "./script/json_schemas/manifest_schema.json"
+    }
+  ]
 }
.vscode/tasks.json (vendored, 16 changed lines)

@@ -87,6 +87,22 @@
       },
       "problemMatcher": []
     },
+    {
+      "label": "Update syrupy snapshots",
+      "detail": "Update syrupy snapshots for a given integration.",
+      "type": "shell",
+      "command": "python3 -m pytest ./tests/components/${input:integrationName} --snapshot-update",
+      "dependsOn": ["Compile English translations"],
+      "group": {
+        "kind": "test",
+        "isDefault": true
+      },
+      "presentation": {
+        "reveal": "always",
+        "panel": "new"
+      },
+      "problemMatcher": []
+    },
     {
       "label": "Generate Requirements",
       "type": "shell",
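The new task simply shells out to pytest with syrupy's --snapshot-update flag. As a minimal sketch of the equivalent invocation done programmatically (the integration name below is only an illustrative placeholder, filled by ${input:integrationName} in the task itself):

# Sketch: programmatic equivalent of the "Update syrupy snapshots" task.
import pytest

integration = "acaia"  # hypothetical example value

# Run the integration's test suite and rewrite its syrupy snapshot files.
exit_code = pytest.main([f"./tests/components/{integration}", "--snapshot-update"])
raise SystemExit(exit_code)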
CODEOWNERS (56 changed lines)

@@ -40,6 +40,8 @@ build.json @home-assistant/supervisor
 # Integrations
 /homeassistant/components/abode/ @shred86
 /tests/components/abode/ @shred86
+/homeassistant/components/acaia/ @zweckj
+/tests/components/acaia/ @zweckj
 /homeassistant/components/accuweather/ @bieniu
 /tests/components/accuweather/ @bieniu
 /homeassistant/components/acmeda/ @atmurray
@@ -496,8 +498,8 @@ build.json @home-assistant/supervisor
 /tests/components/freebox/ @hacf-fr @Quentame
 /homeassistant/components/freedompro/ @stefano055415
 /tests/components/freedompro/ @stefano055415
-/homeassistant/components/fritz/ @mammuth @AaronDavidSchneider @chemelli74 @mib1185
-/tests/components/fritz/ @mammuth @AaronDavidSchneider @chemelli74 @mib1185
+/homeassistant/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185
+/tests/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185
 /homeassistant/components/fritzbox/ @mib1185 @flabbamann
 /tests/components/fritzbox/ @mib1185 @flabbamann
 /homeassistant/components/fritzbox_callmonitor/ @cdce8p
@@ -544,6 +546,8 @@ build.json @home-assistant/supervisor
 /tests/components/github/ @timmo001 @ludeeus
 /homeassistant/components/glances/ @engrbm87
 /tests/components/glances/ @engrbm87
+/homeassistant/components/go2rtc/ @home-assistant/core
+/tests/components/go2rtc/ @home-assistant/core
 /homeassistant/components/goalzero/ @tkdrob
 /tests/components/goalzero/ @tkdrob
 /homeassistant/components/gogogate2/ @vangorra
@@ -584,8 +588,8 @@ build.json @home-assistant/supervisor
 /tests/components/group/ @home-assistant/core
 /homeassistant/components/guardian/ @bachya
 /tests/components/guardian/ @bachya
-/homeassistant/components/habitica/ @ASMfreaK @leikoilja @tr4nt0r
-/tests/components/habitica/ @ASMfreaK @leikoilja @tr4nt0r
+/homeassistant/components/habitica/ @tr4nt0r
+/tests/components/habitica/ @tr4nt0r
 /homeassistant/components/hardkernel/ @home-assistant/core
 /tests/components/hardkernel/ @home-assistant/core
 /homeassistant/components/hardware/ @home-assistant/core
@@ -615,8 +619,8 @@ build.json @home-assistant/supervisor
 /tests/components/hlk_sw16/ @jameshilliard
 /homeassistant/components/holiday/ @jrieger @gjohansson-ST
 /tests/components/holiday/ @jrieger @gjohansson-ST
-/homeassistant/components/home_connect/ @DavidMStraub
-/tests/components/home_connect/ @DavidMStraub
+/homeassistant/components/home_connect/ @DavidMStraub @Diegorro98
+/tests/components/home_connect/ @DavidMStraub @Diegorro98
 /homeassistant/components/homeassistant/ @home-assistant/core
 /tests/components/homeassistant/ @home-assistant/core
 /homeassistant/components/homeassistant_alerts/ @home-assistant/core
@@ -657,6 +661,8 @@ build.json @home-assistant/supervisor
 /tests/components/hunterdouglas_powerview/ @bdraco @kingy444 @trullock
 /homeassistant/components/husqvarna_automower/ @Thomas55555
 /tests/components/husqvarna_automower/ @Thomas55555
+/homeassistant/components/husqvarna_automower_ble/ @alistair23
+/tests/components/husqvarna_automower_ble/ @alistair23
 /homeassistant/components/huum/ @frwickst
 /tests/components/huum/ @frwickst
 /homeassistant/components/hvv_departures/ @vigonotion
@@ -817,6 +823,8 @@ build.json @home-assistant/supervisor
 /tests/components/lektrico/ @lektrico
 /homeassistant/components/lg_netcast/ @Drafteed @splinter98
 /tests/components/lg_netcast/ @Drafteed @splinter98
+/homeassistant/components/lg_thinq/ @LG-ThinQ-Integration
+/tests/components/lg_thinq/ @LG-ThinQ-Integration
 /homeassistant/components/lidarr/ @tkdrob
 /tests/components/lidarr/ @tkdrob
 /homeassistant/components/lifx/ @Djelibeybi
@@ -948,6 +956,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/msteams/ @peroyvind
 /homeassistant/components/mullvad/ @meichthys
 /tests/components/mullvad/ @meichthys
+/homeassistant/components/music_assistant/ @music-assistant
+/tests/components/music_assistant/ @music-assistant
 /homeassistant/components/mutesync/ @currentoor
 /tests/components/mutesync/ @currentoor
 /homeassistant/components/my/ @home-assistant/core
@@ -962,8 +972,8 @@ build.json @home-assistant/supervisor
 /tests/components/nam/ @bieniu
 /homeassistant/components/nanoleaf/ @milanmeu @joostlek
 /tests/components/nanoleaf/ @milanmeu @joostlek
-/homeassistant/components/neato/ @Santobert
-/tests/components/neato/ @Santobert
+/homeassistant/components/nasweb/ @nasWebio
+/tests/components/nasweb/ @nasWebio
 /homeassistant/components/nederlandse_spoorwegen/ @YarmoM
 /homeassistant/components/ness_alarm/ @nickw444
 /tests/components/ness_alarm/ @nickw444
@@ -1002,6 +1012,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/noaa_tides/ @jdelaney72
 /homeassistant/components/nobo_hub/ @echoromeo @oyvindwe
 /tests/components/nobo_hub/ @echoromeo @oyvindwe
+/homeassistant/components/nordpool/ @gjohansson-ST
+/tests/components/nordpool/ @gjohansson-ST
 /homeassistant/components/notify/ @home-assistant/core
 /tests/components/notify/ @home-assistant/core
 /homeassistant/components/notify_events/ @matrozov @papajojo
@@ -1045,6 +1057,7 @@ build.json @home-assistant/supervisor
 /homeassistant/components/onewire/ @garbled1 @epenet
 /tests/components/onewire/ @garbled1 @epenet
 /homeassistant/components/onkyo/ @arturpragacz
+/tests/components/onkyo/ @arturpragacz
 /homeassistant/components/onvif/ @hunterjm
 /tests/components/onvif/ @hunterjm
 /homeassistant/components/open_meteo/ @frenck
@@ -1086,10 +1099,10 @@ build.json @home-assistant/supervisor
 /tests/components/ovo_energy/ @timmo001
 /homeassistant/components/p1_monitor/ @klaasnicolaas
 /tests/components/p1_monitor/ @klaasnicolaas
+/homeassistant/components/palazzetti/ @dotvav
+/tests/components/palazzetti/ @dotvav
 /homeassistant/components/panel_custom/ @home-assistant/frontend
 /tests/components/panel_custom/ @home-assistant/frontend
-/homeassistant/components/panel_iframe/ @home-assistant/frontend
-/tests/components/panel_iframe/ @home-assistant/frontend
 /homeassistant/components/peco/ @IceBotYT
 /tests/components/peco/ @IceBotYT
 /homeassistant/components/pegel_online/ @mib1185
@@ -1237,8 +1250,8 @@ build.json @home-assistant/supervisor
 /tests/components/roku/ @ctalkington
 /homeassistant/components/romy/ @xeniter
 /tests/components/romy/ @xeniter
-/homeassistant/components/roomba/ @pschmitt @cyr-ius @shenxn @Xitee1 @Orhideous
-/tests/components/roomba/ @pschmitt @cyr-ius @shenxn @Xitee1 @Orhideous
+/homeassistant/components/roomba/ @pschmitt @cyr-ius @shenxn @Orhideous
+/tests/components/roomba/ @pschmitt @cyr-ius @shenxn @Orhideous
 /homeassistant/components/roon/ @pavoni
 /tests/components/roon/ @pavoni
 /homeassistant/components/rpi_power/ @shenxn @swetoast
@@ -1331,6 +1344,8 @@ build.json @home-assistant/supervisor
 /tests/components/siren/ @home-assistant/core @raman325
 /homeassistant/components/sisyphus/ @jkeljo
 /homeassistant/components/sky_hub/ @rogerselwyn
+/homeassistant/components/sky_remote/ @dunnmj @saty9
+/tests/components/sky_remote/ @dunnmj @saty9
 /homeassistant/components/skybell/ @tkdrob
 /tests/components/skybell/ @tkdrob
 /homeassistant/components/slack/ @tkdrob @fletcherau
@@ -1349,6 +1364,7 @@ build.json @home-assistant/supervisor
 /homeassistant/components/smarttub/ @mdz
 /tests/components/smarttub/ @mdz
 /homeassistant/components/smarty/ @z0mbieprocess
+/tests/components/smarty/ @z0mbieprocess
 /homeassistant/components/smhi/ @gjohansson-ST
 /tests/components/smhi/ @gjohansson-ST
 /homeassistant/components/smlight/ @tl-sl
@@ -1382,15 +1398,13 @@ build.json @home-assistant/supervisor
 /tests/components/spaceapi/ @fabaff
 /homeassistant/components/speedtestdotnet/ @rohankapoorcom @engrbm87
 /tests/components/speedtestdotnet/ @rohankapoorcom @engrbm87
-/homeassistant/components/spider/ @peternijssen
-/tests/components/spider/ @peternijssen
 /homeassistant/components/splunk/ @Bre77
 /homeassistant/components/spotify/ @frenck @joostlek
 /tests/components/spotify/ @frenck @joostlek
 /homeassistant/components/sql/ @gjohansson-ST @dougiteixeira
 /tests/components/sql/ @gjohansson-ST @dougiteixeira
-/homeassistant/components/squeezebox/ @rajlaud
-/tests/components/squeezebox/ @rajlaud
+/homeassistant/components/squeezebox/ @rajlaud @pssc @peteS-UK
+/tests/components/squeezebox/ @rajlaud @pssc @peteS-UK
 /homeassistant/components/srp_energy/ @briglx
 /tests/components/srp_energy/ @briglx
 /homeassistant/components/starline/ @anonym-tsk
@@ -1414,8 +1428,8 @@ build.json @home-assistant/supervisor
 /tests/components/stt/ @home-assistant/core
 /homeassistant/components/subaru/ @G-Two
 /tests/components/subaru/ @G-Two
-/homeassistant/components/suez_water/ @ooii
-/tests/components/suez_water/ @ooii
+/homeassistant/components/suez_water/ @ooii @jb101010-2
+/tests/components/suez_water/ @ooii @jb101010-2
 /homeassistant/components/sun/ @Swamp-Ig
 /tests/components/sun/ @Swamp-Ig
 /homeassistant/components/sunweg/ @rokam
@@ -1473,8 +1487,8 @@ build.json @home-assistant/supervisor
 /tests/components/tedee/ @patrickhilker @zweckj
 /homeassistant/components/tellduslive/ @fredrike
 /tests/components/tellduslive/ @fredrike
-/homeassistant/components/template/ @PhracturedBlue @tetienne @home-assistant/core
-/tests/components/template/ @PhracturedBlue @tetienne @home-assistant/core
+/homeassistant/components/template/ @PhracturedBlue @home-assistant/core
+/tests/components/template/ @PhracturedBlue @home-assistant/core
 /homeassistant/components/tesla_fleet/ @Bre77
 /tests/components/tesla_fleet/ @Bre77
 /homeassistant/components/tesla_wall_connector/ @einarhauks
@@ -1559,6 +1573,8 @@ build.json @home-assistant/supervisor
 /tests/components/unifi/ @Kane610
 /homeassistant/components/unifi_direct/ @tofuSCHNITZEL
 /homeassistant/components/unifiled/ @florisvdk
+/homeassistant/components/unifiprotect/ @RaHehl
+/tests/components/unifiprotect/ @RaHehl
 /homeassistant/components/upb/ @gwww
 /tests/components/upb/ @gwww
 /homeassistant/components/upc_connect/ @pvizeli @fabaff
Dockerfile (20 changed lines)

@@ -7,12 +7,13 @@ FROM ${BUILD_FROM}
 # Synchronize with homeassistant/core.py:async_stop
 ENV \
     S6_SERVICES_GRACETIME=240000 \
-    UV_SYSTEM_PYTHON=true
+    UV_SYSTEM_PYTHON=true \
+    UV_NO_CACHE=true

 ARG QEMU_CPU

 # Install uv
-RUN pip3 install uv==0.4.15
+RUN pip3 install uv==0.5.4

 WORKDIR /usr/src

@@ -44,4 +45,19 @@ RUN \
 # Home Assistant S6-Overlay
 COPY rootfs /

+# Needs to be redefined inside the FROM statement to be set for RUN commands
+ARG BUILD_ARCH
+# Get go2rtc binary
+RUN \
+    case "${BUILD_ARCH}" in \
+        "aarch64") go2rtc_suffix='arm64' ;; \
+        "armhf") go2rtc_suffix='armv6' ;; \
+        "armv7") go2rtc_suffix='arm' ;; \
+        *) go2rtc_suffix=${BUILD_ARCH} ;; \
+    esac \
+    && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.7/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
+    && chmod +x /bin/go2rtc \
+    # Verify go2rtc can be executed
+    && go2rtc --version
+
 WORKDIR /config

@@ -35,6 +35,9 @@ RUN \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*

+# Add go2rtc binary
+COPY --from=ghcr.io/alexxit/go2rtc:latest /usr/local/bin/go2rtc /bin/go2rtc
+
 # Install uv
 RUN pip3 install uv

@@ -7,8 +7,6 @@ Check out `home-assistant.io <https://home-assistant.io>`__ for `a
 demo <https://demo.home-assistant.io>`__, `installation instructions <https://home-assistant.io/getting-started/>`__,
 `tutorials <https://home-assistant.io/getting-started/automation/>`__ and `documentation <https://home-assistant.io/docs/>`__.

-This is a project of the `Open Home Foundation <https://www.openhomefoundation.org/>`__.
-
 |screenshot-states|

 Featured integrations
@@ -22,9 +20,14 @@ components <https://developers.home-assistant.io/docs/creating_component_index/>
 If you run into issues while using Home Assistant or during development
 of a component, check the `Home Assistant help section <https://home-assistant.io/help/>`__ of our website for further help and information.

+|ohf-logo|
+
 .. |Chat Status| image:: https://img.shields.io/discord/330944238910963714.svg
    :target: https://www.home-assistant.io/join-chat/
 .. |screenshot-states| image:: https://raw.githubusercontent.com/home-assistant/core/dev/.github/assets/screenshot-states.png
    :target: https://demo.home-assistant.io
 .. |screenshot-integrations| image:: https://raw.githubusercontent.com/home-assistant/core/dev/.github/assets/screenshot-integrations.png
    :target: https://home-assistant.io/integrations/
+.. |ohf-logo| image:: https://www.openhomefoundation.org/badges/home-assistant.png
+   :alt: Home Assistant - A project from the Open Home Foundation
+   :target: https://www.openhomefoundation.org/
build.yaml (10 changed lines)

@@ -1,10 +1,10 @@
 image: ghcr.io/home-assistant/{arch}-homeassistant
 build_from:
-  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.06.1
-  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.06.1
-  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.06.1
-  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.06.1
-  i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.06.1
+  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.11.0
+  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.11.0
+  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.11.0
+  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.11.0
+  i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.11.0
 codenotary:
   signer: notary@home-assistant.io
   base_image: notary@home-assistant.io
@@ -9,6 +9,7 @@ import os
 import sys
 import threading

+from .backup_restore import restore_backup
 from .const import REQUIRED_PYTHON_VER, RESTART_EXIT_CODE, __version__

 FAULT_LOG_FILENAME = "home-assistant.log.fault"
@@ -182,6 +183,9 @@ def main() -> int:
         return scripts.run(args.script)

     config_dir = os.path.abspath(os.path.join(os.getcwd(), args.config))
+    if restore_backup(config_dir):
+        return RESTART_EXIT_CODE
+
     ensure_config_path(config_dir)

     # pylint: disable-next=import-outside-toplevel
@@ -12,7 +12,6 @@ from typing import Any, cast

 import jwt

-from homeassistant import data_entry_flow
 from homeassistant.core import (
     CALLBACK_TYPE,
     HassJob,
@@ -20,13 +19,14 @@ from homeassistant.core import (
     HomeAssistant,
     callback,
 )
+from homeassistant.data_entry_flow import FlowHandler, FlowManager, FlowResultType
 from homeassistant.helpers.event import async_track_point_in_utc_time
 from homeassistant.util import dt as dt_util

 from . import auth_store, jwt_wrapper, models
 from .const import ACCESS_TOKEN_EXPIRATION, GROUP_ID_ADMIN, REFRESH_TOKEN_EXPIRATION
 from .mfa_modules import MultiFactorAuthModule, auth_mfa_module_from_config
-from .models import AuthFlowResult
+from .models import AuthFlowContext, AuthFlowResult
 from .providers import AuthProvider, LoginFlow, auth_provider_from_config
 from .providers.homeassistant import HassAuthProvider

@@ -98,7 +98,7 @@ async def auth_manager_from_config(


 class AuthManagerFlowManager(
-    data_entry_flow.FlowManager[AuthFlowResult, tuple[str, str]]
+    FlowManager[AuthFlowContext, AuthFlowResult, tuple[str, str]]
 ):
     """Manage authentication flows."""

@@ -113,7 +113,7 @@ class AuthManagerFlowManager(
         self,
         handler_key: tuple[str, str],
         *,
-        context: dict[str, Any] | None = None,
+        context: AuthFlowContext | None = None,
         data: dict[str, Any] | None = None,
     ) -> LoginFlow:
         """Create a login flow."""
@@ -124,7 +124,7 @@ class AuthManagerFlowManager(

     async def async_finish_flow(
         self,
-        flow: data_entry_flow.FlowHandler[AuthFlowResult, tuple[str, str]],
+        flow: FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]],
         result: AuthFlowResult,
     ) -> AuthFlowResult:
         """Return a user as result of login flow.
@@ -134,7 +134,7 @@ class AuthManagerFlowManager(
         """
         flow = cast(LoginFlow, flow)

-        if result["type"] != data_entry_flow.FlowResultType.CREATE_ENTRY:
+        if result["type"] != FlowResultType.CREATE_ENTRY:
             return result

         # we got final result
@@ -18,7 +18,7 @@ from homeassistant.util.json import json_loads
 JWT_TOKEN_CACHE_SIZE = 16
 MAX_TOKEN_SIZE = 8192

-_VERIFY_KEYS = ("signature", "exp", "nbf", "iat", "aud", "iss")
+_VERIFY_KEYS = ("signature", "exp", "nbf", "iat", "aud", "iss", "sub", "jti")

 _VERIFY_OPTIONS: dict[str, Any] = {f"verify_{key}": True for key in _VERIFY_KEYS} | {
     "require": []
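For context, the dict comprehension above expands the key tuple into PyJWT-style verify options; with "sub" and "jti" added, the resulting mapping looks as follows. This is a plain illustration of what the expression evaluates to, not code taken from the diff:

# What _VERIFY_OPTIONS evaluates to after adding "sub" and "jti" to _VERIFY_KEYS.
_VERIFY_KEYS = ("signature", "exp", "nbf", "iat", "aud", "iss", "sub", "jti")
_VERIFY_OPTIONS = {f"verify_{key}": True for key in _VERIFY_KEYS} | {"require": []}
assert _VERIFY_OPTIONS == {
    "verify_signature": True,
    "verify_exp": True,
    "verify_nbf": True,
    "verify_iat": True,
    "verify_aud": True,
    "verify_iss": True,
    "verify_sub": True,
    "verify_jti": True,
    "require": [],
}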
@@ -177,17 +177,17 @@ class TotpAuthModule(MultiFactorAuthModule):
 class TotpSetupFlow(SetupFlow):
     """Handler for the setup flow."""

+    _auth_module: TotpAuthModule
+    _ota_secret: str
+    _url: str
+    _image: str
+
     def __init__(
         self, auth_module: TotpAuthModule, setup_schema: vol.Schema, user: User
     ) -> None:
         """Initialize the setup flow."""
         super().__init__(auth_module, setup_schema, user.id)
-        # to fix typing complaint
-        self._auth_module: TotpAuthModule = auth_module
         self._user = user
-        self._ota_secret: str = ""
-        self._url: str | None = None
-        self._image: str | None = None

     async def async_step_init(
         self, user_input: dict[str, str] | None = None
@@ -214,12 +214,11 @@ class TotpSetupFlow(SetupFlow):
                 errors["base"] = "invalid_code"

         else:
-            hass = self._auth_module.hass
             (
                 self._ota_secret,
                 self._url,
                 self._image,
-            ) = await hass.async_add_executor_job(
+            ) = await self._auth_module.hass.async_add_executor_job(
                 _generate_secret_and_qr_code,
                 str(self._user.name),
             )
@@ -3,7 +3,7 @@
 from __future__ import annotations

 from datetime import datetime, timedelta
-from functools import cached_property
+from ipaddress import IPv4Address, IPv6Address
 import secrets
 from typing import Any, NamedTuple
 import uuid
@@ -11,9 +11,10 @@ import uuid
 import attr
 from attr import Attribute
 from attr.setters import validate
+from propcache import cached_property

 from homeassistant.const import __version__
-from homeassistant.data_entry_flow import FlowResult
+from homeassistant.data_entry_flow import FlowContext, FlowResult
 from homeassistant.util import dt as dt_util

 from . import permissions as perm_mdl
@@ -23,7 +24,16 @@ TOKEN_TYPE_NORMAL = "normal"
 TOKEN_TYPE_SYSTEM = "system"
 TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN = "long_lived_access_token"

-AuthFlowResult = FlowResult[tuple[str, str]]
+
+class AuthFlowContext(FlowContext, total=False):
+    """Typed context dict for auth flow."""
+
+    credential_only: bool
+    ip_address: IPv4Address | IPv6Address
+    redirect_uri: str
+
+
+AuthFlowResult = FlowResult[AuthFlowContext, tuple[str, str]]


 @attr.s(slots=True)
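A minimal sketch of how a typed flow context like the new AuthFlowContext is meant to be used: because it is a TypedDict declared with total=False, every key is optional, and a type checker can still verify value types when callers build the dict. The snippet below is illustrative only and does not import Home Assistant; the FlowContext stand-in is an assumption made to keep it self-contained:

# Sketch only: mirrors the AuthFlowContext definition from the diff.
from ipaddress import IPv4Address, IPv6Address, ip_address
from typing import TypedDict


class FlowContext(TypedDict, total=False):
    """Stand-in for homeassistant.data_entry_flow.FlowContext (assumed shape)."""

    source: str


class AuthFlowContext(FlowContext, total=False):
    """Typed context dict for auth flow."""

    credential_only: bool
    ip_address: IPv4Address | IPv6Address
    redirect_uri: str


# All keys are optional; a checker such as mypy flags wrong value types.
context: AuthFlowContext = {
    "ip_address": ip_address("192.0.2.1"),
    "redirect_uri": "https://example.com/auth/callback",
}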
@@ -10,9 +10,10 @@ from typing import Any
 import voluptuous as vol
 from voluptuous.humanize import humanize_error

-from homeassistant import data_entry_flow, requirements
+from homeassistant import requirements
 from homeassistant.const import CONF_ID, CONF_NAME, CONF_TYPE
 from homeassistant.core import HomeAssistant, callback
+from homeassistant.data_entry_flow import FlowHandler
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.importlib import async_import_module
 from homeassistant.util import dt as dt_util
@@ -21,7 +22,14 @@ from homeassistant.util.hass_dict import HassKey

 from ..auth_store import AuthStore
 from ..const import MFA_SESSION_EXPIRATION
-from ..models import AuthFlowResult, Credentials, RefreshToken, User, UserMeta
+from ..models import (
+    AuthFlowContext,
+    AuthFlowResult,
+    Credentials,
+    RefreshToken,
+    User,
+    UserMeta,
+)

 _LOGGER = logging.getLogger(__name__)
 DATA_REQS: HassKey[set[str]] = HassKey("auth_prov_reqs_processed")
@@ -97,7 +105,7 @@ class AuthProvider:

     # Implement by extending class

-    async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow:
+    async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
         """Return the data flow for logging in with auth provider.

         Auth provider should extend LoginFlow and return an instance.
@@ -184,7 +192,7 @@ async def load_auth_provider_module(
     return module


-class LoginFlow(data_entry_flow.FlowHandler[AuthFlowResult, tuple[str, str]]):
+class LoginFlow(FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]]):
     """Handler for the login flow."""

     _flow_result = AuthFlowResult
@@ -13,7 +13,7 @@ import voluptuous as vol
 from homeassistant.const import CONF_COMMAND
 from homeassistant.exceptions import HomeAssistantError

-from ..models import AuthFlowResult, Credentials, UserMeta
+from ..models import AuthFlowContext, AuthFlowResult, Credentials, UserMeta
 from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow

 CONF_ARGS = "args"
@@ -59,7 +59,7 @@ class CommandLineAuthProvider(AuthProvider):
         super().__init__(*args, **kwargs)
         self._user_meta: dict[str, dict[str, Any]] = {}

-    async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow:
+    async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
         """Return a flow to login."""
         return CommandLineLoginFlow(self)

|
|||||||
from homeassistant.helpers import issue_registry as ir
|
from homeassistant.helpers import issue_registry as ir
|
||||||
from homeassistant.helpers.storage import Store
|
from homeassistant.helpers.storage import Store
|
||||||
|
|
||||||
from ..models import AuthFlowResult, Credentials, UserMeta
|
from ..models import AuthFlowContext, AuthFlowResult, Credentials, UserMeta
|
||||||
from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow
|
from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow
|
||||||
|
|
||||||
STORAGE_VERSION = 1
|
STORAGE_VERSION = 1
|
||||||
@ -305,7 +305,7 @@ class HassAuthProvider(AuthProvider):
|
|||||||
await data.async_load()
|
await data.async_load()
|
||||||
self.data = data
|
self.data = data
|
||||||
|
|
||||||
async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow:
|
async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
|
||||||
"""Return a flow to login."""
|
"""Return a flow to login."""
|
||||||
return HassLoginFlow(self)
|
return HassLoginFlow(self)
|
||||||
|
|
||||||
|
@@ -4,14 +4,14 @@ from __future__ import annotations

 from collections.abc import Mapping
 import hmac
-from typing import Any, cast
+from typing import cast

 import voluptuous as vol

 from homeassistant.core import callback
 from homeassistant.exceptions import HomeAssistantError

-from ..models import AuthFlowResult, Credentials, UserMeta
+from ..models import AuthFlowContext, AuthFlowResult, Credentials, UserMeta
 from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow

 USER_SCHEMA = vol.Schema(
@@ -36,7 +36,7 @@ class InvalidAuthError(HomeAssistantError):
 class ExampleAuthProvider(AuthProvider):
     """Example auth provider based on hardcoded usernames and passwords."""

-    async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow:
+    async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
         """Return a flow to login."""
         return ExampleLoginFlow(self)

@@ -25,7 +25,13 @@ import homeassistant.helpers.config_validation as cv
 from homeassistant.helpers.network import is_cloud_connection

 from .. import InvalidAuthError
-from ..models import AuthFlowResult, Credentials, RefreshToken, UserMeta
+from ..models import (
+    AuthFlowContext,
+    AuthFlowResult,
+    Credentials,
+    RefreshToken,
+    UserMeta,
+)
 from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow

 type IPAddress = IPv4Address | IPv6Address
@@ -98,7 +104,7 @@ class TrustedNetworksAuthProvider(AuthProvider):
         """Trusted Networks auth provider does not support MFA."""
         return False

-    async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow:
+    async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
         """Return a flow to login."""
         assert context is not None
         ip_addr = cast(IPAddress, context.get("ip_address"))
@@ -9,6 +9,7 @@ import it.

 from __future__ import annotations

+# pylint: disable-next=hass-deprecated-import
 from functools import cached_property as _cached_property, partial

 from homeassistant.helpers.deprecation import (
homeassistant/backup_restore.py (new file, 126 lines)
@@ -0,0 +1,126 @@
"""Home Assistant module to handle restoring backups."""

from dataclasses import dataclass
import json
import logging
from pathlib import Path
import shutil
import sys
from tempfile import TemporaryDirectory

from awesomeversion import AwesomeVersion
import securetar

from .const import __version__ as HA_VERSION

RESTORE_BACKUP_FILE = ".HA_RESTORE"
KEEP_PATHS = ("backups",)

_LOGGER = logging.getLogger(__name__)


@dataclass
class RestoreBackupFileContent:
    """Definition for restore backup file content."""

    backup_file_path: Path


def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent | None:
    """Return the contents of the restore backup file."""
    instruction_path = config_dir.joinpath(RESTORE_BACKUP_FILE)
    try:
        instruction_content = json.loads(instruction_path.read_text(encoding="utf-8"))
        return RestoreBackupFileContent(
            backup_file_path=Path(instruction_content["path"])
        )
    except (FileNotFoundError, json.JSONDecodeError):
        return None


def _clear_configuration_directory(config_dir: Path) -> None:
    """Delete all files and directories in the config directory except for the backups directory."""
    keep_paths = [config_dir.joinpath(path) for path in KEEP_PATHS]
    config_contents = sorted(
        [entry for entry in config_dir.iterdir() if entry not in keep_paths]
    )

    for entry in config_contents:
        entrypath = config_dir.joinpath(entry)

        if entrypath.is_file():
            entrypath.unlink()
        elif entrypath.is_dir():
            shutil.rmtree(entrypath)


def _extract_backup(config_dir: Path, backup_file_path: Path) -> None:
    """Extract the backup file to the config directory."""
    with (
        TemporaryDirectory() as tempdir,
        securetar.SecureTarFile(
            backup_file_path,
            gzip=False,
            mode="r",
        ) as ostf,
    ):
        ostf.extractall(
            path=Path(tempdir, "extracted"),
            members=securetar.secure_path(ostf),
            filter="fully_trusted",
        )
        backup_meta_file = Path(tempdir, "extracted", "backup.json")
        backup_meta = json.loads(backup_meta_file.read_text(encoding="utf8"))

        if (
            backup_meta_version := AwesomeVersion(
                backup_meta["homeassistant"]["version"]
            )
        ) > HA_VERSION:
            raise ValueError(
                f"You need at least Home Assistant version {backup_meta_version} to restore this backup"
            )

        with securetar.SecureTarFile(
            Path(
                tempdir,
                "extracted",
                f"homeassistant.tar{'.gz' if backup_meta["compressed"] else ''}",
            ),
            gzip=backup_meta["compressed"],
            mode="r",
        ) as istf:
            for member in istf.getmembers():
                if member.name == "data":
                    continue
                member.name = member.name.replace("data/", "")
            _clear_configuration_directory(config_dir)
            istf.extractall(
                path=config_dir,
                members=[
                    member
                    for member in securetar.secure_path(istf)
                    if member.name != "data"
                ],
                filter="fully_trusted",
            )


def restore_backup(config_dir_path: str) -> bool:
    """Restore the backup file if any.

    Returns True if a restore backup file was found and restored, False otherwise.
    """
    config_dir = Path(config_dir_path)
    if not (restore_content := restore_backup_file_content(config_dir)):
        return False

    logging.basicConfig(stream=sys.stdout, level=logging.INFO)
    backup_file_path = restore_content.backup_file_path
    _LOGGER.info("Restoring %s", backup_file_path)
    try:
        _extract_backup(config_dir, backup_file_path)
    except FileNotFoundError as err:
        raise ValueError(f"Backup file {backup_file_path} does not exist") from err
    _LOGGER.info("Restore complete, restarting")
    return True
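Note: the restore flow above is driven entirely by the ".HA_RESTORE" instruction file that restore_backup_file_content() reads. The writer side is not part of this diff; the sketch below is only an illustration of what such an instruction file would look like, reusing the filename and the "path" key defined above (request_restore is a made-up helper name).

# Sketch only: produce the instruction file that restore_backup() looks for on startup.
import json
from pathlib import Path


def request_restore(config_dir: Path, backup_file: Path) -> None:
    """Ask the next Home Assistant start to restore the given backup archive."""
    # ".HA_RESTORE" and the "path" key mirror RESTORE_BACKUP_FILE and
    # restore_backup_file_content() in homeassistant/backup_restore.py above.
    instruction = config_dir / ".HA_RESTORE"
    instruction.write_text(json.dumps({"path": str(backup_file)}), encoding="utf-8")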
@@ -70,6 +70,7 @@ from .const import (
     REQUIRED_NEXT_PYTHON_VER,
     SIGNAL_BOOTSTRAP_INTEGRATIONS,
 )
+from .core_config import async_process_ha_core_config
 from .exceptions import HomeAssistantError
 from .helpers import (
     area_registry,
@@ -479,7 +480,7 @@ async def async_from_config_dict(
     core_config = config.get(core.DOMAIN, {})

     try:
-        await conf_util.async_process_ha_core_config(hass, core_config)
+        await async_process_ha_core_config(hass, core_config)
     except vol.Invalid as config_err:
         conf_util.async_log_schema_error(config_err, core.DOMAIN, core_config, hass)
         async_notify_setup_error(hass, core.DOMAIN)
@@ -514,7 +515,7 @@ async def async_from_config_dict(
     issue_registry.async_create_issue(
         hass,
         core.DOMAIN,
-        "python_version",
+        f"python_version_{required_python_version}",
         is_fixable=False,
         severity=issue_registry.IssueSeverity.WARNING,
         breaks_in_ha_version=REQUIRED_NEXT_PYTHON_HA_RELEASE,
@@ -5,7 +5,6 @@
     "google_assistant",
     "google_assistant_sdk",
     "google_cloud",
-    "google_domains",
     "google_generative_ai_conversation",
     "google_mail",
     "google_maps",
homeassistant/brands/husqvarna.json (new file, 5 lines)
@@ -0,0 +1,5 @@
{
  "domain": "husqvarna",
  "name": "Husqvarna",
  "integrations": ["husqvarna_automower", "husqvarna_automower_ble"]
}
@@ -1,5 +1,5 @@
 {
   "domain": "lg",
   "name": "LG",
-  "integrations": ["lg_netcast", "lg_soundbar", "webostv"]
+  "integrations": ["lg_netcast", "lg_soundbar", "lg_thinq", "webostv"]
 }
homeassistant/brands/sky.json (new file, 5 lines)
@@ -0,0 +1,5 @@
{
  "domain": "sky",
  "name": "Sky",
  "integrations": ["sky_hub", "sky_remote"]
}
@@ -4,8 +4,10 @@ from __future__ import annotations

 from dataclasses import dataclass, field
 from functools import partial
+from pathlib import Path

 from jaraco.abode.client import Client as Abode
+import jaraco.abode.config
 from jaraco.abode.exceptions import (
     AuthenticationException as AbodeAuthenticationException,
     Exception as AbodeException,
@@ -93,6 +95,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     password = entry.data[CONF_PASSWORD]
     polling = entry.data[CONF_POLLING]

+    # Configure abode library to use config directory for storing data
+    jaraco.abode.config.paths.override(user_data=Path(hass.config.path("Abode")))
+
     # For previous config entries where unique_id is None
     if entry.unique_id is None:
         hass.config_entries.async_update_entry(
@@ -7,13 +7,9 @@ from jaraco.abode.devices.alarm import Alarm
 from homeassistant.components.alarm_control_panel import (
     AlarmControlPanelEntity,
     AlarmControlPanelEntityFeature,
+    AlarmControlPanelState,
 )
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import (
-    STATE_ALARM_ARMED_AWAY,
-    STATE_ALARM_ARMED_HOME,
-    STATE_ALARM_DISARMED,
-)
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddEntitiesCallback

@@ -44,14 +40,14 @@ class AbodeAlarm(AbodeDevice, AlarmControlPanelEntity):
     _device: Alarm

     @property
-    def state(self) -> str | None:
+    def alarm_state(self) -> AlarmControlPanelState | None:
         """Return the state of the device."""
         if self._device.is_standby:
-            return STATE_ALARM_DISARMED
+            return AlarmControlPanelState.DISARMED
         if self._device.is_away:
-            return STATE_ALARM_ARMED_AWAY
+            return AlarmControlPanelState.ARMED_AWAY
         if self._device.is_home:
-            return STATE_ALARM_ARMED_HOME
+            return AlarmControlPanelState.ARMED_HOME
         return None

     def alarm_disarm(self, code: str | None = None) -> None:
@@ -102,15 +102,7 @@ class AbodeFlowHandler(ConfigFlow, domain=DOMAIN):
         existing_entry = await self.async_set_unique_id(self._username)

         if existing_entry:
-            self.hass.config_entries.async_update_entry(
-                existing_entry, data=config_data
-            )
-            # Reload the Abode config entry otherwise devices will remain unavailable
-            self.hass.async_create_task(
-                self.hass.config_entries.async_reload(existing_entry.entry_id)
-            )
-
-            return self.async_abort(reason="reauth_successful")
+            return self.async_update_reload_and_abort(existing_entry, data=config_data)

         return self.async_create_entry(
             title=cast(str, self._username), data=config_data
@@ -120,9 +112,6 @@ class AbodeFlowHandler(ConfigFlow, domain=DOMAIN):
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
         """Handle a flow initialized by the user."""
-        if self._async_current_entries():
-            return self.async_abort(reason="single_instance_allowed")
-
         if user_input is None:
             return self.async_show_form(
                 step_id="user", data_schema=vol.Schema(self.data_schema)
@@ -9,5 +9,6 @@
   },
   "iot_class": "cloud_push",
   "loggers": ["jaraco.abode", "lomond"],
-  "requirements": ["jaraco.abode==6.2.1"]
+  "requirements": ["jaraco.abode==6.2.1"],
+  "single_config_entry": true
 }
@@ -28,7 +28,6 @@
       "invalid_mfa_code": "Invalid MFA code"
     },
     "abort": {
-      "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]",
       "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
     }
   },
homeassistant/components/acaia/__init__.py (new file, 31 lines)
@@ -0,0 +1,31 @@
"""Initialize the Acaia component."""

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

from .coordinator import AcaiaConfigEntry, AcaiaCoordinator

PLATFORMS = [
    Platform.BINARY_SENSOR,
    Platform.BUTTON,
    Platform.SENSOR,
]


async def async_setup_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool:
    """Set up acaia as config entry."""

    coordinator = AcaiaCoordinator(hass, entry)
    await coordinator.async_config_entry_first_refresh()

    entry.runtime_data = coordinator

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

    return True


async def async_unload_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool:
    """Unload a config entry."""

    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
homeassistant/components/acaia/binary_sensor.py (new file, 58 lines)
@@ -0,0 +1,58 @@
"""Binary sensor platform for Acaia scales."""

from collections.abc import Callable
from dataclasses import dataclass

from aioacaia.acaiascale import AcaiaScale

from homeassistant.components.binary_sensor import (
    BinarySensorDeviceClass,
    BinarySensorEntity,
    BinarySensorEntityDescription,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .coordinator import AcaiaConfigEntry
from .entity import AcaiaEntity


@dataclass(kw_only=True, frozen=True)
class AcaiaBinarySensorEntityDescription(BinarySensorEntityDescription):
    """Description for Acaia binary sensor entities."""

    is_on_fn: Callable[[AcaiaScale], bool]


BINARY_SENSORS: tuple[AcaiaBinarySensorEntityDescription, ...] = (
    AcaiaBinarySensorEntityDescription(
        key="timer_running",
        translation_key="timer_running",
        device_class=BinarySensorDeviceClass.RUNNING,
        is_on_fn=lambda scale: scale.timer_running,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: AcaiaConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up binary sensors."""

    coordinator = entry.runtime_data
    async_add_entities(
        AcaiaBinarySensor(coordinator, description) for description in BINARY_SENSORS
    )


class AcaiaBinarySensor(AcaiaEntity, BinarySensorEntity):
    """Representation of an Acaia binary sensor."""

    entity_description: AcaiaBinarySensorEntityDescription

    @property
    def is_on(self) -> bool:
        """Return true if the binary sensor is on."""
        return self.entity_description.is_on_fn(self._scale)
homeassistant/components/acaia/button.py (new file, 63 lines)
@@ -0,0 +1,63 @@
"""Button entities for Acaia scales."""

from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from typing import Any

from aioacaia.acaiascale import AcaiaScale

from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .coordinator import AcaiaConfigEntry
from .entity import AcaiaEntity

PARALLEL_UPDATES = 0


@dataclass(kw_only=True, frozen=True)
class AcaiaButtonEntityDescription(ButtonEntityDescription):
    """Description for acaia button entities."""

    press_fn: Callable[[AcaiaScale], Coroutine[Any, Any, None]]


BUTTONS: tuple[AcaiaButtonEntityDescription, ...] = (
    AcaiaButtonEntityDescription(
        key="tare",
        translation_key="tare",
        press_fn=lambda scale: scale.tare(),
    ),
    AcaiaButtonEntityDescription(
        key="reset_timer",
        translation_key="reset_timer",
        press_fn=lambda scale: scale.reset_timer(),
    ),
    AcaiaButtonEntityDescription(
        key="start_stop",
        translation_key="start_stop",
        press_fn=lambda scale: scale.start_stop_timer(),
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: AcaiaConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up button entities and services."""

    coordinator = entry.runtime_data
    async_add_entities(AcaiaButton(coordinator, description) for description in BUTTONS)


class AcaiaButton(AcaiaEntity, ButtonEntity):
    """Representation of an Acaia button."""

    entity_description: AcaiaButtonEntityDescription

    async def async_press(self) -> None:
        """Handle the button press."""
        await self.entity_description.press_fn(self._scale)
homeassistant/components/acaia/config_flow.py (new file, 149 lines)
@@ -0,0 +1,149 @@
"""Config flow for Acaia integration."""

import logging
from typing import Any

from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError, AcaiaUnknownDevice
from aioacaia.helpers import is_new_scale
import voluptuous as vol

from homeassistant.components.bluetooth import (
    BluetoothServiceInfoBleak,
    async_discovered_service_info,
)
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_ADDRESS, CONF_NAME
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.selector import (
    SelectOptionDict,
    SelectSelector,
    SelectSelectorConfig,
    SelectSelectorMode,
)

from .const import CONF_IS_NEW_STYLE_SCALE, DOMAIN

_LOGGER = logging.getLogger(__name__)


class AcaiaConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for acaia."""

    def __init__(self) -> None:
        """Initialize the config flow."""
        self._discovered: dict[str, Any] = {}
        self._discovered_devices: dict[str, str] = {}

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle a flow initialized by the user."""

        errors: dict[str, str] = {}

        if user_input is not None:
            mac = format_mac(user_input[CONF_ADDRESS])
            try:
                is_new_style_scale = await is_new_scale(mac)
            except AcaiaDeviceNotFound:
                errors["base"] = "device_not_found"
            except AcaiaError:
                _LOGGER.exception("Error occurred while connecting to the scale")
                errors["base"] = "unknown"
            except AcaiaUnknownDevice:
                return self.async_abort(reason="unsupported_device")
            else:
                await self.async_set_unique_id(mac)
                self._abort_if_unique_id_configured()

            if not errors:
                return self.async_create_entry(
                    title=self._discovered_devices[user_input[CONF_ADDRESS]],
                    data={
                        CONF_ADDRESS: mac,
                        CONF_IS_NEW_STYLE_SCALE: is_new_style_scale,
                    },
                )

        for device in async_discovered_service_info(self.hass):
            self._discovered_devices[device.address] = device.name

        if not self._discovered_devices:
            return self.async_abort(reason="no_devices_found")

        options = [
            SelectOptionDict(
                value=device_mac,
                label=f"{device_name} ({device_mac})",
            )
            for device_mac, device_name in self._discovered_devices.items()
        ]

        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_ADDRESS): SelectSelector(
                        SelectSelectorConfig(
                            options=options,
                            mode=SelectSelectorMode.DROPDOWN,
                        )
                    )
                }
            ),
            errors=errors,
        )

    async def async_step_bluetooth(
        self, discovery_info: BluetoothServiceInfoBleak
    ) -> ConfigFlowResult:
        """Handle a discovered Bluetooth device."""

        self._discovered[CONF_ADDRESS] = mac = format_mac(discovery_info.address)
        self._discovered[CONF_NAME] = discovery_info.name

        await self.async_set_unique_id(mac)
        self._abort_if_unique_id_configured()

        try:
            self._discovered[CONF_IS_NEW_STYLE_SCALE] = await is_new_scale(
                discovery_info.address
            )
        except AcaiaDeviceNotFound:
            _LOGGER.debug("Device not found during discovery")
            return self.async_abort(reason="device_not_found")
        except AcaiaError:
            _LOGGER.debug(
                "Error occurred while connecting to the scale during discovery",
                exc_info=True,
            )
            return self.async_abort(reason="unknown")
        except AcaiaUnknownDevice:
            _LOGGER.debug("Unsupported device during discovery")
            return self.async_abort(reason="unsupported_device")

        return await self.async_step_bluetooth_confirm()

    async def async_step_bluetooth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle confirmation of Bluetooth discovery."""

        if user_input is not None:
            return self.async_create_entry(
                title=self._discovered[CONF_NAME],
                data={
                    CONF_ADDRESS: self._discovered[CONF_ADDRESS],
                    CONF_IS_NEW_STYLE_SCALE: self._discovered[CONF_IS_NEW_STYLE_SCALE],
                },
            )

        self.context["title_placeholders"] = placeholders = {
            CONF_NAME: self._discovered[CONF_NAME]
        }

        self._set_confirm_only()
        return self.async_show_form(
            step_id="bluetooth_confirm",
            description_placeholders=placeholders,
        )
homeassistant/components/acaia/const.py (new file, 4 lines)
@@ -0,0 +1,4 @@
"""Constants for component."""

DOMAIN = "acaia"
CONF_IS_NEW_STYLE_SCALE = "is_new_style_scale"
homeassistant/components/acaia/coordinator.py (new file, 86 lines)
@@ -0,0 +1,86 @@
"""Coordinator for Acaia integration."""

from __future__ import annotations

from datetime import timedelta
import logging

from aioacaia.acaiascale import AcaiaScale
from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ADDRESS
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from .const import CONF_IS_NEW_STYLE_SCALE

SCAN_INTERVAL = timedelta(seconds=15)

_LOGGER = logging.getLogger(__name__)

type AcaiaConfigEntry = ConfigEntry[AcaiaCoordinator]


class AcaiaCoordinator(DataUpdateCoordinator[None]):
    """Class to handle fetching data from the scale."""

    config_entry: AcaiaConfigEntry

    def __init__(self, hass: HomeAssistant, entry: AcaiaConfigEntry) -> None:
        """Initialize coordinator."""
        super().__init__(
            hass,
            _LOGGER,
            name="acaia coordinator",
            update_interval=SCAN_INTERVAL,
            config_entry=entry,
        )

        self._scale = AcaiaScale(
            address_or_ble_device=entry.data[CONF_ADDRESS],
            name=entry.title,
            is_new_style_scale=entry.data[CONF_IS_NEW_STYLE_SCALE],
            notify_callback=self.async_update_listeners,
        )

    @property
    def scale(self) -> AcaiaScale:
        """Return the scale object."""
        return self._scale

    async def _async_update_data(self) -> None:
        """Fetch data."""

        # scale is already connected, return
        if self._scale.connected:
            return

        # scale is not connected, try to connect
        try:
            await self._scale.connect(setup_tasks=False)
        except (AcaiaDeviceNotFound, AcaiaError, TimeoutError) as ex:
            _LOGGER.debug(
                "Could not connect to scale: %s, Error: %s",
                self.config_entry.data[CONF_ADDRESS],
                ex,
            )
            self._scale.device_disconnected_handler(notify=False)
            return

        # connected, set up background tasks
        if not self._scale.heartbeat_task or self._scale.heartbeat_task.done():
            self._scale.heartbeat_task = self.config_entry.async_create_background_task(
                hass=self.hass,
                target=self._scale.send_heartbeats(),
                name="acaia_heartbeat_task",
            )

        if not self._scale.process_queue_task or self._scale.process_queue_task.done():
            self._scale.process_queue_task = (
                self.config_entry.async_create_background_task(
                    hass=self.hass,
                    target=self._scale.process_queue(),
                    name="acaia_process_queue_task",
                )
            )
homeassistant/components/acaia/diagnostics.py (new file, 31 lines)
@@ -0,0 +1,31 @@
"""Diagnostics support for Acaia."""

from __future__ import annotations

from dataclasses import asdict
from typing import Any

from homeassistant.core import HomeAssistant

from . import AcaiaConfigEntry


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant,
    entry: AcaiaConfigEntry,
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""
    coordinator = entry.runtime_data
    scale = coordinator.scale

    # collect all data sources
    return {
        "model": scale.model,
        "device_state": (
            asdict(scale.device_state) if scale.device_state is not None else ""
        ),
        "mac": scale.mac,
        "last_disconnect_time": scale.last_disconnect_time,
        "timer": scale.timer,
        "weight": scale.weight,
    }
homeassistant/components/acaia/entity.py (new file, 40 lines)
@@ -0,0 +1,40 @@
"""Base class for Acaia entities."""

from dataclasses import dataclass

from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import AcaiaCoordinator


@dataclass
class AcaiaEntity(CoordinatorEntity[AcaiaCoordinator]):
    """Common elements for all entities."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: AcaiaCoordinator,
        entity_description: EntityDescription,
    ) -> None:
        """Initialize the entity."""
        super().__init__(coordinator)
        self.entity_description = entity_description
        self._scale = coordinator.scale
        self._attr_unique_id = f"{self._scale.mac}_{entity_description.key}"

        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, self._scale.mac)},
            manufacturer="Acaia",
            model=self._scale.model,
            suggested_area="Kitchen",
        )

    @property
    def available(self) -> bool:
        """Returns whether entity is available."""
        return super().available and self._scale.connected
homeassistant/components/acaia/icons.json (new file, 24 lines)
@@ -0,0 +1,24 @@
{
  "entity": {
    "binary_sensor": {
      "timer_running": {
        "default": "mdi:timer",
        "state": {
          "on": "mdi:timer-play",
          "off": "mdi:timer-off"
        }
      }
    },
    "button": {
      "tare": {
        "default": "mdi:scale-balance"
      },
      "reset_timer": {
        "default": "mdi:timer-refresh"
      },
      "start_stop": {
        "default": "mdi:timer-play"
      }
    }
  }
}
homeassistant/components/acaia/manifest.json (new file, 29 lines)
@@ -0,0 +1,29 @@
{
  "domain": "acaia",
  "name": "Acaia",
  "bluetooth": [
    {
      "manufacturer_id": 16962
    },
    {
      "local_name": "ACAIA*"
    },
    {
      "local_name": "PYXIS-*"
    },
    {
      "local_name": "LUNAR-*"
    },
    {
      "local_name": "PROCHBT001"
    }
  ],
  "codeowners": ["@zweckj"],
  "config_flow": true,
  "dependencies": ["bluetooth_adapters"],
  "documentation": "https://www.home-assistant.io/integrations/acaia",
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["aioacaia"],
  "requirements": ["aioacaia==0.1.9"]
}
homeassistant/components/acaia/sensor.py (new file, 143 lines)
@@ -0,0 +1,143 @@
"""Sensor platform for Acaia."""

from collections.abc import Callable
from dataclasses import dataclass

from aioacaia.acaiascale import AcaiaDeviceState, AcaiaScale
from aioacaia.const import UnitMass as AcaiaUnitOfMass

from homeassistant.components.sensor import (
    RestoreSensor,
    SensorDeviceClass,
    SensorEntity,
    SensorEntityDescription,
    SensorExtraStoredData,
    SensorStateClass,
)
from homeassistant.const import PERCENTAGE, UnitOfMass, UnitOfVolumeFlowRate
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .coordinator import AcaiaConfigEntry
from .entity import AcaiaEntity


@dataclass(kw_only=True, frozen=True)
class AcaiaSensorEntityDescription(SensorEntityDescription):
    """Description for Acaia sensor entities."""

    value_fn: Callable[[AcaiaScale], int | float | None]


@dataclass(kw_only=True, frozen=True)
class AcaiaDynamicUnitSensorEntityDescription(AcaiaSensorEntityDescription):
    """Description for Acaia sensor entities with dynamic units."""

    unit_fn: Callable[[AcaiaDeviceState], str] | None = None


SENSORS: tuple[AcaiaSensorEntityDescription, ...] = (
    AcaiaDynamicUnitSensorEntityDescription(
        key="weight",
        device_class=SensorDeviceClass.WEIGHT,
        native_unit_of_measurement=UnitOfMass.GRAMS,
        state_class=SensorStateClass.MEASUREMENT,
        unit_fn=lambda data: (
            UnitOfMass.OUNCES
            if data.units == AcaiaUnitOfMass.OUNCES
            else UnitOfMass.GRAMS
        ),
        value_fn=lambda scale: scale.weight,
    ),
    AcaiaDynamicUnitSensorEntityDescription(
        key="flow_rate",
        device_class=SensorDeviceClass.VOLUME_FLOW_RATE,
        native_unit_of_measurement=UnitOfVolumeFlowRate.MILLILITERS_PER_SECOND,
        suggested_display_precision=1,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda scale: scale.flow_rate,
    ),
)
RESTORE_SENSORS: tuple[AcaiaSensorEntityDescription, ...] = (
    AcaiaSensorEntityDescription(
        key="battery",
        device_class=SensorDeviceClass.BATTERY,
        native_unit_of_measurement=PERCENTAGE,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda scale: (
            scale.device_state.battery_level if scale.device_state else None
        ),
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: AcaiaConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up sensors."""

    coordinator = entry.runtime_data
    entities: list[SensorEntity] = [
        AcaiaSensor(coordinator, entity_description) for entity_description in SENSORS
    ]
    entities.extend(
        AcaiaRestoreSensor(coordinator, entity_description)
        for entity_description in RESTORE_SENSORS
    )
    async_add_entities(entities)


class AcaiaSensor(AcaiaEntity, SensorEntity):
    """Representation of an Acaia sensor."""

    entity_description: AcaiaDynamicUnitSensorEntityDescription

    @property
    def native_unit_of_measurement(self) -> str | None:
        """Return the unit of measurement of this entity."""
        if (
            self._scale.device_state is not None
            and self.entity_description.unit_fn is not None
        ):
            return self.entity_description.unit_fn(self._scale.device_state)
        return self.entity_description.native_unit_of_measurement

    @property
    def native_value(self) -> int | float | None:
        """Return the state of the entity."""
        return self.entity_description.value_fn(self._scale)


class AcaiaRestoreSensor(AcaiaEntity, RestoreSensor):
    """Representation of an Acaia sensor with restore capabilities."""

    entity_description: AcaiaSensorEntityDescription
    _restored_data: SensorExtraStoredData | None = None

    async def async_added_to_hass(self) -> None:
        """Handle entity which will be added."""
        await super().async_added_to_hass()

        self._restored_data = await self.async_get_last_sensor_data()
        if self._restored_data is not None:
            self._attr_native_value = self._restored_data.native_value
            self._attr_native_unit_of_measurement = (
                self._restored_data.native_unit_of_measurement
            )

        if self._scale.device_state is not None:
            self._attr_native_value = self.entity_description.value_fn(self._scale)

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        if self._scale.device_state is not None:
            self._attr_native_value = self.entity_description.value_fn(self._scale)
        self._async_write_ha_state()

    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        return super().available or self._restored_data is not None
homeassistant/components/acaia/strings.json (new file, 46 lines)
@@ -0,0 +1,46 @@
{
  "config": {
    "flow_title": "{name}",
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
      "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]",
      "unsupported_device": "This device is not supported."
    },
    "error": {
      "device_not_found": "Device could not be found.",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "step": {
      "bluetooth_confirm": {
        "description": "[%key:component::bluetooth::config::step::bluetooth_confirm::description%]"
      },
      "user": {
        "description": "[%key:component::bluetooth::config::step::user::description%]",
        "data": {
          "address": "[%key:common::config_flow::data::device%]"
        },
        "data_description": {
          "address": "Select Acaia scale you want to set up"
        }
      }
    }
  },
  "entity": {
    "binary_sensor": {
      "timer_running": {
        "name": "Timer running"
      }
    },
    "button": {
      "tare": {
        "name": "Tare"
      },
      "reset_timer": {
        "name": "Reset timer"
      },
      "start_stop": {
        "name": "Start/stop timer"
      }
    }
  }
}
@@ -2,13 +2,11 @@

 from __future__ import annotations

-from dataclasses import dataclass
 import logging

 from accuweather import AccuWeather

 from homeassistant.components.sensor import DOMAIN as SENSOR_PLATFORM
-from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_API_KEY, CONF_NAME, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import entity_registry as er
@@ -16,7 +14,9 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession

 from .const import DOMAIN, UPDATE_INTERVAL_DAILY_FORECAST, UPDATE_INTERVAL_OBSERVATION
 from .coordinator import (
+    AccuWeatherConfigEntry,
     AccuWeatherDailyForecastDataUpdateCoordinator,
+    AccuWeatherData,
     AccuWeatherObservationDataUpdateCoordinator,
 )

@@ -25,17 +25,6 @@ _LOGGER = logging.getLogger(__name__)
 PLATFORMS = [Platform.SENSOR, Platform.WEATHER]


-@dataclass
-class AccuWeatherData:
-    """Data for AccuWeather integration."""
-
-    coordinator_observation: AccuWeatherObservationDataUpdateCoordinator
-    coordinator_daily_forecast: AccuWeatherDailyForecastDataUpdateCoordinator
-
-
-type AccuWeatherConfigEntry = ConfigEntry[AccuWeatherData]
-
-
 async def async_setup_entry(hass: HomeAssistant, entry: AccuWeatherConfigEntry) -> bool:
     """Set up AccuWeather as config entry."""
     api_key: str = entry.data[CONF_API_KEY]
@@ -50,6 +39,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AccuWeatherConfigEntry)

     coordinator_observation = AccuWeatherObservationDataUpdateCoordinator(
         hass,
+        entry,
         accuweather,
         name,
         "observation",
@@ -58,6 +48,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AccuWeatherConfigEntry)

     coordinator_daily_forecast = AccuWeatherDailyForecastDataUpdateCoordinator(
         hass,
+        entry,
         accuweather,
         name,
         "daily forecast",
@@ -1,6 +1,9 @@
 """The AccuWeather coordinator."""

+from __future__ import annotations
+
 from asyncio import timeout
+from dataclasses import dataclass
 from datetime import timedelta
 import logging
 from typing import TYPE_CHECKING, Any
@@ -8,6 +11,7 @@ from typing import TYPE_CHECKING, Any
 from accuweather import AccuWeather, ApiError, InvalidApiKeyError, RequestsExceededError
 from aiohttp.client_exceptions import ClientConnectorError

+from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
 from homeassistant.helpers.update_coordinator import (
@@ -23,6 +27,17 @@ EXCEPTIONS = (ApiError, ClientConnectorError, InvalidApiKeyError, RequestsExceed
 _LOGGER = logging.getLogger(__name__)


+@dataclass
+class AccuWeatherData:
+    """Data for AccuWeather integration."""
+
+    coordinator_observation: AccuWeatherObservationDataUpdateCoordinator
+    coordinator_daily_forecast: AccuWeatherDailyForecastDataUpdateCoordinator
+
+
+type AccuWeatherConfigEntry = ConfigEntry[AccuWeatherData]
+
+
 class AccuWeatherObservationDataUpdateCoordinator(
     DataUpdateCoordinator[dict[str, Any]]
 ):
@@ -31,6 +46,7 @@ class AccuWeatherObservationDataUpdateCoordinator(
     def __init__(
         self,
         hass: HomeAssistant,
+        config_entry: AccuWeatherConfigEntry,
         accuweather: AccuWeather,
         name: str,
         coordinator_type: str,
@@ -48,6 +64,7 @@ class AccuWeatherObservationDataUpdateCoordinator(
         super().__init__(
             hass,
             _LOGGER,
+            config_entry=config_entry,
             name=f"{name} ({coordinator_type})",
             update_interval=update_interval,
         )
@@ -73,6 +90,7 @@ class AccuWeatherDailyForecastDataUpdateCoordinator(
     def __init__(
         self,
         hass: HomeAssistant,
+        config_entry: AccuWeatherConfigEntry,
         accuweather: AccuWeather,
         name: str,
         coordinator_type: str,
@@ -90,6 +108,7 @@ class AccuWeatherDailyForecastDataUpdateCoordinator(
         super().__init__(
             hass,
             _LOGGER,
+            config_entry=config_entry,
             name=f"{name} ({coordinator_type})",
             update_interval=update_interval,
         )
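The recurring change in these coordinator hunks (and the advantage_air one below) is handing the config entry to the DataUpdateCoordinator base class via its config_entry keyword. A minimal sketch of that pattern follows; MyCoordinator, "my integration", and the placeholder update method are invented names for illustration only, not part of this diff.

# Sketch only: a coordinator that passes its config entry to the base class.
from datetime import timedelta
import logging

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)


class MyCoordinator(DataUpdateCoordinator[dict]):
    """Illustrative coordinator; only the config_entry keyword mirrors the diff."""

    def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None:
        super().__init__(
            hass,
            _LOGGER,
            config_entry=entry,  # the parameter these hunks add
            name="my integration",
            update_interval=timedelta(minutes=10),
        )

    async def _async_update_data(self) -> dict:
        """Fetch the latest data (placeholder body)."""
        return {}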
@@ -8,7 +8,7 @@ from homeassistant.components.diagnostics import async_redact_data
 from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE
 from homeassistant.core import HomeAssistant

-from . import AccuWeatherConfigEntry, AccuWeatherData
+from .coordinator import AccuWeatherConfigEntry, AccuWeatherData

 TO_REDACT = {CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE}

@@ -7,7 +7,6 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["accuweather"],
-  "quality_scale": "platinum",
-  "requirements": ["accuweather==3.0.0"],
+  "requirements": ["accuweather==4.0.0"],
   "single_config_entry": true
 }
@@ -28,7 +28,6 @@ from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.update_coordinator import CoordinatorEntity

-from . import AccuWeatherConfigEntry
 from .const import (
     API_METRIC,
     ATTR_CATEGORY,
@@ -41,6 +40,7 @@ from .const import (
     MAX_FORECAST_DAYS,
 )
 from .coordinator import (
+    AccuWeatherConfigEntry,
     AccuWeatherDailyForecastDataUpdateCoordinator,
     AccuWeatherObservationDataUpdateCoordinator,
 )
@@ -9,8 +9,8 @@ from accuweather.const import ENDPOINT
 from homeassistant.components import system_health
 from homeassistant.core import HomeAssistant, callback

-from . import AccuWeatherConfigEntry
 from .const import DOMAIN
+from .coordinator import AccuWeatherConfigEntry


 @callback
@@ -33,7 +33,6 @@ from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.util.dt import utc_from_timestamp

-from . import AccuWeatherConfigEntry, AccuWeatherData
 from .const import (
     API_METRIC,
     ATTR_DIRECTION,
@@ -43,7 +42,9 @@ from .const import (
     CONDITION_MAP,
 )
 from .coordinator import (
+    AccuWeatherConfigEntry,
     AccuWeatherDailyForecastDataUpdateCoordinator,
+    AccuWeatherData,
     AccuWeatherObservationDataUpdateCoordinator,
 )

@@ -4,5 +4,6 @@
   "codeowners": [],
   "documentation": "https://www.home-assistant.io/integrations/acer_projector",
   "iot_class": "local_polling",
+  "quality_scale": "legacy",
   "requirements": ["pyserial==3.5"]
 }
@@ -3,5 +3,6 @@
   "name": "Actiontec",
   "codeowners": [],
   "documentation": "https://www.home-assistant.io/integrations/actiontec",
-  "iot_class": "local_polling"
+  "iot_class": "local_polling",
+  "quality_scale": "legacy"
 }
@@ -7,7 +7,6 @@ from typing import Any
 from adguardhome import AdGuardHome, AdGuardHomeConnectionError
 import voluptuous as vol

-from homeassistant.components.hassio import HassioServiceInfo
 from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
 from homeassistant.const import (
     CONF_HOST,
@@ -18,6 +17,7 @@ from homeassistant.const import (
     CONF_VERIFY_SSL,
 )
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
+from homeassistant.helpers.service_info.hassio import HassioServiceInfo

 from .const import DOMAIN

@@ -5,5 +5,6 @@
   "documentation": "https://www.home-assistant.io/integrations/ads",
   "iot_class": "local_push",
   "loggers": ["pyads"],
+  "quality_scale": "legacy",
   "requirements": ["pyads==3.4.0"]
 }
@@ -55,6 +55,7 @@ async def async_setup_entry(
     coordinator = DataUpdateCoordinator(
         hass,
         _LOGGER,
+        config_entry=entry,
         name="Advantage Air",
         update_method=async_get,
         update_interval=timedelta(seconds=ADVANTAGE_AIR_SYNC_INTERVAL),
|
|||||||
"documentation": "https://www.home-assistant.io/integrations/advantage_air",
|
"documentation": "https://www.home-assistant.io/integrations/advantage_air",
|
||||||
"iot_class": "local_polling",
|
"iot_class": "local_polling",
|
||||||
"loggers": ["advantage_air"],
|
"loggers": ["advantage_air"],
|
||||||
"quality_scale": "platinum",
|
|
||||||
"requirements": ["advantage-air==0.4.4"]
|
"requirements": ["advantage-air==0.4.4"]
|
||||||
}
|
}
|
||||||
|
@@ -1,10 +1,9 @@
 """The AEMET OpenData component."""

-from dataclasses import dataclass
 import logging

 from aemet_opendata.exceptions import AemetError, TownNotFound
-from aemet_opendata.interface import AEMET, ConnectionOptions
+from aemet_opendata.interface import AEMET, ConnectionOptions, UpdateFeature

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME
@@ -13,20 +12,10 @@ from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers import aiohttp_client

 from .const import CONF_STATION_UPDATES, PLATFORMS
-from .coordinator import WeatherUpdateCoordinator
+from .coordinator import AemetConfigEntry, AemetData, WeatherUpdateCoordinator

 _LOGGER = logging.getLogger(__name__)

-type AemetConfigEntry = ConfigEntry[AemetData]
-
-
-@dataclass
-class AemetData:
-    """Aemet runtime data."""
-
-    name: str
-    coordinator: WeatherUpdateCoordinator
-

 async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> bool:
     """Set up AEMET OpenData as config entry."""
@@ -34,9 +23,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> boo
     api_key = entry.data[CONF_API_KEY]
     latitude = entry.data[CONF_LATITUDE]
     longitude = entry.data[CONF_LONGITUDE]
-    station_updates = entry.options.get(CONF_STATION_UPDATES, True)
+    update_features: int = UpdateFeature.FORECAST
+    if entry.options.get(CONF_STATION_UPDATES, True):
+        update_features |= UpdateFeature.STATION

-    options = ConnectionOptions(api_key, station_updates)
+    options = ConnectionOptions(api_key, update_features)
     aemet = AEMET(aiohttp_client.async_get_clientsession(hass), options)
     try:
         await aemet.select_coordinates(latitude, longitude)
@@ -46,7 +37,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> boo
     except AemetError as err:
         raise ConfigEntryNotReady(err) from err

-    weather_coordinator = WeatherUpdateCoordinator(hass, aemet)
+    weather_coordinator = WeatherUpdateCoordinator(hass, entry, aemet)
     await weather_coordinator.async_config_entry_first_refresh()

     entry.runtime_data = AemetData(name=name, coordinator=weather_coordinator)
@ -45,7 +45,7 @@ class AemetConfigFlow(ConfigFlow, domain=DOMAIN):
|
|||||||
await self.async_set_unique_id(f"{latitude}-{longitude}")
|
await self.async_set_unique_id(f"{latitude}-{longitude}")
|
||||||
self._abort_if_unique_id_configured()
|
self._abort_if_unique_id_configured()
|
||||||
|
|
||||||
options = ConnectionOptions(user_input[CONF_API_KEY], False)
|
options = ConnectionOptions(user_input[CONF_API_KEY])
|
||||||
aemet = AEMET(aiohttp_client.async_get_clientsession(self.hass), options)
|
aemet = AEMET(aiohttp_client.async_get_clientsession(self.hass), options)
|
||||||
try:
|
try:
|
||||||
await aemet.select_coordinates(latitude, longitude)
|
await aemet.select_coordinates(latitude, longitude)
|
||||||
|
@ -3,6 +3,7 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from asyncio import timeout
|
from asyncio import timeout
|
||||||
|
from dataclasses import dataclass
|
||||||
from datetime import timedelta
|
from datetime import timedelta
|
||||||
import logging
|
import logging
|
||||||
from typing import Any, Final, cast
|
from typing import Any, Final, cast
|
||||||
@ -19,6 +20,7 @@ from aemet_opendata.helpers import dict_nested_value
|
|||||||
from aemet_opendata.interface import AEMET
|
from aemet_opendata.interface import AEMET
|
||||||
|
|
||||||
from homeassistant.components.weather import Forecast
|
from homeassistant.components.weather import Forecast
|
||||||
|
from homeassistant.config_entries import ConfigEntry
|
||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||||
|
|
||||||
@ -29,6 +31,16 @@ _LOGGER = logging.getLogger(__name__)
|
|||||||
API_TIMEOUT: Final[int] = 120
|
API_TIMEOUT: Final[int] = 120
|
||||||
WEATHER_UPDATE_INTERVAL = timedelta(minutes=10)
|
WEATHER_UPDATE_INTERVAL = timedelta(minutes=10)
|
||||||
|
|
||||||
|
type AemetConfigEntry = ConfigEntry[AemetData]
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class AemetData:
|
||||||
|
"""Aemet runtime data."""
|
||||||
|
|
||||||
|
name: str
|
||||||
|
coordinator: WeatherUpdateCoordinator
|
||||||
|
|
||||||
|
|
||||||
class WeatherUpdateCoordinator(DataUpdateCoordinator):
|
class WeatherUpdateCoordinator(DataUpdateCoordinator):
|
||||||
"""Weather data update coordinator."""
|
"""Weather data update coordinator."""
|
||||||
@ -36,6 +48,7 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
|
|||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
|
entry: AemetConfigEntry,
|
||||||
aemet: AEMET,
|
aemet: AEMET,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Initialize coordinator."""
|
"""Initialize coordinator."""
|
||||||
@ -44,6 +57,7 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
|
|||||||
super().__init__(
|
super().__init__(
|
||||||
hass,
|
hass,
|
||||||
_LOGGER,
|
_LOGGER,
|
||||||
|
config_entry=entry,
|
||||||
name=DOMAIN,
|
name=DOMAIN,
|
||||||
update_interval=WEATHER_UPDATE_INTERVAL,
|
update_interval=WEATHER_UPDATE_INTERVAL,
|
||||||
)
|
)
|
||||||
|
@ -15,7 +15,7 @@ from homeassistant.const import (
|
|||||||
)
|
)
|
||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
|
|
||||||
from . import AemetConfigEntry
|
from .coordinator import AemetConfigEntry
|
||||||
|
|
||||||
TO_REDACT_CONFIG = [
|
TO_REDACT_CONFIG = [
|
||||||
CONF_API_KEY,
|
CONF_API_KEY,
|
||||||
|
@ -6,5 +6,5 @@
|
|||||||
"documentation": "https://www.home-assistant.io/integrations/aemet",
|
"documentation": "https://www.home-assistant.io/integrations/aemet",
|
||||||
"iot_class": "cloud_polling",
|
"iot_class": "cloud_polling",
|
||||||
"loggers": ["aemet_opendata"],
|
"loggers": ["aemet_opendata"],
|
||||||
"requirements": ["AEMET-OpenData==0.5.4"]
|
"requirements": ["AEMET-OpenData==0.6.3"]
|
||||||
}
|
}
|
||||||
|
@ -55,7 +55,6 @@ from homeassistant.core import HomeAssistant
|
|||||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||||
from homeassistant.util import dt as dt_util
|
from homeassistant.util import dt as dt_util
|
||||||
|
|
||||||
from . import AemetConfigEntry
|
|
||||||
from .const import (
|
from .const import (
|
||||||
ATTR_API_CONDITION,
|
ATTR_API_CONDITION,
|
||||||
ATTR_API_FORECAST_CONDITION,
|
ATTR_API_FORECAST_CONDITION,
|
||||||
@ -87,7 +86,7 @@ from .const import (
|
|||||||
ATTR_API_WIND_SPEED,
|
ATTR_API_WIND_SPEED,
|
||||||
CONDITIONS_MAP,
|
CONDITIONS_MAP,
|
||||||
)
|
)
|
||||||
from .coordinator import WeatherUpdateCoordinator
|
from .coordinator import AemetConfigEntry, WeatherUpdateCoordinator
|
||||||
from .entity import AemetEntity
|
from .entity import AemetEntity
|
||||||
|
|
||||||
|
|
||||||
@ -249,6 +248,7 @@ WEATHER_SENSORS: Final[tuple[AemetSensorEntityDescription, ...]] = (
|
|||||||
name="Rain",
|
name="Rain",
|
||||||
native_unit_of_measurement=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
|
native_unit_of_measurement=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
|
||||||
device_class=SensorDeviceClass.PRECIPITATION_INTENSITY,
|
device_class=SensorDeviceClass.PRECIPITATION_INTENSITY,
|
||||||
|
state_class=SensorStateClass.MEASUREMENT,
|
||||||
),
|
),
|
||||||
AemetSensorEntityDescription(
|
AemetSensorEntityDescription(
|
||||||
key=ATTR_API_RAIN_PROB,
|
key=ATTR_API_RAIN_PROB,
|
||||||
@ -263,6 +263,7 @@ WEATHER_SENSORS: Final[tuple[AemetSensorEntityDescription, ...]] = (
|
|||||||
name="Snow",
|
name="Snow",
|
||||||
native_unit_of_measurement=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
|
native_unit_of_measurement=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
|
||||||
device_class=SensorDeviceClass.PRECIPITATION_INTENSITY,
|
device_class=SensorDeviceClass.PRECIPITATION_INTENSITY,
|
||||||
|
state_class=SensorStateClass.MEASUREMENT,
|
||||||
),
|
),
|
||||||
AemetSensorEntityDescription(
|
AemetSensorEntityDescription(
|
||||||
key=ATTR_API_SNOW_PROB,
|
key=ATTR_API_SNOW_PROB,
|
||||||
|
@ -27,9 +27,8 @@ from homeassistant.const import (
|
|||||||
from homeassistant.core import HomeAssistant, callback
|
from homeassistant.core import HomeAssistant, callback
|
||||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||||
|
|
||||||
from . import AemetConfigEntry
|
|
||||||
from .const import CONDITIONS_MAP
|
from .const import CONDITIONS_MAP
|
||||||
from .coordinator import WeatherUpdateCoordinator
|
from .coordinator import AemetConfigEntry, WeatherUpdateCoordinator
|
||||||
from .entity import AemetEntity
|
from .entity import AemetEntity
|
||||||
|
|
||||||
|
|
||||||
|
@ -5,12 +5,7 @@ from __future__ import annotations
|
|||||||
from homeassistant.components.alarm_control_panel import (
|
from homeassistant.components.alarm_control_panel import (
|
||||||
AlarmControlPanelEntity,
|
AlarmControlPanelEntity,
|
||||||
AlarmControlPanelEntityFeature,
|
AlarmControlPanelEntityFeature,
|
||||||
)
|
AlarmControlPanelState,
|
||||||
from homeassistant.const import (
|
|
||||||
STATE_ALARM_ARMED_AWAY,
|
|
||||||
STATE_ALARM_ARMED_HOME,
|
|
||||||
STATE_ALARM_ARMED_NIGHT,
|
|
||||||
STATE_ALARM_DISARMED,
|
|
||||||
)
|
)
|
||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
from homeassistant.helpers.device_registry import DeviceInfo
|
from homeassistant.helpers.device_registry import DeviceInfo
|
||||||
@ -65,37 +60,37 @@ class AgentBaseStation(AlarmControlPanelEntity):
|
|||||||
self._attr_available = self._client.is_available
|
self._attr_available = self._client.is_available
|
||||||
armed = self._client.is_armed
|
armed = self._client.is_armed
|
||||||
if armed is None:
|
if armed is None:
|
||||||
self._attr_state = None
|
self._attr_alarm_state = None
|
||||||
return
|
return
|
||||||
if armed:
|
if armed:
|
||||||
prof = (await self._client.get_active_profile()).lower()
|
prof = (await self._client.get_active_profile()).lower()
|
||||||
self._attr_state = STATE_ALARM_ARMED_AWAY
|
self._attr_alarm_state = AlarmControlPanelState.ARMED_AWAY
|
||||||
if prof == CONF_HOME_MODE_NAME:
|
if prof == CONF_HOME_MODE_NAME:
|
||||||
self._attr_state = STATE_ALARM_ARMED_HOME
|
self._attr_alarm_state = AlarmControlPanelState.ARMED_HOME
|
||||||
elif prof == CONF_NIGHT_MODE_NAME:
|
elif prof == CONF_NIGHT_MODE_NAME:
|
||||||
self._attr_state = STATE_ALARM_ARMED_NIGHT
|
self._attr_alarm_state = AlarmControlPanelState.ARMED_NIGHT
|
||||||
else:
|
else:
|
||||||
self._attr_state = STATE_ALARM_DISARMED
|
self._attr_alarm_state = AlarmControlPanelState.DISARMED
|
||||||
|
|
||||||
async def async_alarm_disarm(self, code: str | None = None) -> None:
|
async def async_alarm_disarm(self, code: str | None = None) -> None:
|
||||||
"""Send disarm command."""
|
"""Send disarm command."""
|
||||||
await self._client.disarm()
|
await self._client.disarm()
|
||||||
self._attr_state = STATE_ALARM_DISARMED
|
self._attr_alarm_state = AlarmControlPanelState.DISARMED
|
||||||
|
|
||||||
async def async_alarm_arm_away(self, code: str | None = None) -> None:
|
async def async_alarm_arm_away(self, code: str | None = None) -> None:
|
||||||
"""Send arm away command. Uses custom mode."""
|
"""Send arm away command. Uses custom mode."""
|
||||||
await self._client.arm()
|
await self._client.arm()
|
||||||
await self._client.set_active_profile(CONF_AWAY_MODE_NAME)
|
await self._client.set_active_profile(CONF_AWAY_MODE_NAME)
|
||||||
self._attr_state = STATE_ALARM_ARMED_AWAY
|
self._attr_alarm_state = AlarmControlPanelState.ARMED_AWAY
|
||||||
|
|
||||||
async def async_alarm_arm_home(self, code: str | None = None) -> None:
|
async def async_alarm_arm_home(self, code: str | None = None) -> None:
|
||||||
"""Send arm home command. Uses custom mode."""
|
"""Send arm home command. Uses custom mode."""
|
||||||
await self._client.arm()
|
await self._client.arm()
|
||||||
await self._client.set_active_profile(CONF_HOME_MODE_NAME)
|
await self._client.set_active_profile(CONF_HOME_MODE_NAME)
|
||||||
self._attr_state = STATE_ALARM_ARMED_HOME
|
self._attr_alarm_state = AlarmControlPanelState.ARMED_HOME
|
||||||
|
|
||||||
async def async_alarm_arm_night(self, code: str | None = None) -> None:
|
async def async_alarm_arm_night(self, code: str | None = None) -> None:
|
||||||
"""Send arm night command. Uses custom mode."""
|
"""Send arm night command. Uses custom mode."""
|
||||||
await self._client.arm()
|
await self._client.arm()
|
||||||
await self._client.set_active_profile(CONF_NIGHT_MODE_NAME)
|
await self._client.set_active_profile(CONF_NIGHT_MODE_NAME)
|
||||||
self._attr_state = STATE_ALARM_ARMED_NIGHT
|
self._attr_alarm_state = AlarmControlPanelState.ARMED_NIGHT
|
||||||
|
@ -6,5 +6,5 @@
|
|||||||
"documentation": "https://www.home-assistant.io/integrations/agent_dvr",
|
"documentation": "https://www.home-assistant.io/integrations/agent_dvr",
|
||||||
"iot_class": "local_polling",
|
"iot_class": "local_polling",
|
||||||
"loggers": ["agent"],
|
"loggers": ["agent"],
|
||||||
"requirements": ["agent-py==0.0.23"]
|
"requirements": ["agent-py==0.0.24"]
|
||||||
}
|
}
|
||||||
|
@ -9,9 +9,10 @@ from typing import TYPE_CHECKING
|
|||||||
from airgradient import AirGradientClient, AirGradientError, Config, Measures
|
from airgradient import AirGradientClient, AirGradientError, Config, Measures
|
||||||
|
|
||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
|
from homeassistant.helpers import device_registry as dr
|
||||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||||
|
|
||||||
from .const import LOGGER
|
from .const import DOMAIN, LOGGER
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from . import AirGradientConfigEntry
|
from . import AirGradientConfigEntry
|
||||||
@ -29,6 +30,7 @@ class AirGradientCoordinator(DataUpdateCoordinator[AirGradientData]):
|
|||||||
"""Class to manage fetching AirGradient data."""
|
"""Class to manage fetching AirGradient data."""
|
||||||
|
|
||||||
config_entry: AirGradientConfigEntry
|
config_entry: AirGradientConfigEntry
|
||||||
|
_current_version: str
|
||||||
|
|
||||||
def __init__(self, hass: HomeAssistant, client: AirGradientClient) -> None:
|
def __init__(self, hass: HomeAssistant, client: AirGradientClient) -> None:
|
||||||
"""Initialize coordinator."""
|
"""Initialize coordinator."""
|
||||||
@ -42,11 +44,27 @@ class AirGradientCoordinator(DataUpdateCoordinator[AirGradientData]):
|
|||||||
assert self.config_entry.unique_id
|
assert self.config_entry.unique_id
|
||||||
self.serial_number = self.config_entry.unique_id
|
self.serial_number = self.config_entry.unique_id
|
||||||
|
|
||||||
|
async def _async_setup(self) -> None:
|
||||||
|
"""Set up the coordinator."""
|
||||||
|
self._current_version = (
|
||||||
|
await self.client.get_current_measures()
|
||||||
|
).firmware_version
|
||||||
|
|
||||||
async def _async_update_data(self) -> AirGradientData:
|
async def _async_update_data(self) -> AirGradientData:
|
||||||
try:
|
try:
|
||||||
measures = await self.client.get_current_measures()
|
measures = await self.client.get_current_measures()
|
||||||
config = await self.client.get_config()
|
config = await self.client.get_config()
|
||||||
except AirGradientError as error:
|
except AirGradientError as error:
|
||||||
raise UpdateFailed(error) from error
|
raise UpdateFailed(error) from error
|
||||||
else:
|
if measures.firmware_version != self._current_version:
|
||||||
return AirGradientData(measures, config)
|
device_registry = dr.async_get(self.hass)
|
||||||
|
device_entry = device_registry.async_get_device(
|
||||||
|
identifiers={(DOMAIN, self.serial_number)}
|
||||||
|
)
|
||||||
|
assert device_entry
|
||||||
|
device_registry.async_update_device(
|
||||||
|
device_entry.id,
|
||||||
|
sw_version=measures.firmware_version,
|
||||||
|
)
|
||||||
|
self._current_version = measures.firmware_version
|
||||||
|
return AirGradientData(measures, config)
|
||||||
|
18
homeassistant/components/airgradient/diagnostics.py
Normal file
18
homeassistant/components/airgradient/diagnostics.py
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
"""Diagnostics support for Airgradient."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from dataclasses import asdict
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from homeassistant.core import HomeAssistant
|
||||||
|
|
||||||
|
from . import AirGradientConfigEntry
|
||||||
|
|
||||||
|
|
||||||
|
async def async_get_config_entry_diagnostics(
|
||||||
|
hass: HomeAssistant, entry: AirGradientConfigEntry
|
||||||
|
) -> dict[str, Any]:
|
||||||
|
"""Return diagnostics for a config entry."""
|
||||||
|
|
||||||
|
return asdict(entry.runtime_data.data)
|
@ -6,6 +6,6 @@
|
|||||||
"documentation": "https://www.home-assistant.io/integrations/airgradient",
|
"documentation": "https://www.home-assistant.io/integrations/airgradient",
|
||||||
"integration_type": "device",
|
"integration_type": "device",
|
||||||
"iot_class": "local_polling",
|
"iot_class": "local_polling",
|
||||||
"requirements": ["airgradient==0.9.0"],
|
"requirements": ["airgradient==0.9.1"],
|
||||||
"zeroconf": ["_airgradient._tcp.local."]
|
"zeroconf": ["_airgradient._tcp.local."]
|
||||||
}
|
}
|
||||||
|
80
homeassistant/components/airgradient/quality_scale.yaml
Normal file
80
homeassistant/components/airgradient/quality_scale.yaml
Normal file
@ -0,0 +1,80 @@
|
|||||||
|
rules:
|
||||||
|
# Bronze
|
||||||
|
action-setup:
|
||||||
|
status: exempt
|
||||||
|
comment: |
|
||||||
|
This integration does not provide additional actions.
|
||||||
|
appropriate-polling: done
|
||||||
|
brands: done
|
||||||
|
common-modules: done
|
||||||
|
config-flow-test-coverage: done
|
||||||
|
config-flow: done
|
||||||
|
dependency-transparency: done
|
||||||
|
docs-actions:
|
||||||
|
status: exempt
|
||||||
|
comment: |
|
||||||
|
This integration does not provide additional actions.
|
||||||
|
docs-high-level-description: todo
|
||||||
|
docs-installation-instructions: todo
|
||||||
|
docs-removal-instructions: todo
|
||||||
|
entity-event-setup:
|
||||||
|
status: exempt
|
||||||
|
comment: |
|
||||||
|
Entities of this integration does not explicitly subscribe to events.
|
||||||
|
entity-unique-id: done
|
||||||
|
has-entity-name: done
|
||||||
|
runtime-data: done
|
||||||
|
test-before-configure: done
|
||||||
|
test-before-setup: done
|
||||||
|
unique-config-entry: done
|
||||||
|
|
||||||
|
# Silver
|
||||||
|
action-exceptions: todo
|
||||||
|
config-entry-unloading: done
|
||||||
|
docs-configuration-parameters: todo
|
||||||
|
docs-installation-parameters: todo
|
||||||
|
entity-unavailable: done
|
||||||
|
integration-owner: done
|
||||||
|
log-when-unavailable: done
|
||||||
|
parallel-updates: todo
|
||||||
|
reauthentication-flow:
|
||||||
|
status: exempt
|
||||||
|
comment: |
|
||||||
|
This integration does not require authentication.
|
||||||
|
test-coverage: done
|
||||||
|
# Gold
|
||||||
|
devices: done
|
||||||
|
diagnostics: done
|
||||||
|
discovery-update-info: done
|
||||||
|
discovery: done
|
||||||
|
docs-data-update: todo
|
||||||
|
docs-examples: todo
|
||||||
|
docs-known-limitations: todo
|
||||||
|
docs-supported-devices: todo
|
||||||
|
docs-supported-functions: todo
|
||||||
|
docs-troubleshooting: todo
|
||||||
|
docs-use-cases: todo
|
||||||
|
dynamic-devices:
|
||||||
|
status: exempt
|
||||||
|
comment: |
|
||||||
|
This integration has a fixed single device.
|
||||||
|
entity-category: done
|
||||||
|
entity-device-class: done
|
||||||
|
entity-disabled-by-default: done
|
||||||
|
entity-translations: done
|
||||||
|
exception-translations: todo
|
||||||
|
icon-translations: done
|
||||||
|
reconfiguration-flow: todo
|
||||||
|
repair-issues:
|
||||||
|
status: exempt
|
||||||
|
comment: |
|
||||||
|
This integration doesn't have any cases where raising an issue is needed.
|
||||||
|
stale-devices:
|
||||||
|
status: exempt
|
||||||
|
comment: |
|
||||||
|
This integration has a fixed single device.
|
||||||
|
|
||||||
|
# Platinum
|
||||||
|
async-dependency: done
|
||||||
|
inject-websession: done
|
||||||
|
strict-typing: done
|
@ -1,7 +1,8 @@
|
|||||||
"""Airgradient Update platform."""
|
"""Airgradient Update platform."""
|
||||||
|
|
||||||
from datetime import timedelta
|
from datetime import timedelta
|
||||||
from functools import cached_property
|
|
||||||
|
from propcache import cached_property
|
||||||
|
|
||||||
from homeassistant.components.update import UpdateDeviceClass, UpdateEntity
|
from homeassistant.components.update import UpdateDeviceClass, UpdateEntity
|
||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
|
@ -7,6 +7,5 @@
|
|||||||
"integration_type": "service",
|
"integration_type": "service",
|
||||||
"iot_class": "cloud_polling",
|
"iot_class": "cloud_polling",
|
||||||
"loggers": ["airly"],
|
"loggers": ["airly"],
|
||||||
"quality_scale": "platinum",
|
|
||||||
"requirements": ["airly==1.1.0"]
|
"requirements": ["airly==1.1.0"]
|
||||||
}
|
}
|
||||||
|
@ -1,5 +1,7 @@
|
|||||||
"""Config flow for AirNow integration."""
|
"""Config flow for AirNow integration."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
@ -12,7 +14,6 @@ from homeassistant.config_entries import (
|
|||||||
ConfigFlow,
|
ConfigFlow,
|
||||||
ConfigFlowResult,
|
ConfigFlowResult,
|
||||||
OptionsFlow,
|
OptionsFlow,
|
||||||
OptionsFlowWithConfigEntry,
|
|
||||||
)
|
)
|
||||||
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS
|
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS
|
||||||
from homeassistant.core import HomeAssistant, callback
|
from homeassistant.core import HomeAssistant, callback
|
||||||
@ -120,12 +121,12 @@ class AirNowConfigFlow(ConfigFlow, domain=DOMAIN):
|
|||||||
@callback
|
@callback
|
||||||
def async_get_options_flow(
|
def async_get_options_flow(
|
||||||
config_entry: ConfigEntry,
|
config_entry: ConfigEntry,
|
||||||
) -> OptionsFlow:
|
) -> AirNowOptionsFlowHandler:
|
||||||
"""Return the options flow."""
|
"""Return the options flow."""
|
||||||
return AirNowOptionsFlowHandler(config_entry)
|
return AirNowOptionsFlowHandler()
|
||||||
|
|
||||||
|
|
||||||
class AirNowOptionsFlowHandler(OptionsFlowWithConfigEntry):
|
class AirNowOptionsFlowHandler(OptionsFlow):
|
||||||
"""Handle an options flow for AirNow."""
|
"""Handle an options flow for AirNow."""
|
||||||
|
|
||||||
async def async_step_init(
|
async def async_step_init(
|
||||||
@ -136,12 +137,7 @@ class AirNowOptionsFlowHandler(OptionsFlowWithConfigEntry):
|
|||||||
return self.async_create_entry(data=user_input)
|
return self.async_create_entry(data=user_input)
|
||||||
|
|
||||||
options_schema = vol.Schema(
|
options_schema = vol.Schema(
|
||||||
{
|
{vol.Optional(CONF_RADIUS): vol.All(int, vol.Range(min=5))}
|
||||||
vol.Optional(CONF_RADIUS): vol.All(
|
|
||||||
int,
|
|
||||||
vol.Range(min=5),
|
|
||||||
),
|
|
||||||
}
|
|
||||||
)
|
)
|
||||||
|
|
||||||
return self.async_show_form(
|
return self.async_show_form(
|
||||||
|
@ -7,5 +7,5 @@
|
|||||||
"integration_type": "hub",
|
"integration_type": "hub",
|
||||||
"iot_class": "local_polling",
|
"iot_class": "local_polling",
|
||||||
"loggers": ["aioairq"],
|
"loggers": ["aioairq"],
|
||||||
"requirements": ["aioairq==0.3.2"]
|
"requirements": ["aioairq==0.4.3"]
|
||||||
}
|
}
|
||||||
|
@ -42,6 +42,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirthingsConfigEntry) ->
|
|||||||
coordinator = DataUpdateCoordinator(
|
coordinator = DataUpdateCoordinator(
|
||||||
hass,
|
hass,
|
||||||
_LOGGER,
|
_LOGGER,
|
||||||
|
config_entry=entry,
|
||||||
name=DOMAIN,
|
name=DOMAIN,
|
||||||
update_method=_update_method,
|
update_method=_update_method,
|
||||||
update_interval=SCAN_INTERVAL,
|
update_interval=SCAN_INTERVAL,
|
||||||
|
@ -2,75 +2,27 @@
|
|||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from datetime import timedelta
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from airthings_ble import AirthingsBluetoothDeviceData, AirthingsDevice
|
|
||||||
from bleak_retry_connector import close_stale_connections_by_address
|
|
||||||
|
|
||||||
from homeassistant.components import bluetooth
|
|
||||||
from homeassistant.config_entries import ConfigEntry
|
|
||||||
from homeassistant.const import Platform
|
from homeassistant.const import Platform
|
||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
from homeassistant.exceptions import ConfigEntryNotReady
|
|
||||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
|
||||||
from homeassistant.util.unit_system import METRIC_SYSTEM
|
|
||||||
|
|
||||||
from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, MAX_RETRIES_AFTER_STARTUP
|
from .const import MAX_RETRIES_AFTER_STARTUP
|
||||||
|
from .coordinator import AirthingsBLEConfigEntry, AirthingsBLEDataUpdateCoordinator
|
||||||
|
|
||||||
PLATFORMS: list[Platform] = [Platform.SENSOR]
|
PLATFORMS: list[Platform] = [Platform.SENSOR]
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
AirthingsBLEDataUpdateCoordinator = DataUpdateCoordinator[AirthingsDevice]
|
|
||||||
AirthingsBLEConfigEntry = ConfigEntry[AirthingsBLEDataUpdateCoordinator]
|
|
||||||
|
|
||||||
|
|
||||||
async def async_setup_entry(
|
async def async_setup_entry(
|
||||||
hass: HomeAssistant, entry: AirthingsBLEConfigEntry
|
hass: HomeAssistant, entry: AirthingsBLEConfigEntry
|
||||||
) -> bool:
|
) -> bool:
|
||||||
"""Set up Airthings BLE device from a config entry."""
|
"""Set up Airthings BLE device from a config entry."""
|
||||||
hass.data.setdefault(DOMAIN, {})
|
coordinator = AirthingsBLEDataUpdateCoordinator(hass, entry)
|
||||||
address = entry.unique_id
|
|
||||||
|
|
||||||
is_metric = hass.config.units is METRIC_SYSTEM
|
|
||||||
assert address is not None
|
|
||||||
|
|
||||||
await close_stale_connections_by_address(address)
|
|
||||||
|
|
||||||
ble_device = bluetooth.async_ble_device_from_address(hass, address)
|
|
||||||
|
|
||||||
if not ble_device:
|
|
||||||
raise ConfigEntryNotReady(
|
|
||||||
f"Could not find Airthings device with address {address}"
|
|
||||||
)
|
|
||||||
|
|
||||||
airthings = AirthingsBluetoothDeviceData(_LOGGER, is_metric)
|
|
||||||
|
|
||||||
async def _async_update_method() -> AirthingsDevice:
|
|
||||||
"""Get data from Airthings BLE."""
|
|
||||||
try:
|
|
||||||
data = await airthings.update_device(ble_device)
|
|
||||||
except Exception as err:
|
|
||||||
raise UpdateFailed(f"Unable to fetch data: {err}") from err
|
|
||||||
|
|
||||||
return data
|
|
||||||
|
|
||||||
coordinator: AirthingsBLEDataUpdateCoordinator = DataUpdateCoordinator(
|
|
||||||
hass,
|
|
||||||
_LOGGER,
|
|
||||||
name=DOMAIN,
|
|
||||||
update_method=_async_update_method,
|
|
||||||
update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL),
|
|
||||||
)
|
|
||||||
|
|
||||||
await coordinator.async_config_entry_first_refresh()
|
await coordinator.async_config_entry_first_refresh()
|
||||||
|
|
||||||
# Once its setup and we know we are not going to delay
|
# Once its setup and we know we are not going to delay
|
||||||
# the startup of Home Assistant, we can set the max attempts
|
# the startup of Home Assistant, we can set the max attempts
|
||||||
# to a higher value. If the first connection attempt fails,
|
# to a higher value. If the first connection attempt fails,
|
||||||
# Home Assistant's built-in retry logic will take over.
|
# Home Assistant's built-in retry logic will take over.
|
||||||
airthings.set_max_attempts(MAX_RETRIES_AFTER_STARTUP)
|
coordinator.airthings.set_max_attempts(MAX_RETRIES_AFTER_STARTUP)
|
||||||
|
|
||||||
entry.runtime_data = coordinator
|
entry.runtime_data = coordinator
|
||||||
|
|
||||||
|
68
homeassistant/components/airthings_ble/coordinator.py
Normal file
68
homeassistant/components/airthings_ble/coordinator.py
Normal file
@ -0,0 +1,68 @@
|
|||||||
|
"""The Airthings BLE integration."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import timedelta
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from airthings_ble import AirthingsBluetoothDeviceData, AirthingsDevice
|
||||||
|
from bleak.backends.device import BLEDevice
|
||||||
|
from bleak_retry_connector import close_stale_connections_by_address
|
||||||
|
|
||||||
|
from homeassistant.components import bluetooth
|
||||||
|
from homeassistant.config_entries import ConfigEntry
|
||||||
|
from homeassistant.core import HomeAssistant
|
||||||
|
from homeassistant.exceptions import ConfigEntryNotReady
|
||||||
|
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||||
|
from homeassistant.util.unit_system import METRIC_SYSTEM
|
||||||
|
|
||||||
|
from .const import DEFAULT_SCAN_INTERVAL, DOMAIN
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
type AirthingsBLEConfigEntry = ConfigEntry[AirthingsBLEDataUpdateCoordinator]
|
||||||
|
|
||||||
|
|
||||||
|
class AirthingsBLEDataUpdateCoordinator(DataUpdateCoordinator[AirthingsDevice]):
|
||||||
|
"""Class to manage fetching Airthings BLE data."""
|
||||||
|
|
||||||
|
ble_device: BLEDevice
|
||||||
|
config_entry: AirthingsBLEConfigEntry
|
||||||
|
|
||||||
|
def __init__(self, hass: HomeAssistant, entry: AirthingsBLEConfigEntry) -> None:
|
||||||
|
"""Initialize the coordinator."""
|
||||||
|
self.airthings = AirthingsBluetoothDeviceData(
|
||||||
|
_LOGGER, hass.config.units is METRIC_SYSTEM
|
||||||
|
)
|
||||||
|
super().__init__(
|
||||||
|
hass,
|
||||||
|
_LOGGER,
|
||||||
|
config_entry=entry,
|
||||||
|
name=DOMAIN,
|
||||||
|
update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL),
|
||||||
|
)
|
||||||
|
|
||||||
|
async def _async_setup(self) -> None:
|
||||||
|
"""Set up the coordinator."""
|
||||||
|
address = self.config_entry.unique_id
|
||||||
|
|
||||||
|
assert address is not None
|
||||||
|
|
||||||
|
await close_stale_connections_by_address(address)
|
||||||
|
|
||||||
|
ble_device = bluetooth.async_ble_device_from_address(self.hass, address)
|
||||||
|
|
||||||
|
if not ble_device:
|
||||||
|
raise ConfigEntryNotReady(
|
||||||
|
f"Could not find Airthings device with address {address}"
|
||||||
|
)
|
||||||
|
self.ble_device = ble_device
|
||||||
|
|
||||||
|
async def _async_update_data(self) -> AirthingsDevice:
|
||||||
|
"""Get data from Airthings BLE."""
|
||||||
|
try:
|
||||||
|
data = await self.airthings.update_device(self.ble_device)
|
||||||
|
except Exception as err:
|
||||||
|
raise UpdateFailed(f"Unable to fetch data: {err}") from err
|
||||||
|
|
||||||
|
return data
|
@ -24,5 +24,5 @@
|
|||||||
"dependencies": ["bluetooth_adapters"],
|
"dependencies": ["bluetooth_adapters"],
|
||||||
"documentation": "https://www.home-assistant.io/integrations/airthings_ble",
|
"documentation": "https://www.home-assistant.io/integrations/airthings_ble",
|
||||||
"iot_class": "local_polling",
|
"iot_class": "local_polling",
|
||||||
"requirements": ["airthings-ble==0.9.1"]
|
"requirements": ["airthings-ble==0.9.2"]
|
||||||
}
|
}
|
||||||
|
@ -34,8 +34,8 @@ from homeassistant.helpers.typing import StateType
|
|||||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||||
from homeassistant.util.unit_system import METRIC_SYSTEM
|
from homeassistant.util.unit_system import METRIC_SYSTEM
|
||||||
|
|
||||||
from . import AirthingsBLEConfigEntry, AirthingsBLEDataUpdateCoordinator
|
|
||||||
from .const import DOMAIN, VOLUME_BECQUEREL, VOLUME_PICOCURIE
|
from .const import DOMAIN, VOLUME_BECQUEREL, VOLUME_PICOCURIE
|
||||||
|
from .coordinator import AirthingsBLEConfigEntry, AirthingsBLEDataUpdateCoordinator
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
@ -9,8 +9,6 @@ from homeassistant.const import CONF_HOST, Platform
|
|||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
from homeassistant.exceptions import ConfigEntryNotReady
|
from homeassistant.exceptions import ConfigEntryNotReady
|
||||||
|
|
||||||
from .const import DOMAIN
|
|
||||||
|
|
||||||
PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.COVER]
|
PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.COVER]
|
||||||
|
|
||||||
type Airtouch5ConfigEntry = ConfigEntry[Airtouch5SimpleClient]
|
type Airtouch5ConfigEntry = ConfigEntry[Airtouch5SimpleClient]
|
||||||
@ -19,8 +17,6 @@ type Airtouch5ConfigEntry = ConfigEntry[Airtouch5SimpleClient]
|
|||||||
async def async_setup_entry(hass: HomeAssistant, entry: Airtouch5ConfigEntry) -> bool:
|
async def async_setup_entry(hass: HomeAssistant, entry: Airtouch5ConfigEntry) -> bool:
|
||||||
"""Set up Airtouch 5 from a config entry."""
|
"""Set up Airtouch 5 from a config entry."""
|
||||||
|
|
||||||
hass.data.setdefault(DOMAIN, {})
|
|
||||||
|
|
||||||
# Create API instance
|
# Create API instance
|
||||||
host = entry.data[CONF_HOST]
|
host = entry.data[CONF_HOST]
|
||||||
client = Airtouch5SimpleClient(host)
|
client = Airtouch5SimpleClient(host)
|
||||||
|
@ -6,5 +6,5 @@
|
|||||||
"documentation": "https://www.home-assistant.io/integrations/airtouch5",
|
"documentation": "https://www.home-assistant.io/integrations/airtouch5",
|
||||||
"iot_class": "local_push",
|
"iot_class": "local_push",
|
||||||
"loggers": ["airtouch5py"],
|
"loggers": ["airtouch5py"],
|
||||||
"requirements": ["airtouch5py==0.2.10"]
|
"requirements": ["airtouch5py==0.2.11"]
|
||||||
}
|
}
|
||||||
|
@ -204,6 +204,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirVisualConfigEntry) ->
|
|||||||
coordinator = DataUpdateCoordinator(
|
coordinator = DataUpdateCoordinator(
|
||||||
hass,
|
hass,
|
||||||
LOGGER,
|
LOGGER,
|
||||||
|
config_entry=entry,
|
||||||
name=async_get_geography_id(entry.data),
|
name=async_get_geography_id(entry.data),
|
||||||
# We give a placeholder update interval in order to create the coordinator;
|
# We give a placeholder update interval in order to create the coordinator;
|
||||||
# then, below, we use the coordinator's presence (along with any other
|
# then, below, we use the coordinator's presence (along with any other
|
||||||
|
@ -16,7 +16,12 @@ from pyairvisual.cloud_api import (
|
|||||||
from pyairvisual.errors import AirVisualError
|
from pyairvisual.errors import AirVisualError
|
||||||
import voluptuous as vol
|
import voluptuous as vol
|
||||||
|
|
||||||
from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult
|
from homeassistant.config_entries import (
|
||||||
|
SOURCE_REAUTH,
|
||||||
|
ConfigEntry,
|
||||||
|
ConfigFlow,
|
||||||
|
ConfigFlowResult,
|
||||||
|
)
|
||||||
from homeassistant.const import (
|
from homeassistant.const import (
|
||||||
CONF_API_KEY,
|
CONF_API_KEY,
|
||||||
CONF_COUNTRY,
|
CONF_COUNTRY,
|
||||||
@ -140,12 +145,11 @@ class AirVisualFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||||||
|
|
||||||
valid_keys.add(user_input[CONF_API_KEY])
|
valid_keys.add(user_input[CONF_API_KEY])
|
||||||
|
|
||||||
if existing_entry := await self.async_set_unique_id(self._geo_id):
|
if self.source == SOURCE_REAUTH:
|
||||||
self.hass.config_entries.async_update_entry(existing_entry, data=user_input)
|
return self.async_update_reload_and_abort(
|
||||||
self.hass.async_create_task(
|
self._get_reauth_entry(),
|
||||||
self.hass.config_entries.async_reload(existing_entry.entry_id)
|
data_updates={CONF_API_KEY: user_input[CONF_API_KEY]},
|
||||||
)
|
)
|
||||||
return self.async_abort(reason="reauth_successful")
|
|
||||||
|
|
||||||
return self.async_create_entry(
|
return self.async_create_entry(
|
||||||
title=f"Cloud API ({self._geo_id})",
|
title=f"Cloud API ({self._geo_id})",
|
||||||
|
@ -32,7 +32,7 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"error": {
|
"error": {
|
||||||
"general_error": "[%key:common::config_flow::error::unknown%]",
|
"unknown": "[%key:common::config_flow::error::unknown%]",
|
||||||
"invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]",
|
"invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]",
|
||||||
"location_not_found": "Location not found",
|
"location_not_found": "Location not found",
|
||||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
|
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
|
||||||
|
@ -81,6 +81,7 @@ async def async_setup_entry(
|
|||||||
coordinator = DataUpdateCoordinator(
|
coordinator = DataUpdateCoordinator(
|
||||||
hass,
|
hass,
|
||||||
LOGGER,
|
LOGGER,
|
||||||
|
config_entry=entry,
|
||||||
name="Node/Pro data",
|
name="Node/Pro data",
|
||||||
update_interval=UPDATE_INTERVAL,
|
update_interval=UPDATE_INTERVAL,
|
||||||
update_method=async_get_data,
|
update_method=async_get_data,
|
||||||
|
@ -14,7 +14,7 @@ from pyairvisual.node import (
|
|||||||
)
|
)
|
||||||
import voluptuous as vol
|
import voluptuous as vol
|
||||||
|
|
||||||
from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult
|
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||||
from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD
|
from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD
|
||||||
|
|
||||||
from .const import DOMAIN, LOGGER
|
from .const import DOMAIN, LOGGER
|
||||||
@ -76,9 +76,7 @@ class AirVisualProFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||||||
|
|
||||||
VERSION = 1
|
VERSION = 1
|
||||||
|
|
||||||
def __init__(self) -> None:
|
_reauth_entry_data: Mapping[str, Any]
|
||||||
"""Initialize."""
|
|
||||||
self._reauth_entry: ConfigEntry | None = None
|
|
||||||
|
|
||||||
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
|
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
|
||||||
"""Import a config entry from `airvisual` integration (see #83882)."""
|
"""Import a config entry from `airvisual` integration (see #83882)."""
|
||||||
@ -88,9 +86,7 @@ class AirVisualProFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||||||
self, entry_data: Mapping[str, Any]
|
self, entry_data: Mapping[str, Any]
|
||||||
) -> ConfigFlowResult:
|
) -> ConfigFlowResult:
|
||||||
"""Handle configuration by re-auth."""
|
"""Handle configuration by re-auth."""
|
||||||
self._reauth_entry = self.hass.config_entries.async_get_entry(
|
self._reauth_entry_data = entry_data
|
||||||
self.context["entry_id"]
|
|
||||||
)
|
|
||||||
return await self.async_step_reauth_confirm()
|
return await self.async_step_reauth_confirm()
|
||||||
|
|
||||||
async def async_step_reauth_confirm(
|
async def async_step_reauth_confirm(
|
||||||
@ -102,10 +98,8 @@ class AirVisualProFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||||||
step_id="reauth_confirm", data_schema=STEP_REAUTH_SCHEMA
|
step_id="reauth_confirm", data_schema=STEP_REAUTH_SCHEMA
|
||||||
)
|
)
|
||||||
|
|
||||||
assert self._reauth_entry
|
|
||||||
|
|
||||||
validation_result = await async_validate_credentials(
|
validation_result = await async_validate_credentials(
|
||||||
self._reauth_entry.data[CONF_IP_ADDRESS], user_input[CONF_PASSWORD]
|
self._reauth_entry_data[CONF_IP_ADDRESS], user_input[CONF_PASSWORD]
|
||||||
)
|
)
|
||||||
|
|
||||||
if validation_result.errors:
|
if validation_result.errors:
|
||||||
@ -115,13 +109,9 @@ class AirVisualProFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||||||
errors=validation_result.errors,
|
errors=validation_result.errors,
|
||||||
)
|
)
|
||||||
|
|
||||||
self.hass.config_entries.async_update_entry(
|
return self.async_update_reload_and_abort(
|
||||||
self._reauth_entry, data=self._reauth_entry.data | user_input
|
self._get_reauth_entry(), data_updates=user_input
|
||||||
)
|
)
|
||||||
self.hass.async_create_task(
|
|
||||||
self.hass.config_entries.async_reload(self._reauth_entry.entry_id)
|
|
||||||
)
|
|
||||||
return self.async_abort(reason="reauth_successful")
|
|
||||||
|
|
||||||
async def async_step_user(
|
async def async_step_user(
|
||||||
self, user_input: dict[str, str] | None = None
|
self, user_input: dict[str, str] | None = None
|
||||||
|
@ -24,6 +24,7 @@ PLATFORMS: list[Platform] = [
|
|||||||
Platform.CLIMATE,
|
Platform.CLIMATE,
|
||||||
Platform.SELECT,
|
Platform.SELECT,
|
||||||
Platform.SENSOR,
|
Platform.SENSOR,
|
||||||
|
Platform.SWITCH,
|
||||||
Platform.WATER_HEATER,
|
Platform.WATER_HEATER,
|
||||||
]
|
]
|
||||||
|
|
||||||
|
@ -85,6 +85,7 @@ HVAC_MODE_LIB_TO_HASS: Final[dict[OperationMode, HVACMode]] = {
|
|||||||
OperationMode.HEATING: HVACMode.HEAT,
|
OperationMode.HEATING: HVACMode.HEAT,
|
||||||
OperationMode.FAN: HVACMode.FAN_ONLY,
|
OperationMode.FAN: HVACMode.FAN_ONLY,
|
||||||
OperationMode.DRY: HVACMode.DRY,
|
OperationMode.DRY: HVACMode.DRY,
|
||||||
|
OperationMode.AUX_HEATING: HVACMode.HEAT,
|
||||||
OperationMode.AUTO: HVACMode.HEAT_COOL,
|
OperationMode.AUTO: HVACMode.HEAT_COOL,
|
||||||
}
|
}
|
||||||
HVAC_MODE_HASS_TO_LIB: Final[dict[HVACMode, OperationMode]] = {
|
HVAC_MODE_HASS_TO_LIB: Final[dict[HVACMode, OperationMode]] = {
|
||||||
@ -157,9 +158,10 @@ class AirzoneClimate(AirzoneZoneEntity, ClimateEntity):
|
|||||||
self._attr_temperature_unit = TEMP_UNIT_LIB_TO_HASS[
|
self._attr_temperature_unit = TEMP_UNIT_LIB_TO_HASS[
|
||||||
self.get_airzone_value(AZD_TEMP_UNIT)
|
self.get_airzone_value(AZD_TEMP_UNIT)
|
||||||
]
|
]
|
||||||
self._attr_hvac_modes = [
|
_attr_hvac_modes = [
|
||||||
HVAC_MODE_LIB_TO_HASS[mode] for mode in self.get_airzone_value(AZD_MODES)
|
HVAC_MODE_LIB_TO_HASS[mode] for mode in self.get_airzone_value(AZD_MODES)
|
||||||
]
|
]
|
||||||
|
self._attr_hvac_modes = list(dict.fromkeys(_attr_hvac_modes))
|
||||||
if (
|
if (
|
||||||
self.get_airzone_value(AZD_SPEED) is not None
|
self.get_airzone_value(AZD_SPEED) is not None
|
||||||
and self.get_airzone_value(AZD_SPEEDS) is not None
|
and self.get_airzone_value(AZD_SPEEDS) is not None
|
||||||
@ -273,12 +275,18 @@ class AirzoneClimate(AirzoneZoneEntity, ClimateEntity):
|
|||||||
self._attr_min_temp = self.get_airzone_value(AZD_TEMP_MIN)
|
self._attr_min_temp = self.get_airzone_value(AZD_TEMP_MIN)
|
||||||
if self.supported_features & ClimateEntityFeature.FAN_MODE:
|
if self.supported_features & ClimateEntityFeature.FAN_MODE:
|
||||||
self._attr_fan_mode = self._speeds.get(self.get_airzone_value(AZD_SPEED))
|
self._attr_fan_mode = self._speeds.get(self.get_airzone_value(AZD_SPEED))
|
||||||
if self.supported_features & ClimateEntityFeature.TARGET_TEMPERATURE_RANGE:
|
if (
|
||||||
|
self.supported_features & ClimateEntityFeature.TARGET_TEMPERATURE_RANGE
|
||||||
|
and self._attr_hvac_mode == HVACMode.HEAT_COOL
|
||||||
|
):
|
||||||
self._attr_target_temperature_high = self.get_airzone_value(
|
self._attr_target_temperature_high = self.get_airzone_value(
|
||||||
AZD_COOL_TEMP_SET
|
AZD_COOL_TEMP_SET
|
||||||
)
|
)
|
||||||
self._attr_target_temperature_low = self.get_airzone_value(
|
self._attr_target_temperature_low = self.get_airzone_value(
|
||||||
AZD_HEAT_TEMP_SET
|
AZD_HEAT_TEMP_SET
|
||||||
)
|
)
|
||||||
|
self._attr_target_temperature = None
|
||||||
else:
|
else:
|
||||||
|
self._attr_target_temperature_high = None
|
||||||
|
self._attr_target_temperature_low = None
|
||||||
self._attr_target_temperature = self.get_airzone_value(AZD_TEMP_SET)
|
self._attr_target_temperature = self.get_airzone_value(AZD_TEMP_SET)
|
||||||
|
@ -11,5 +11,5 @@
|
|||||||
"documentation": "https://www.home-assistant.io/integrations/airzone",
|
"documentation": "https://www.home-assistant.io/integrations/airzone",
|
||||||
"iot_class": "local_polling",
|
"iot_class": "local_polling",
|
||||||
"loggers": ["aioairzone"],
|
"loggers": ["aioairzone"],
|
||||||
"requirements": ["aioairzone==0.9.3"]
|
"requirements": ["aioairzone==0.9.7"]
|
||||||
}
|
}
|
||||||
|
122
homeassistant/components/airzone/switch.py
Normal file
122
homeassistant/components/airzone/switch.py
Normal file
@ -0,0 +1,122 @@
|
|||||||
|
"""Support for the Airzone switch."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Any, Final
|
||||||
|
|
||||||
|
from aioairzone.const import API_ON, AZD_ON, AZD_ZONES
|
||||||
|
|
||||||
|
from homeassistant.components.switch import (
|
||||||
|
SwitchDeviceClass,
|
||||||
|
SwitchEntity,
|
||||||
|
SwitchEntityDescription,
|
||||||
|
)
|
||||||
|
from homeassistant.config_entries import ConfigEntry
|
||||||
|
from homeassistant.core import HomeAssistant, callback
|
||||||
|
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||||
|
|
||||||
|
from . import AirzoneConfigEntry
|
||||||
|
from .coordinator import AirzoneUpdateCoordinator
|
||||||
|
from .entity import AirzoneEntity, AirzoneZoneEntity
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True, kw_only=True)
|
||||||
|
class AirzoneSwitchDescription(SwitchEntityDescription):
|
||||||
|
"""Class to describe an Airzone switch entity."""
|
||||||
|
|
||||||
|
api_param: str
|
||||||
|
|
||||||
|
|
||||||
|
ZONE_SWITCH_TYPES: Final[tuple[AirzoneSwitchDescription, ...]] = (
|
||||||
|
AirzoneSwitchDescription(
|
||||||
|
api_param=API_ON,
|
||||||
|
device_class=SwitchDeviceClass.SWITCH,
|
||||||
|
key=AZD_ON,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def async_setup_entry(
|
||||||
|
hass: HomeAssistant,
|
||||||
|
entry: AirzoneConfigEntry,
|
||||||
|
async_add_entities: AddEntitiesCallback,
|
||||||
|
) -> None:
|
||||||
|
"""Add Airzone switch from a config_entry."""
|
||||||
|
coordinator = entry.runtime_data
|
||||||
|
|
||||||
|
added_zones: set[str] = set()
|
||||||
|
|
||||||
|
def _async_entity_listener() -> None:
|
||||||
|
"""Handle additions of switch."""
|
||||||
|
|
||||||
|
zones_data = coordinator.data.get(AZD_ZONES, {})
|
||||||
|
received_zones = set(zones_data)
|
||||||
|
new_zones = received_zones - added_zones
|
||||||
|
if new_zones:
|
||||||
|
async_add_entities(
|
||||||
|
AirzoneZoneSwitch(
|
||||||
|
coordinator,
|
||||||
|
description,
|
||||||
|
entry,
|
||||||
|
system_zone_id,
|
||||||
|
zones_data.get(system_zone_id),
|
||||||
|
)
|
||||||
|
for system_zone_id in new_zones
|
||||||
|
for description in ZONE_SWITCH_TYPES
|
||||||
|
if description.key in zones_data.get(system_zone_id)
|
||||||
|
)
|
||||||
|
added_zones.update(new_zones)
|
||||||
|
|
||||||
|
entry.async_on_unload(coordinator.async_add_listener(_async_entity_listener))
|
||||||
|
_async_entity_listener()
|
||||||
|
|
||||||
|
|
||||||
|
class AirzoneBaseSwitch(AirzoneEntity, SwitchEntity):
|
||||||
|
"""Define an Airzone switch."""
|
||||||
|
|
||||||
|
entity_description: AirzoneSwitchDescription
|
||||||
|
|
||||||
|
@callback
|
||||||
|
def _handle_coordinator_update(self) -> None:
|
||||||
|
"""Update attributes when the coordinator updates."""
|
||||||
|
self._async_update_attrs()
|
||||||
|
super()._handle_coordinator_update()
|
||||||
|
|
||||||
|
@callback
|
||||||
|
def _async_update_attrs(self) -> None:
|
||||||
|
"""Update switch attributes."""
|
||||||
|
self._attr_is_on = self.get_airzone_value(self.entity_description.key)
|
||||||
|
|
||||||
|
|
||||||
|
class AirzoneZoneSwitch(AirzoneZoneEntity, AirzoneBaseSwitch):
|
||||||
|
"""Define an Airzone Zone switch."""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
coordinator: AirzoneUpdateCoordinator,
|
||||||
|
description: AirzoneSwitchDescription,
|
||||||
|
entry: ConfigEntry,
|
||||||
|
system_zone_id: str,
|
||||||
|
zone_data: dict[str, Any],
|
||||||
|
) -> None:
|
||||||
|
"""Initialize."""
|
||||||
|
super().__init__(coordinator, entry, system_zone_id, zone_data)
|
||||||
|
|
||||||
|
self._attr_name = None
|
||||||
|
self._attr_unique_id = (
|
||||||
|
f"{self._attr_unique_id}_{system_zone_id}_{description.key}"
|
||||||
|
)
|
||||||
|
self.entity_description = description
|
||||||
|
|
||||||
|
self._async_update_attrs()
|
||||||
|
|
||||||
|
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||||
|
"""Turn the entity on."""
|
||||||
|
param = self.entity_description.api_param
|
||||||
|
await self._async_update_hvac_params({param: True})
|
||||||
|
|
||||||
|
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||||
|
"""Turn the entity off."""
|
||||||
|
param = self.entity_description.api_param
|
||||||
|
await self._async_update_hvac_params({param: False})
|
@ -17,6 +17,7 @@ PLATFORMS: list[Platform] = [
|
|||||||
Platform.CLIMATE,
|
Platform.CLIMATE,
|
||||||
Platform.SELECT,
|
Platform.SELECT,
|
||||||
Platform.SENSOR,
|
Platform.SENSOR,
|
||||||
|
Platform.SWITCH,
|
||||||
Platform.WATER_HEATER,
|
Platform.WATER_HEATER,
|
||||||
]
|
]
|
||||||
|
|
||||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user