Compare commits


1 commit

Author: Franck Nijhof
Commit: 35c8fefbd6

Fix AttributeError in radio_browser media source when runtime_data is missing

The media_player.play_media action was failing with AttributeError when
the radio_browser integration wasn't properly loaded. This happened when
accessing the media source before the config entry was fully initialized.

- Add proper error handling in RadioMediaSource.radios property
- Raise Unresolvable exception when runtime_data is missing or None
- Add comprehensive tests for the error handling scenario

Fixes #141755

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
Date: 2025-07-04 20:38:42 +00:00
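The guard described above can be pictured with a short sketch: the `radios` accessor checks `runtime_data` before using it and raises `Unresolvable`, so `media_player.play_media` reports a clean error instead of crashing with an `AttributeError`. This is an illustrative sketch only; the class layout and the error message are assumptions, not the verbatim patch.

```python
# Illustrative sketch of the described fix; names and structure are assumed.
from homeassistant.components.media_source import MediaSource
from homeassistant.components.media_source.error import Unresolvable
from homeassistant.config_entries import ConfigEntry

DOMAIN = "radio_browser"


class RadioMediaSource(MediaSource):
    """Media source exposing Radio Browser stations."""

    def __init__(self, entry: ConfigEntry) -> None:
        """Store the config entry that owns the RadioBrowser client."""
        super().__init__(DOMAIN)
        self.entry = entry

    @property
    def radios(self):
        """Return the RadioBrowser client, or fail with a resolvable error."""
        client = getattr(self.entry, "runtime_data", None)
        if client is None:
            # Entry not (fully) set up yet: raise Unresolvable so play_media
            # surfaces a proper error instead of an AttributeError.
            raise Unresolvable("Radio Browser integration is not loaded")
        return client
```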
3502 changed files with 42848 additions and 283220 deletions

View File

@@ -8,9 +8,6 @@
"PYTHONASYNCIODEBUG": "1" "PYTHONASYNCIODEBUG": "1"
}, },
"features": { "features": {
// Node feature required for Claude Code until fixed https://github.com/anthropics/devcontainer-features/issues/28
"ghcr.io/devcontainers/features/node:1": {},
"ghcr.io/anthropics/devcontainer-features/claude-code:1.0": {},
"ghcr.io/devcontainers/features/github-cli:1": {} "ghcr.io/devcontainers/features/github-cli:1": {}
}, },
// Port 5683 udp is used by Shelly integration // Port 5683 udp is used by Shelly integration

View File

@@ -14,8 +14,7 @@ tests
 # Other virtualization methods
 venv
-.venv
 .vagrant
 # Temporary files
 **/__pycache__

View File

@@ -21,7 +21,7 @@ body:
   - type: textarea
     id: description
     attributes:
-      label: Description
+      label: Task description
       description: |
         Provide a clear and detailed description of the task that needs to be accomplished.
@@ -43,11 +43,9 @@ body:
         Include links to related issues, research, prototypes, roadmap opportunities etc.
       placeholder: |
-        - Roadmap opportunity: [link]
-        - Epic: [link]
+        - Roadmap opportunity: [links]
         - Feature request: [link]
         - Technical design documents: [link]
         - Prototype/mockup: [link]
-        - Dependencies: [links]
     validations:
       required: false

View File

@@ -45,12 +45,6 @@ rules:
 **When Reviewing/Creating Code**: Always check the integration's quality scale level and exemption status before applying rules.
-## Code Review Guidelines
-**When reviewing code, do NOT comment on:**
-- **Missing imports** - We use static analysis tooling to catch that
-- **Code formatting** - We have ruff as a formatting tool that will catch those if needed (unless specifically instructed otherwise in these instructions)
 ## Python Requirements
 - **Compatibility**: Python 3.13+
@@ -1073,11 +1067,7 @@ async def test_flow_connection_error(hass, mock_api_error):
 ### Entity Testing Patterns
 ```python
-@pytest.fixture
-def platforms() -> list[Platform]:
-    """Overridden fixture to specify platforms to test."""
-    return [Platform.SENSOR]  # Or another specific platform as needed.
+@pytest.mark.parametrize("init_integration", [Platform.SENSOR], indirect=True)
 @pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration")
 async def test_entities(
     hass: HomeAssistant,
@@ -1124,25 +1114,16 @@ def mock_device_api() -> Generator[MagicMock]:
     )
     yield api
-@pytest.fixture
-def platforms() -> list[Platform]:
-    """Fixture to specify platforms to test."""
-    return PLATFORMS
 @pytest.fixture
 async def init_integration(
     hass: HomeAssistant,
     mock_config_entry: MockConfigEntry,
     mock_device_api: MagicMock,
-    platforms: list[Platform],
 ) -> MockConfigEntry:
     """Set up the integration for testing."""
     mock_config_entry.add_to_hass(hass)
-    with patch("homeassistant.components.my_integration.PLATFORMS", platforms):
-        await hass.config_entries.async_setup(mock_config_entry.entry_id)
-        await hass.async_block_till_done()
+    await hass.config_entries.async_setup(mock_config_entry.entry_id)
+    await hass.async_block_till_done()
     return mock_config_entry
 ```
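To show how the parametrized variant on the right-hand side is consumed, here is a hedged usage sketch; `init_integration`, `my_integration`, and the snapshot wiring follow the pattern above but are placeholders rather than code taken from this diff.

```python
# Usage sketch for the parametrized init_integration pattern; names are placeholders.
import pytest
from syrupy.assertion import SnapshotAssertion

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from tests.common import MockConfigEntry, snapshot_platform


@pytest.mark.parametrize("init_integration", [Platform.SENSOR], indirect=True)
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_entities(
    hass: HomeAssistant,
    snapshot: SnapshotAssertion,
    entity_registry: er.EntityRegistry,
    init_integration: MockConfigEntry,
) -> None:
    """Snapshot the sensor entities created by the integration."""
    await snapshot_platform(hass, entity_registry, snapshot, init_integration.entry_id)
```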
@@ -1168,7 +1149,7 @@ _LOGGER.debug("Processing data: %s", data) # Use lazy logging
 ### Validation Commands
 ```bash
 # Check specific integration
-python -m script.hassfest --integration-path homeassistant/components/my_integration
+python -m script.hassfest --integration my_integration
 # Validate quality scale
 # Check quality_scale.yaml against current rules

View File

@@ -6,6 +6,3 @@ updates:
       interval: daily
       time: "06:00"
     open-pull-requests-limit: 10
-    labels:
-      - dependency
-      - github_actions

View File

@@ -27,12 +27,12 @@ jobs:
publish: ${{ steps.version.outputs.publish }} publish: ${{ steps.version.outputs.publish }}
steps: steps:
- name: Checkout the repository - name: Checkout the repository
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
with: with:
fetch-depth: 0 fetch-depth: 0
- name: Set up Python ${{ env.DEFAULT_PYTHON }} - name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v6.0.0 uses: actions/setup-python@v5.6.0
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}
@@ -90,7 +90,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }} arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps: steps:
- name: Checkout the repository - name: Checkout the repository
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Download nightly wheels of frontend - name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev' if: needs.init.outputs.channel == 'dev'
@@ -116,7 +116,7 @@ jobs:
- name: Set up Python ${{ env.DEFAULT_PYTHON }} - name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.channel == 'dev' if: needs.init.outputs.channel == 'dev'
uses: actions/setup-python@v6.0.0 uses: actions/setup-python@v5.6.0
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}
@@ -175,7 +175,7 @@ jobs:
sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt
- name: Download translations - name: Download translations
uses: actions/download-artifact@v5.0.0 uses: actions/download-artifact@v4.3.0
with: with:
name: translations name: translations
@@ -190,7 +190,7 @@ jobs:
echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE
- name: Login to GitHub Container Registry - name: Login to GitHub Container Registry
uses: docker/login-action@v3.5.0 uses: docker/login-action@v3.4.0
with: with:
registry: ghcr.io registry: ghcr.io
username: ${{ github.repository_owner }} username: ${{ github.repository_owner }}
@@ -242,7 +242,7 @@ jobs:
- green - green
steps: steps:
- name: Checkout the repository - name: Checkout the repository
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Set build additional args - name: Set build additional args
run: | run: |
@@ -256,7 +256,7 @@ jobs:
fi fi
- name: Login to GitHub Container Registry - name: Login to GitHub Container Registry
uses: docker/login-action@v3.5.0 uses: docker/login-action@v3.4.0
with: with:
registry: ghcr.io registry: ghcr.io
username: ${{ github.repository_owner }} username: ${{ github.repository_owner }}
@@ -279,7 +279,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout the repository - name: Checkout the repository
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Initialize git - name: Initialize git
uses: home-assistant/actions/helpers/git-init@master uses: home-assistant/actions/helpers/git-init@master
@@ -321,23 +321,23 @@ jobs:
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"] registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
steps: steps:
- name: Checkout the repository - name: Checkout the repository
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Install Cosign - name: Install Cosign
uses: sigstore/cosign-installer@v3.9.2 uses: sigstore/cosign-installer@v3.9.1
with: with:
cosign-release: "v2.2.3" cosign-release: "v2.2.3"
- name: Login to DockerHub - name: Login to DockerHub
if: matrix.registry == 'docker.io/homeassistant' if: matrix.registry == 'docker.io/homeassistant'
uses: docker/login-action@v3.5.0 uses: docker/login-action@v3.4.0
with: with:
username: ${{ secrets.DOCKERHUB_USERNAME }} username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }} password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry - name: Login to GitHub Container Registry
if: matrix.registry == 'ghcr.io/home-assistant' if: matrix.registry == 'ghcr.io/home-assistant'
uses: docker/login-action@v3.5.0 uses: docker/login-action@v3.4.0
with: with:
registry: ghcr.io registry: ghcr.io
username: ${{ github.repository_owner }} username: ${{ github.repository_owner }}
@@ -454,15 +454,15 @@ jobs:
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true' if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps: steps:
- name: Checkout the repository - name: Checkout the repository
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }} - name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v6.0.0 uses: actions/setup-python@v5.6.0
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}
- name: Download translations - name: Download translations
uses: actions/download-artifact@v5.0.0 uses: actions/download-artifact@v4.3.0
with: with:
name: translations name: translations
@@ -480,7 +480,7 @@ jobs:
python -m build python -m build
- name: Upload package to PyPI - name: Upload package to PyPI
uses: pypa/gh-action-pypi-publish@v1.13.0 uses: pypa/gh-action-pypi-publish@v1.12.4
with: with:
skip-existing: true skip-existing: true
@@ -499,10 +499,10 @@ jobs:
HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }} HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Login to GitHub Container Registry - name: Login to GitHub Container Registry
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with: with:
registry: ghcr.io registry: ghcr.io
username: ${{ github.repository_owner }} username: ${{ github.repository_owner }}
@@ -531,7 +531,7 @@ jobs:
- name: Generate artifact attestation - name: Generate artifact attestation
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true' if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0 uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
with: with:
subject-name: ${{ env.HASSFEST_IMAGE_NAME }} subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
subject-digest: ${{ steps.push.outputs.digest }} subject-digest: ${{ steps.push.outputs.digest }}

View File

@@ -37,10 +37,10 @@ on:
type: boolean type: boolean
env: env:
CACHE_VERSION: 7 CACHE_VERSION: 4
UV_CACHE_VERSION: 1 UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1 MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2025.10" HA_SHORT_VERSION: "2025.8"
DEFAULT_PYTHON: "3.13" DEFAULT_PYTHON: "3.13"
ALL_PYTHON_VERSIONS: "['3.13']" ALL_PYTHON_VERSIONS: "['3.13']"
# 10.3 is the oldest supported version # 10.3 is the oldest supported version
@@ -94,7 +94,7 @@ jobs:
runs-on: ubuntu-24.04 runs-on: ubuntu-24.04
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Generate partial Python venv restore key - name: Generate partial Python venv restore key
id: generate_python_cache_key id: generate_python_cache_key
run: | run: |
@@ -246,16 +246,16 @@ jobs:
- info - info
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }} - name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python id: python
uses: actions/setup-python@v6.0.0 uses: actions/setup-python@v5.6.0
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true check-latest: true
- name: Restore base Python virtual environment - name: Restore base Python virtual environment
id: cache-venv id: cache-venv
uses: actions/cache@v4.2.4 uses: actions/cache@v4.2.3
with: with:
path: venv path: venv
key: >- key: >-
@@ -271,7 +271,7 @@ jobs:
uv pip install "$(cat requirements_test.txt | grep pre-commit)" uv pip install "$(cat requirements_test.txt | grep pre-commit)"
- name: Restore pre-commit environment from cache - name: Restore pre-commit environment from cache
id: cache-precommit id: cache-precommit
uses: actions/cache@v4.2.4 uses: actions/cache@v4.2.3
with: with:
path: ${{ env.PRE_COMMIT_CACHE }} path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true lookup-only: true
@@ -292,16 +292,16 @@ jobs:
- pre-commit - pre-commit
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }} - name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v6.0.0 uses: actions/setup-python@v5.6.0
id: python id: python
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true check-latest: true
- name: Restore base Python virtual environment - name: Restore base Python virtual environment
id: cache-venv id: cache-venv
uses: actions/cache/restore@v4.2.4 uses: actions/cache/restore@v4.2.3
with: with:
path: venv path: venv
fail-on-cache-miss: true fail-on-cache-miss: true
@@ -310,7 +310,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }} needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache - name: Restore pre-commit environment from cache
id: cache-precommit id: cache-precommit
uses: actions/cache/restore@v4.2.4 uses: actions/cache/restore@v4.2.3
with: with:
path: ${{ env.PRE_COMMIT_CACHE }} path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true fail-on-cache-miss: true
@@ -332,16 +332,16 @@ jobs:
- pre-commit - pre-commit
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }} - name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v6.0.0 uses: actions/setup-python@v5.6.0
id: python id: python
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true check-latest: true
- name: Restore base Python virtual environment - name: Restore base Python virtual environment
id: cache-venv id: cache-venv
uses: actions/cache/restore@v4.2.4 uses: actions/cache/restore@v4.2.3
with: with:
path: venv path: venv
fail-on-cache-miss: true fail-on-cache-miss: true
@@ -350,7 +350,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }} needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache - name: Restore pre-commit environment from cache
id: cache-precommit id: cache-precommit
uses: actions/cache/restore@v4.2.4 uses: actions/cache/restore@v4.2.3
with: with:
path: ${{ env.PRE_COMMIT_CACHE }} path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true fail-on-cache-miss: true
@@ -372,16 +372,16 @@ jobs:
- pre-commit - pre-commit
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }} - name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v6.0.0 uses: actions/setup-python@v5.6.0
id: python id: python
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true check-latest: true
- name: Restore base Python virtual environment - name: Restore base Python virtual environment
id: cache-venv id: cache-venv
uses: actions/cache/restore@v4.2.4 uses: actions/cache/restore@v4.2.3
with: with:
path: venv path: venv
fail-on-cache-miss: true fail-on-cache-miss: true
@@ -390,7 +390,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }} needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache - name: Restore pre-commit environment from cache
id: cache-precommit id: cache-precommit
uses: actions/cache/restore@v4.2.4 uses: actions/cache/restore@v4.2.3
with: with:
path: ${{ env.PRE_COMMIT_CACHE }} path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true fail-on-cache-miss: true
@@ -462,7 +462,7 @@ jobs:
- script/hassfest/docker/Dockerfile - script/hassfest/docker/Dockerfile
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Register hadolint problem matcher - name: Register hadolint problem matcher
run: | run: |
echo "::add-matcher::.github/workflows/matchers/hadolint.json" echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -481,10 +481,10 @@ jobs:
python-version: ${{ fromJSON(needs.info.outputs.python_versions) }} python-version: ${{ fromJSON(needs.info.outputs.python_versions) }}
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }} - name: Set up Python ${{ matrix.python-version }}
id: python id: python
uses: actions/setup-python@v6.0.0 uses: actions/setup-python@v5.6.0
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
check-latest: true check-latest: true
@@ -497,7 +497,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore base Python virtual environment - name: Restore base Python virtual environment
id: cache-venv id: cache-venv
uses: actions/cache@v4.2.4 uses: actions/cache@v4.2.3
with: with:
path: venv path: venv
key: >- key: >-
@@ -505,7 +505,7 @@ jobs:
needs.info.outputs.python_cache_key }} needs.info.outputs.python_cache_key }}
- name: Restore uv wheel cache - name: Restore uv wheel cache
if: steps.cache-venv.outputs.cache-hit != 'true' if: steps.cache-venv.outputs.cache-hit != 'true'
uses: actions/cache@v4.2.4 uses: actions/cache@v4.2.3
with: with:
path: ${{ env.UV_CACHE_DIR }} path: ${{ env.UV_CACHE_DIR }}
key: >- key: >-
@@ -517,7 +517,6 @@ jobs:
env.HA_SHORT_VERSION }}- env.HA_SHORT_VERSION }}-
- name: Install additional OS dependencies - name: Install additional OS dependencies
if: steps.cache-venv.outputs.cache-hit != 'true' if: steps.cache-venv.outputs.cache-hit != 'true'
timeout-minutes: 5
run: | run: |
sudo rm /etc/apt/sources.list.d/microsoft-prod.list sudo rm /etc/apt/sources.list.d/microsoft-prod.list
sudo apt-get update sudo apt-get update
@@ -579,23 +578,22 @@ jobs:
- base - base
steps: steps:
- name: Install additional OS dependencies - name: Install additional OS dependencies
timeout-minutes: 5
run: | run: |
sudo rm /etc/apt/sources.list.d/microsoft-prod.list sudo rm /etc/apt/sources.list.d/microsoft-prod.list
sudo apt-get update sudo apt-get update
sudo apt-get -y install \ sudo apt-get -y install \
libturbojpeg libturbojpeg
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }} - name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python id: python
uses: actions/setup-python@v6.0.0 uses: actions/setup-python@v5.6.0
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv id: cache-venv
uses: actions/cache/restore@v4.2.4 uses: actions/cache/restore@v4.2.3
with: with:
path: venv path: venv
fail-on-cache-miss: true fail-on-cache-miss: true
@@ -619,16 +617,16 @@ jobs:
- base - base
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }} - name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python id: python
uses: actions/setup-python@v6.0.0 uses: actions/setup-python@v5.6.0
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true check-latest: true
- name: Restore base Python virtual environment - name: Restore base Python virtual environment
id: cache-venv id: cache-venv
uses: actions/cache/restore@v4.2.4 uses: actions/cache/restore@v4.2.3
with: with:
path: venv path: venv
fail-on-cache-miss: true fail-on-cache-miss: true
@@ -653,9 +651,9 @@ jobs:
&& github.event_name == 'pull_request' && github.event_name == 'pull_request'
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Dependency review - name: Dependency review
uses: actions/dependency-review-action@v4.7.3 uses: actions/dependency-review-action@v4.7.1
with: with:
license-check: false # We use our own license audit checks license-check: false # We use our own license audit checks
@@ -676,16 +674,16 @@ jobs:
python-version: ${{ fromJson(needs.info.outputs.python_versions) }} python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }} - name: Set up Python ${{ matrix.python-version }}
id: python id: python
uses: actions/setup-python@v6.0.0 uses: actions/setup-python@v5.6.0
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
check-latest: true check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment - name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv id: cache-venv
uses: actions/cache/restore@v4.2.4 uses: actions/cache/restore@v4.2.3
with: with:
path: venv path: venv
fail-on-cache-miss: true fail-on-cache-miss: true
@@ -719,16 +717,16 @@ jobs:
- base - base
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }} - name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python id: python
uses: actions/setup-python@v6.0.0 uses: actions/setup-python@v5.6.0
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv id: cache-venv
uses: actions/cache/restore@v4.2.4 uses: actions/cache/restore@v4.2.3
with: with:
path: venv path: venv
fail-on-cache-miss: true fail-on-cache-miss: true
@@ -766,16 +764,16 @@ jobs:
- base - base
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }} - name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python id: python
uses: actions/setup-python@v6.0.0 uses: actions/setup-python@v5.6.0
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv id: cache-venv
uses: actions/cache/restore@v4.2.4 uses: actions/cache/restore@v4.2.3
with: with:
path: venv path: venv
fail-on-cache-miss: true fail-on-cache-miss: true
@@ -811,10 +809,10 @@ jobs:
- base - base
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }} - name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python id: python
uses: actions/setup-python@v6.0.0 uses: actions/setup-python@v5.6.0
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true check-latest: true
@@ -827,7 +825,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv id: cache-venv
uses: actions/cache/restore@v4.2.4 uses: actions/cache/restore@v4.2.3
with: with:
path: venv path: venv
fail-on-cache-miss: true fail-on-cache-miss: true
@@ -835,7 +833,7 @@ jobs:
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{ ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }} needs.info.outputs.python_cache_key }}
- name: Restore mypy cache - name: Restore mypy cache
uses: actions/cache@v4.2.4 uses: actions/cache@v4.2.3
with: with:
path: .mypy_cache path: .mypy_cache
key: >- key: >-
@@ -879,7 +877,6 @@ jobs:
name: Split tests for full run name: Split tests for full run
steps: steps:
- name: Install additional OS dependencies - name: Install additional OS dependencies
timeout-minutes: 5
run: | run: |
sudo rm /etc/apt/sources.list.d/microsoft-prod.list sudo rm /etc/apt/sources.list.d/microsoft-prod.list
sudo apt-get update sudo apt-get update
@@ -889,16 +886,16 @@ jobs:
libturbojpeg \ libturbojpeg \
libgammu-dev libgammu-dev
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }} - name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python id: python
uses: actions/setup-python@v6.0.0 uses: actions/setup-python@v5.6.0
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true check-latest: true
- name: Restore base Python virtual environment - name: Restore base Python virtual environment
id: cache-venv id: cache-venv
uses: actions/cache/restore@v4.2.4 uses: actions/cache/restore@v4.2.3
with: with:
path: venv path: venv
fail-on-cache-miss: true fail-on-cache-miss: true
@@ -940,7 +937,6 @@ jobs:
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }}) Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
steps: steps:
- name: Install additional OS dependencies - name: Install additional OS dependencies
timeout-minutes: 5
run: | run: |
sudo rm /etc/apt/sources.list.d/microsoft-prod.list sudo rm /etc/apt/sources.list.d/microsoft-prod.list
sudo apt-get update sudo apt-get update
@@ -951,16 +947,16 @@ jobs:
libgammu-dev \ libgammu-dev \
libxml2-utils libxml2-utils
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }} - name: Set up Python ${{ matrix.python-version }}
id: python id: python
uses: actions/setup-python@v6.0.0 uses: actions/setup-python@v5.6.0
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
check-latest: true check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment - name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv id: cache-venv
uses: actions/cache/restore@v4.2.4 uses: actions/cache/restore@v4.2.3
with: with:
path: venv path: venv
fail-on-cache-miss: true fail-on-cache-miss: true
@@ -974,7 +970,7 @@ jobs:
run: | run: |
echo "::add-matcher::.github/workflows/matchers/pytest-slow.json" echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
- name: Download pytest_buckets - name: Download pytest_buckets
uses: actions/download-artifact@v5.0.0 uses: actions/download-artifact@v4.3.0
with: with:
name: pytest_buckets name: pytest_buckets
- name: Compile English translations - name: Compile English translations
@@ -1074,7 +1070,6 @@ jobs:
Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }} Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }}
steps: steps:
- name: Install additional OS dependencies - name: Install additional OS dependencies
timeout-minutes: 5
run: | run: |
sudo rm /etc/apt/sources.list.d/microsoft-prod.list sudo rm /etc/apt/sources.list.d/microsoft-prod.list
sudo apt-get update sudo apt-get update
@@ -1085,16 +1080,16 @@ jobs:
libmariadb-dev-compat \ libmariadb-dev-compat \
libxml2-utils libxml2-utils
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }} - name: Set up Python ${{ matrix.python-version }}
id: python id: python
uses: actions/setup-python@v6.0.0 uses: actions/setup-python@v5.6.0
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
check-latest: true check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment - name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv id: cache-venv
uses: actions/cache/restore@v4.2.4 uses: actions/cache/restore@v4.2.3
with: with:
path: venv path: venv
fail-on-cache-miss: true fail-on-cache-miss: true
@@ -1215,7 +1210,6 @@ jobs:
Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }} Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }}
steps: steps:
- name: Install additional OS dependencies - name: Install additional OS dependencies
timeout-minutes: 5
run: | run: |
sudo rm /etc/apt/sources.list.d/microsoft-prod.list sudo rm /etc/apt/sources.list.d/microsoft-prod.list
sudo apt-get update sudo apt-get update
@@ -1228,16 +1222,16 @@ jobs:
sudo apt-get -y install \ sudo apt-get -y install \
postgresql-server-dev-14 postgresql-server-dev-14
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }} - name: Set up Python ${{ matrix.python-version }}
id: python id: python
uses: actions/setup-python@v6.0.0 uses: actions/setup-python@v5.6.0
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
check-latest: true check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment - name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv id: cache-venv
uses: actions/cache/restore@v4.2.4 uses: actions/cache/restore@v4.2.3
with: with:
path: venv path: venv
fail-on-cache-miss: true fail-on-cache-miss: true
@@ -1340,14 +1334,14 @@ jobs:
timeout-minutes: 10 timeout-minutes: 10
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Download all coverage artifacts - name: Download all coverage artifacts
uses: actions/download-artifact@v5.0.0 uses: actions/download-artifact@v4.3.0
with: with:
pattern: coverage-* pattern: coverage-*
- name: Upload coverage to Codecov - name: Upload coverage to Codecov
if: needs.info.outputs.test_full_suite == 'true' if: needs.info.outputs.test_full_suite == 'true'
uses: codecov/codecov-action@v5.5.1 uses: codecov/codecov-action@v5.4.3
with: with:
fail_ci_if_error: true fail_ci_if_error: true
flags: full-suite flags: full-suite
@@ -1377,7 +1371,6 @@ jobs:
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }}) Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
steps: steps:
- name: Install additional OS dependencies - name: Install additional OS dependencies
timeout-minutes: 5
run: | run: |
sudo rm /etc/apt/sources.list.d/microsoft-prod.list sudo rm /etc/apt/sources.list.d/microsoft-prod.list
sudo apt-get update sudo apt-get update
@@ -1388,16 +1381,16 @@ jobs:
libgammu-dev \ libgammu-dev \
libxml2-utils libxml2-utils
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }} - name: Set up Python ${{ matrix.python-version }}
id: python id: python
uses: actions/setup-python@v6.0.0 uses: actions/setup-python@v5.6.0
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
check-latest: true check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment - name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv id: cache-venv
uses: actions/cache/restore@v4.2.4 uses: actions/cache/restore@v4.2.3
with: with:
path: venv path: venv
fail-on-cache-miss: true fail-on-cache-miss: true
@@ -1491,14 +1484,14 @@ jobs:
timeout-minutes: 10 timeout-minutes: 10
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Download all coverage artifacts - name: Download all coverage artifacts
uses: actions/download-artifact@v5.0.0 uses: actions/download-artifact@v4.3.0
with: with:
pattern: coverage-* pattern: coverage-*
- name: Upload coverage to Codecov - name: Upload coverage to Codecov
if: needs.info.outputs.test_full_suite == 'false' if: needs.info.outputs.test_full_suite == 'false'
uses: codecov/codecov-action@v5.5.1 uses: codecov/codecov-action@v5.4.3
with: with:
fail_ci_if_error: true fail_ci_if_error: true
token: ${{ secrets.CODECOV_TOKEN }} token: ${{ secrets.CODECOV_TOKEN }}
@@ -1518,7 +1511,7 @@ jobs:
timeout-minutes: 10 timeout-minutes: 10
steps: steps:
- name: Download all coverage artifacts - name: Download all coverage artifacts
uses: actions/download-artifact@v5.0.0 uses: actions/download-artifact@v4.3.0
with: with:
pattern: test-results-* pattern: test-results-*
- name: Upload test results to Codecov - name: Upload test results to Codecov

View File

@@ -21,14 +21,14 @@ jobs:
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Initialize CodeQL - name: Initialize CodeQL
uses: github/codeql-action/init@v3.30.1 uses: github/codeql-action/init@v3.29.2
with: with:
languages: python languages: python
- name: Perform CodeQL Analysis - name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.30.1 uses: github/codeql-action/analyze@v3.29.2
with: with:
category: "/language:python" category: "/language:python"

View File

@@ -16,7 +16,7 @@ jobs:
steps: steps:
- name: Check if integration label was added and extract details - name: Check if integration label was added and extract details
id: extract id: extract
uses: actions/github-script@v8 uses: actions/github-script@v7.0.1
with: with:
script: | script: |
// Debug: Log the event payload // Debug: Log the event payload
@@ -113,7 +113,7 @@ jobs:
- name: Fetch similar issues - name: Fetch similar issues
id: fetch_similar id: fetch_similar
if: steps.extract.outputs.should_continue == 'true' if: steps.extract.outputs.should_continue == 'true'
uses: actions/github-script@v8 uses: actions/github-script@v7.0.1
env: env:
INTEGRATION_LABELS: ${{ steps.extract.outputs.integration_labels }} INTEGRATION_LABELS: ${{ steps.extract.outputs.integration_labels }}
CURRENT_NUMBER: ${{ steps.extract.outputs.current_number }} CURRENT_NUMBER: ${{ steps.extract.outputs.current_number }}
@@ -231,7 +231,7 @@ jobs:
- name: Detect duplicates using AI - name: Detect duplicates using AI
id: ai_detection id: ai_detection
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true' if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
uses: actions/ai-inference@v2.0.1 uses: actions/ai-inference@v1.1.0
with: with:
model: openai/gpt-4o model: openai/gpt-4o
system-prompt: | system-prompt: |
@@ -280,7 +280,7 @@ jobs:
- name: Post duplicate detection results - name: Post duplicate detection results
id: post_results id: post_results
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true' if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
uses: actions/github-script@v8 uses: actions/github-script@v7.0.1
env: env:
AI_RESPONSE: ${{ steps.ai_detection.outputs.response }} AI_RESPONSE: ${{ steps.ai_detection.outputs.response }}
SIMILAR_ISSUES: ${{ steps.fetch_similar.outputs.similar_issues }} SIMILAR_ISSUES: ${{ steps.fetch_similar.outputs.similar_issues }}

View File

@@ -16,7 +16,7 @@ jobs:
steps: steps:
- name: Check issue language - name: Check issue language
id: detect_language id: detect_language
uses: actions/github-script@v8 uses: actions/github-script@v7.0.1
env: env:
ISSUE_NUMBER: ${{ github.event.issue.number }} ISSUE_NUMBER: ${{ github.event.issue.number }}
ISSUE_TITLE: ${{ github.event.issue.title }} ISSUE_TITLE: ${{ github.event.issue.title }}
@@ -57,7 +57,7 @@ jobs:
- name: Detect language using AI - name: Detect language using AI
id: ai_language_detection id: ai_language_detection
if: steps.detect_language.outputs.should_continue == 'true' if: steps.detect_language.outputs.should_continue == 'true'
uses: actions/ai-inference@v2.0.1 uses: actions/ai-inference@v1.1.0
with: with:
model: openai/gpt-4o-mini model: openai/gpt-4o-mini
system-prompt: | system-prompt: |
@@ -90,7 +90,7 @@ jobs:
- name: Process non-English issues - name: Process non-English issues
if: steps.detect_language.outputs.should_continue == 'true' if: steps.detect_language.outputs.should_continue == 'true'
uses: actions/github-script@v8 uses: actions/github-script@v7.0.1
env: env:
AI_RESPONSE: ${{ steps.ai_language_detection.outputs.response }} AI_RESPONSE: ${{ steps.ai_language_detection.outputs.response }}
ISSUE_NUMBER: ${{ steps.detect_language.outputs.issue_number }} ISSUE_NUMBER: ${{ steps.detect_language.outputs.issue_number }}

View File

@@ -9,10 +9,10 @@ jobs:
   check-authorization:
     runs-on: ubuntu-latest
     # Only run if this is a Task issue type (from the issue form)
-    if: github.event.issue.type.name == 'Task'
+    if: github.event.issue.issue_type == 'Task'
     steps:
       - name: Check if user is authorized
-        uses: actions/github-script@v8
+        uses: actions/github-script@v7
         with:
           script: |
             const issueAuthor = context.payload.issue.user.login;

View File

@@ -17,7 +17,7 @@ jobs:
# - No PRs marked as no-stale # - No PRs marked as no-stale
# - No issues (-1) # - No issues (-1)
- name: 60 days stale PRs policy - name: 60 days stale PRs policy
uses: actions/stale@v10.0.0 uses: actions/stale@v9.1.0
with: with:
repo-token: ${{ secrets.GITHUB_TOKEN }} repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 60 days-before-stale: 60
@@ -57,7 +57,7 @@ jobs:
# - No issues marked as no-stale or help-wanted # - No issues marked as no-stale or help-wanted
# - No PRs (-1) # - No PRs (-1)
- name: 90 days stale issues - name: 90 days stale issues
uses: actions/stale@v10.0.0 uses: actions/stale@v9.1.0
with: with:
repo-token: ${{ steps.token.outputs.token }} repo-token: ${{ steps.token.outputs.token }}
days-before-stale: 90 days-before-stale: 90
@@ -87,7 +87,7 @@ jobs:
# - No Issues marked as no-stale or help-wanted # - No Issues marked as no-stale or help-wanted
# - No PRs (-1) # - No PRs (-1)
- name: Needs more information stale issues policy - name: Needs more information stale issues policy
uses: actions/stale@v10.0.0 uses: actions/stale@v9.1.0
with: with:
repo-token: ${{ steps.token.outputs.token }} repo-token: ${{ steps.token.outputs.token }}
only-labels: "needs-more-information" only-labels: "needs-more-information"

View File

@@ -19,10 +19,10 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout the repository - name: Checkout the repository
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }} - name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v6.0.0 uses: actions/setup-python@v5.6.0
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}

View File

@@ -32,11 +32,11 @@ jobs:
architectures: ${{ steps.info.outputs.architectures }} architectures: ${{ steps.info.outputs.architectures }}
steps: steps:
- name: Checkout the repository - name: Checkout the repository
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }} - name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python id: python
uses: actions/setup-python@v6.0.0 uses: actions/setup-python@v5.6.0
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true check-latest: true
@@ -135,20 +135,20 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }} arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps: steps:
- name: Checkout the repository - name: Checkout the repository
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Download env_file - name: Download env_file
uses: actions/download-artifact@v5.0.0 uses: actions/download-artifact@v4.3.0
with: with:
name: env_file name: env_file
- name: Download build_constraints - name: Download build_constraints
uses: actions/download-artifact@v5.0.0 uses: actions/download-artifact@v4.3.0
with: with:
name: build_constraints name: build_constraints
- name: Download requirements_diff - name: Download requirements_diff
uses: actions/download-artifact@v5.0.0 uses: actions/download-artifact@v4.3.0
with: with:
name: requirements_diff name: requirements_diff
@@ -159,7 +159,7 @@ jobs:
sed -i "/uv/d" requirements_diff.txt sed -i "/uv/d" requirements_diff.txt
- name: Build wheels - name: Build wheels
uses: home-assistant/wheels@2025.07.0 uses: home-assistant/wheels@2025.03.0
with: with:
abi: ${{ matrix.abi }} abi: ${{ matrix.abi }}
tag: musllinux_1_2 tag: musllinux_1_2
@@ -184,25 +184,25 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }} arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps: steps:
- name: Checkout the repository - name: Checkout the repository
uses: actions/checkout@v5.0.0 uses: actions/checkout@v4.2.2
- name: Download env_file - name: Download env_file
uses: actions/download-artifact@v5.0.0 uses: actions/download-artifact@v4.3.0
with: with:
name: env_file name: env_file
- name: Download build_constraints - name: Download build_constraints
uses: actions/download-artifact@v5.0.0 uses: actions/download-artifact@v4.3.0
with: with:
name: build_constraints name: build_constraints
- name: Download requirements_diff - name: Download requirements_diff
uses: actions/download-artifact@v5.0.0 uses: actions/download-artifact@v4.3.0
with: with:
name: requirements_diff name: requirements_diff
- name: Download requirements_all_wheels - name: Download requirements_all_wheels
uses: actions/download-artifact@v5.0.0 uses: actions/download-artifact@v4.3.0
with: with:
name: requirements_all_wheels name: requirements_all_wheels
@@ -219,7 +219,7 @@ jobs:
sed -i "/uv/d" requirements_diff.txt sed -i "/uv/d" requirements_diff.txt
- name: Build wheels - name: Build wheels
uses: home-assistant/wheels@2025.07.0 uses: home-assistant/wheels@2025.03.0
with: with:
abi: ${{ matrix.abi }} abi: ${{ matrix.abi }}
tag: musllinux_1_2 tag: musllinux_1_2

View File

@@ -18,7 +18,7 @@ repos:
         exclude_types: [csv, json, html]
         exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v6.0.0
+    rev: v5.0.0
     hooks:
       - id: check-executables-have-shebangs
         stages: [manual]

View File

@@ -53,7 +53,6 @@ homeassistant.components.air_quality.*
 homeassistant.components.airgradient.*
 homeassistant.components.airly.*
 homeassistant.components.airnow.*
-homeassistant.components.airos.*
 homeassistant.components.airq.*
 homeassistant.components.airthings.*
 homeassistant.components.airthings_ble.*
@@ -307,10 +306,10 @@ homeassistant.components.ld2410_ble.*
 homeassistant.components.led_ble.*
 homeassistant.components.lektrico.*
 homeassistant.components.letpot.*
-homeassistant.components.libre_hardware_monitor.*
 homeassistant.components.lidarr.*
 homeassistant.components.lifx.*
 homeassistant.components.light.*
+homeassistant.components.linear_garage_door.*
 homeassistant.components.linkplay.*
 homeassistant.components.litejet.*
 homeassistant.components.litterrobot.*
@@ -378,12 +377,10 @@ homeassistant.components.onedrive.*
 homeassistant.components.onewire.*
 homeassistant.components.onkyo.*
 homeassistant.components.open_meteo.*
-homeassistant.components.open_router.*
 homeassistant.components.openai_conversation.*
 homeassistant.components.openexchangerates.*
 homeassistant.components.opensky.*
 homeassistant.components.openuv.*
-homeassistant.components.opnsense.*
 homeassistant.components.opower.*
 homeassistant.components.oralb.*
 homeassistant.components.otbr.*
@@ -460,7 +457,6 @@ homeassistant.components.sensorpush_cloud.*
 homeassistant.components.sensoterra.*
 homeassistant.components.senz.*
 homeassistant.components.sfr_box.*
-homeassistant.components.sftp_storage.*
 homeassistant.components.shell_command.*
 homeassistant.components.shelly.*
 homeassistant.components.shopping_list.*
@@ -469,7 +465,6 @@ homeassistant.components.simplisafe.*
 homeassistant.components.siren.*
 homeassistant.components.skybell.*
 homeassistant.components.slack.*
-homeassistant.components.sleep_as_android.*
 homeassistant.components.sleepiq.*
 homeassistant.components.smhi.*
 homeassistant.components.smlight.*
@@ -505,7 +500,6 @@ homeassistant.components.tag.*
 homeassistant.components.tailscale.*
 homeassistant.components.tailwind.*
 homeassistant.components.tami4.*
-homeassistant.components.tankerkoenig.*
 homeassistant.components.tautulli.*
 homeassistant.components.tcp.*
 homeassistant.components.technove.*
@@ -541,7 +535,6 @@ homeassistant.components.unifiprotect.*
 homeassistant.components.upcloud.*
 homeassistant.components.update.*
 homeassistant.components.uptime.*
-homeassistant.components.uptime_kuma.*
 homeassistant.components.uptimerobot.*
 homeassistant.components.usb.*
 homeassistant.components.uvc.*
@@ -551,7 +544,6 @@ homeassistant.components.valve.*
 homeassistant.components.velbus.*
 homeassistant.components.vlc_telnet.*
 homeassistant.components.vodafone_station.*
-homeassistant.components.volvo.*
 homeassistant.components.wake_on_lan.*
 homeassistant.components.wake_word.*
 homeassistant.components.wallbox.*

CODEOWNERS (generated)
View File

@@ -67,8 +67,6 @@ build.json @home-assistant/supervisor
 /tests/components/airly/ @bieniu
 /homeassistant/components/airnow/ @asymworks
 /tests/components/airnow/ @asymworks
-/homeassistant/components/airos/ @CoMPaTech
-/tests/components/airos/ @CoMPaTech
 /homeassistant/components/airq/ @Sibgatulin @dl2080
 /tests/components/airq/ @Sibgatulin @dl2080
 /homeassistant/components/airthings/ @danielhiversen @LaStrada
@@ -87,8 +85,6 @@ build.json @home-assistant/supervisor
 /tests/components/airzone/ @Noltari
 /homeassistant/components/airzone_cloud/ @Noltari
 /tests/components/airzone_cloud/ @Noltari
-/homeassistant/components/aladdin_connect/ @swcloudgenie
-/tests/components/aladdin_connect/ @swcloudgenie
 /homeassistant/components/alarm_control_panel/ @home-assistant/core
 /tests/components/alarm_control_panel/ @home-assistant/core
 /homeassistant/components/alert/ @home-assistant/core @frenck
@@ -154,12 +150,12 @@ build.json @home-assistant/supervisor
 /tests/components/arve/ @ikalnyi
 /homeassistant/components/aseko_pool_live/ @milanmeu
 /tests/components/aseko_pool_live/ @milanmeu
-/homeassistant/components/assist_pipeline/ @synesthesiam @arturpragacz
-/tests/components/assist_pipeline/ @synesthesiam @arturpragacz
-/homeassistant/components/assist_satellite/ @home-assistant/core @synesthesiam @arturpragacz
-/tests/components/assist_satellite/ @home-assistant/core @synesthesiam @arturpragacz
-/homeassistant/components/asuswrt/ @kennedyshead @ollo69 @Vaskivskyi
-/tests/components/asuswrt/ @kennedyshead @ollo69 @Vaskivskyi
+/homeassistant/components/assist_pipeline/ @balloob @synesthesiam
+/tests/components/assist_pipeline/ @balloob @synesthesiam
+/homeassistant/components/assist_satellite/ @home-assistant/core @synesthesiam
+/tests/components/assist_satellite/ @home-assistant/core @synesthesiam
+/homeassistant/components/asuswrt/ @kennedyshead @ollo69
+/tests/components/asuswrt/ @kennedyshead @ollo69
 /homeassistant/components/atag/ @MatsNL
 /tests/components/atag/ @MatsNL
 /homeassistant/components/aten_pe/ @mtdcr
@@ -298,8 +294,8 @@ build.json @home-assistant/supervisor
 /tests/components/configurator/ @home-assistant/core
 /homeassistant/components/control4/ @lawtancool
 /tests/components/control4/ @lawtancool
-/homeassistant/components/conversation/ @home-assistant/core @synesthesiam @arturpragacz
-/tests/components/conversation/ @home-assistant/core @synesthesiam @arturpragacz
+/homeassistant/components/conversation/ @home-assistant/core @synesthesiam
+/tests/components/conversation/ @home-assistant/core @synesthesiam
 /homeassistant/components/cookidoo/ @miaucl
 /tests/components/cookidoo/ @miaucl
 /homeassistant/components/coolmaster/ @OnFreund
@@ -424,8 +420,6 @@ build.json @home-assistant/supervisor
 /homeassistant/components/emby/ @mezz64
 /homeassistant/components/emoncms/ @borpin @alexandrecuer
 /tests/components/emoncms/ @borpin @alexandrecuer
-/homeassistant/components/emoncms_history/ @alexandrecuer
-/tests/components/emoncms_history/ @alexandrecuer
 /homeassistant/components/emonitor/ @bdraco
 /tests/components/emonitor/ @bdraco
 /homeassistant/components/emulated_hue/ @bdraco @Tho85
@@ -442,8 +436,8 @@ build.json @home-assistant/supervisor
 /tests/components/enigma2/ @autinerd
 /homeassistant/components/enocean/ @bdurrer
 /tests/components/enocean/ @bdurrer
-/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
-/tests/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
+/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @joostlek @catsmanac
+/tests/components/enphase_envoy/ @bdraco @cgarwood @joostlek @catsmanac
 /homeassistant/components/entur_public_transport/ @hfurubotten
 /homeassistant/components/environment_canada/ @gwww @michaeldavie
 /tests/components/environment_canada/ @gwww @michaeldavie
@@ -464,6 +458,8 @@ build.json @home-assistant/supervisor
 /tests/components/eufylife_ble/ @bdr99
 /homeassistant/components/event/ @home-assistant/core
 /tests/components/event/ @home-assistant/core
+/homeassistant/components/evil_genius_labs/ @balloob
+/tests/components/evil_genius_labs/ @balloob
 /homeassistant/components/evohome/ @zxdavb
 /tests/components/evohome/ @zxdavb
 /homeassistant/components/ezviz/ @RenierM26
@@ -513,8 +509,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/forked_daapd/ @uvjustin /homeassistant/components/forked_daapd/ @uvjustin
/tests/components/forked_daapd/ @uvjustin /tests/components/forked_daapd/ @uvjustin
/homeassistant/components/fortios/ @kimfrellsen /homeassistant/components/fortios/ @kimfrellsen
/homeassistant/components/foscam/ @Foscam-wangzhengyu /homeassistant/components/foscam/ @krmarien
/tests/components/foscam/ @Foscam-wangzhengyu /tests/components/foscam/ @krmarien
/homeassistant/components/freebox/ @hacf-fr @Quentame /homeassistant/components/freebox/ @hacf-fr @Quentame
/tests/components/freebox/ @hacf-fr @Quentame /tests/components/freebox/ @hacf-fr @Quentame
/homeassistant/components/freedompro/ @stefano055415 /homeassistant/components/freedompro/ @stefano055415
@@ -648,8 +644,6 @@ build.json @home-assistant/supervisor
/tests/components/homeassistant/ @home-assistant/core /tests/components/homeassistant/ @home-assistant/core
/homeassistant/components/homeassistant_alerts/ @home-assistant/core /homeassistant/components/homeassistant_alerts/ @home-assistant/core
/tests/components/homeassistant_alerts/ @home-assistant/core /tests/components/homeassistant_alerts/ @home-assistant/core
/homeassistant/components/homeassistant_connect_zbt2/ @home-assistant/core
/tests/components/homeassistant_connect_zbt2/ @home-assistant/core
/homeassistant/components/homeassistant_green/ @home-assistant/core /homeassistant/components/homeassistant_green/ @home-assistant/core
/tests/components/homeassistant_green/ @home-assistant/core /tests/components/homeassistant_green/ @home-assistant/core
/homeassistant/components/homeassistant_hardware/ @home-assistant/core /homeassistant/components/homeassistant_hardware/ @home-assistant/core
@@ -678,8 +672,8 @@ build.json @home-assistant/supervisor
/tests/components/http/ @home-assistant/core /tests/components/http/ @home-assistant/core
/homeassistant/components/huawei_lte/ @scop @fphammerle /homeassistant/components/huawei_lte/ @scop @fphammerle
/tests/components/huawei_lte/ @scop @fphammerle /tests/components/huawei_lte/ @scop @fphammerle
/homeassistant/components/hue/ @marcelveldt /homeassistant/components/hue/ @balloob @marcelveldt
/tests/components/hue/ @marcelveldt /tests/components/hue/ @balloob @marcelveldt
/homeassistant/components/huisbaasje/ @dennisschroer /homeassistant/components/huisbaasje/ @dennisschroer
/tests/components/huisbaasje/ @dennisschroer /tests/components/huisbaasje/ @dennisschroer
/homeassistant/components/humidifier/ @home-assistant/core @Shulyaka /homeassistant/components/humidifier/ @home-assistant/core @Shulyaka
@@ -690,8 +684,8 @@ build.json @home-assistant/supervisor
/tests/components/husqvarna_automower/ @Thomas55555 /tests/components/husqvarna_automower/ @Thomas55555
/homeassistant/components/husqvarna_automower_ble/ @alistair23 /homeassistant/components/husqvarna_automower_ble/ @alistair23
/tests/components/husqvarna_automower_ble/ @alistair23 /tests/components/husqvarna_automower_ble/ @alistair23
/homeassistant/components/huum/ @frwickst @vincentwolsink /homeassistant/components/huum/ @frwickst
/tests/components/huum/ @frwickst @vincentwolsink /tests/components/huum/ @frwickst
/homeassistant/components/hvv_departures/ @vigonotion /homeassistant/components/hvv_departures/ @vigonotion
/tests/components/hvv_departures/ @vigonotion /tests/components/hvv_departures/ @vigonotion
/homeassistant/components/hydrawise/ @dknowles2 @thomaskistler @ptcryan /homeassistant/components/hydrawise/ @dknowles2 @thomaskistler @ptcryan
@@ -751,8 +745,8 @@ build.json @home-assistant/supervisor
/tests/components/integration/ @dgomes /tests/components/integration/ @dgomes
/homeassistant/components/intellifire/ @jeeftor /homeassistant/components/intellifire/ @jeeftor
/tests/components/intellifire/ @jeeftor /tests/components/intellifire/ @jeeftor
/homeassistant/components/intent/ @home-assistant/core @synesthesiam @arturpragacz /homeassistant/components/intent/ @home-assistant/core @synesthesiam
/tests/components/intent/ @home-assistant/core @synesthesiam @arturpragacz /tests/components/intent/ @home-assistant/core @synesthesiam
/homeassistant/components/intesishome/ @jnimmo /homeassistant/components/intesishome/ @jnimmo
/homeassistant/components/iometer/ @MaestroOnICe /homeassistant/components/iometer/ @MaestroOnICe
/tests/components/iometer/ @MaestroOnICe /tests/components/iometer/ @MaestroOnICe
@@ -860,14 +854,14 @@ build.json @home-assistant/supervisor
/tests/components/lg_netcast/ @Drafteed @splinter98 /tests/components/lg_netcast/ @Drafteed @splinter98
/homeassistant/components/lg_thinq/ @LG-ThinQ-Integration /homeassistant/components/lg_thinq/ @LG-ThinQ-Integration
/tests/components/lg_thinq/ @LG-ThinQ-Integration /tests/components/lg_thinq/ @LG-ThinQ-Integration
/homeassistant/components/libre_hardware_monitor/ @Sab44
/tests/components/libre_hardware_monitor/ @Sab44
/homeassistant/components/lidarr/ @tkdrob /homeassistant/components/lidarr/ @tkdrob
/tests/components/lidarr/ @tkdrob /tests/components/lidarr/ @tkdrob
/homeassistant/components/lifx/ @Djelibeybi /homeassistant/components/lifx/ @Djelibeybi
/tests/components/lifx/ @Djelibeybi /tests/components/lifx/ @Djelibeybi
/homeassistant/components/light/ @home-assistant/core /homeassistant/components/light/ @home-assistant/core
/tests/components/light/ @home-assistant/core /tests/components/light/ @home-assistant/core
/homeassistant/components/linear_garage_door/ @IceBotYT
/tests/components/linear_garage_door/ @IceBotYT
/homeassistant/components/linkplay/ @Velleman /homeassistant/components/linkplay/ @Velleman
/tests/components/linkplay/ @Velleman /tests/components/linkplay/ @Velleman
/homeassistant/components/linux_battery/ @fabaff /homeassistant/components/linux_battery/ @fabaff
@@ -1108,8 +1102,8 @@ build.json @home-assistant/supervisor
/tests/components/onvif/ @hunterjm @jterrace /tests/components/onvif/ @hunterjm @jterrace
/homeassistant/components/open_meteo/ @frenck /homeassistant/components/open_meteo/ @frenck
/tests/components/open_meteo/ @frenck /tests/components/open_meteo/ @frenck
/homeassistant/components/open_router/ @joostlek /homeassistant/components/openai_conversation/ @balloob
/tests/components/open_router/ @joostlek /tests/components/openai_conversation/ @balloob
/homeassistant/components/openerz/ @misialq /homeassistant/components/openerz/ @misialq
/tests/components/openerz/ @misialq /tests/components/openerz/ @misialq
/homeassistant/components/openexchangerates/ @MartinHjelmare /homeassistant/components/openexchangerates/ @MartinHjelmare
@@ -1185,8 +1179,6 @@ build.json @home-assistant/supervisor
/tests/components/plum_lightpad/ @ColinHarrington @prystupa /tests/components/plum_lightpad/ @ColinHarrington @prystupa
/homeassistant/components/point/ @fredrike /homeassistant/components/point/ @fredrike
/tests/components/point/ @fredrike /tests/components/point/ @fredrike
/homeassistant/components/pooldose/ @lmaertin
/tests/components/pooldose/ @lmaertin
/homeassistant/components/poolsense/ @haemishkyd /homeassistant/components/poolsense/ @haemishkyd
/tests/components/poolsense/ @haemishkyd /tests/components/poolsense/ @haemishkyd
/homeassistant/components/powerfox/ @klaasnicolaas /homeassistant/components/powerfox/ @klaasnicolaas
@@ -1208,6 +1200,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/proximity/ @mib1185 /homeassistant/components/proximity/ @mib1185
/tests/components/proximity/ @mib1185 /tests/components/proximity/ @mib1185
/homeassistant/components/proxmoxve/ @jhollowe @Corbeno /homeassistant/components/proxmoxve/ @jhollowe @Corbeno
/homeassistant/components/prusalink/ @balloob
/tests/components/prusalink/ @balloob
/homeassistant/components/ps4/ @ktnrg45 /homeassistant/components/ps4/ @ktnrg45
/tests/components/ps4/ @ktnrg45 /tests/components/ps4/ @ktnrg45
/homeassistant/components/pterodactyl/ @elmurato /homeassistant/components/pterodactyl/ @elmurato
@@ -1301,8 +1295,8 @@ build.json @home-assistant/supervisor
/tests/components/rflink/ @javicalle /tests/components/rflink/ @javicalle
/homeassistant/components/rfxtrx/ @danielhiversen @elupus @RobBie1221 /homeassistant/components/rfxtrx/ @danielhiversen @elupus @RobBie1221
/tests/components/rfxtrx/ @danielhiversen @elupus @RobBie1221 /tests/components/rfxtrx/ @danielhiversen @elupus @RobBie1221
/homeassistant/components/rhasspy/ @synesthesiam /homeassistant/components/rhasspy/ @balloob @synesthesiam
/tests/components/rhasspy/ @synesthesiam /tests/components/rhasspy/ @balloob @synesthesiam
/homeassistant/components/ridwell/ @bachya /homeassistant/components/ridwell/ @bachya
/tests/components/ridwell/ @bachya /tests/components/ridwell/ @bachya
/homeassistant/components/ring/ @sdb9696 /homeassistant/components/ring/ @sdb9696
@@ -1390,14 +1384,12 @@ build.json @home-assistant/supervisor
/tests/components/seventeentrack/ @shaiu /tests/components/seventeentrack/ @shaiu
/homeassistant/components/sfr_box/ @epenet /homeassistant/components/sfr_box/ @epenet
/tests/components/sfr_box/ @epenet /tests/components/sfr_box/ @epenet
/homeassistant/components/sftp_storage/ @maretodoric
/tests/components/sftp_storage/ @maretodoric
/homeassistant/components/sharkiq/ @JeffResc @funkybunch /homeassistant/components/sharkiq/ @JeffResc @funkybunch
/tests/components/sharkiq/ @JeffResc @funkybunch /tests/components/sharkiq/ @JeffResc @funkybunch
/homeassistant/components/shell_command/ @home-assistant/core /homeassistant/components/shell_command/ @home-assistant/core
/tests/components/shell_command/ @home-assistant/core /tests/components/shell_command/ @home-assistant/core
/homeassistant/components/shelly/ @bieniu @thecode @chemelli74 @bdraco /homeassistant/components/shelly/ @balloob @bieniu @thecode @chemelli74 @bdraco
/tests/components/shelly/ @bieniu @thecode @chemelli74 @bdraco /tests/components/shelly/ @balloob @bieniu @thecode @chemelli74 @bdraco
/homeassistant/components/shodan/ @fabaff /homeassistant/components/shodan/ @fabaff
/homeassistant/components/sia/ @eavanvalkenburg /homeassistant/components/sia/ @eavanvalkenburg
/tests/components/sia/ @eavanvalkenburg /tests/components/sia/ @eavanvalkenburg
@@ -1421,8 +1413,6 @@ build.json @home-assistant/supervisor
/tests/components/skybell/ @tkdrob /tests/components/skybell/ @tkdrob
/homeassistant/components/slack/ @tkdrob @fletcherau /homeassistant/components/slack/ @tkdrob @fletcherau
/tests/components/slack/ @tkdrob @fletcherau /tests/components/slack/ @tkdrob @fletcherau
/homeassistant/components/sleep_as_android/ @tr4nt0r
/tests/components/sleep_as_android/ @tr4nt0r
/homeassistant/components/sleepiq/ @mfugate1 @kbickar /homeassistant/components/sleepiq/ @mfugate1 @kbickar
/tests/components/sleepiq/ @mfugate1 @kbickar /tests/components/sleepiq/ @mfugate1 @kbickar
/homeassistant/components/slide/ @ualex73 /homeassistant/components/slide/ @ualex73
@@ -1544,8 +1534,8 @@ build.json @home-assistant/supervisor
/tests/components/systemmonitor/ @gjohansson-ST /tests/components/systemmonitor/ @gjohansson-ST
/homeassistant/components/tado/ @erwindouna /homeassistant/components/tado/ @erwindouna
/tests/components/tado/ @erwindouna /tests/components/tado/ @erwindouna
/homeassistant/components/tag/ @home-assistant/core /homeassistant/components/tag/ @balloob @dmulcahey
/tests/components/tag/ @home-assistant/core /tests/components/tag/ @balloob @dmulcahey
/homeassistant/components/tailscale/ @frenck /homeassistant/components/tailscale/ @frenck
/tests/components/tailscale/ @frenck /tests/components/tailscale/ @frenck
/homeassistant/components/tailwind/ @frenck /homeassistant/components/tailwind/ @frenck
@@ -1605,8 +1595,6 @@ build.json @home-assistant/supervisor
/tests/components/todo/ @home-assistant/core /tests/components/todo/ @home-assistant/core
/homeassistant/components/todoist/ @boralyl /homeassistant/components/todoist/ @boralyl
/tests/components/todoist/ @boralyl /tests/components/todoist/ @boralyl
/homeassistant/components/togrill/ @elupus
/tests/components/togrill/ @elupus
/homeassistant/components/tolo/ @MatthiasLohr /homeassistant/components/tolo/ @MatthiasLohr
/tests/components/tolo/ @MatthiasLohr /tests/components/tolo/ @MatthiasLohr
/homeassistant/components/tomorrowio/ @raman325 @lymanepp /homeassistant/components/tomorrowio/ @raman325 @lymanepp
@@ -1621,6 +1609,8 @@ build.json @home-assistant/supervisor
/tests/components/tplink_omada/ @MarkGodwin /tests/components/tplink_omada/ @MarkGodwin
/homeassistant/components/traccar/ @ludeeus /homeassistant/components/traccar/ @ludeeus
/tests/components/traccar/ @ludeeus /tests/components/traccar/ @ludeeus
/homeassistant/components/traccar_server/ @ludeeus
/tests/components/traccar_server/ @ludeeus
/homeassistant/components/trace/ @home-assistant/core /homeassistant/components/trace/ @home-assistant/core
/tests/components/trace/ @home-assistant/core /tests/components/trace/ @home-assistant/core
/homeassistant/components/tractive/ @Danielhiversen @zhulik @bieniu /homeassistant/components/tractive/ @Danielhiversen @zhulik @bieniu
@@ -1668,8 +1658,6 @@ build.json @home-assistant/supervisor
/tests/components/upnp/ @StevenLooman /tests/components/upnp/ @StevenLooman
/homeassistant/components/uptime/ @frenck /homeassistant/components/uptime/ @frenck
/tests/components/uptime/ @frenck /tests/components/uptime/ @frenck
/homeassistant/components/uptime_kuma/ @tr4nt0r
/tests/components/uptime_kuma/ @tr4nt0r
/homeassistant/components/uptimerobot/ @ludeeus @chemelli74 /homeassistant/components/uptimerobot/ @ludeeus @chemelli74
/tests/components/uptimerobot/ @ludeeus @chemelli74 /tests/components/uptimerobot/ @ludeeus @chemelli74
/homeassistant/components/usb/ @bdraco /homeassistant/components/usb/ @bdraco
@@ -1690,15 +1678,15 @@ build.json @home-assistant/supervisor
/tests/components/vegehub/ @ghowevege /tests/components/vegehub/ @ghowevege
/homeassistant/components/velbus/ @Cereal2nd @brefra /homeassistant/components/velbus/ @Cereal2nd @brefra
/tests/components/velbus/ @Cereal2nd @brefra /tests/components/velbus/ @Cereal2nd @brefra
/homeassistant/components/velux/ @Julius2342 @DeerMaximum @pawlizio @wollew /homeassistant/components/velux/ @Julius2342 @DeerMaximum @pawlizio
/tests/components/velux/ @Julius2342 @DeerMaximum @pawlizio @wollew /tests/components/velux/ @Julius2342 @DeerMaximum @pawlizio
/homeassistant/components/venstar/ @garbled1 @jhollowe /homeassistant/components/venstar/ @garbled1 @jhollowe
/tests/components/venstar/ @garbled1 @jhollowe /tests/components/venstar/ @garbled1 @jhollowe
/homeassistant/components/versasense/ @imstevenxyz /homeassistant/components/versasense/ @imstevenxyz
/homeassistant/components/version/ @ludeeus /homeassistant/components/version/ @ludeeus
/tests/components/version/ @ludeeus /tests/components/version/ @ludeeus
/homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven /homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak
/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven /tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak
/homeassistant/components/vicare/ @CFenner /homeassistant/components/vicare/ @CFenner
/tests/components/vicare/ @CFenner /tests/components/vicare/ @CFenner
/homeassistant/components/vilfo/ @ManneW /homeassistant/components/vilfo/ @ManneW
@@ -1710,14 +1698,14 @@ build.json @home-assistant/supervisor
/tests/components/vlc_telnet/ @rodripf @MartinHjelmare /tests/components/vlc_telnet/ @rodripf @MartinHjelmare
/homeassistant/components/vodafone_station/ @paoloantinori @chemelli74 /homeassistant/components/vodafone_station/ @paoloantinori @chemelli74
/tests/components/vodafone_station/ @paoloantinori @chemelli74 /tests/components/vodafone_station/ @paoloantinori @chemelli74
/homeassistant/components/voip/ @synesthesiam @jaminh /homeassistant/components/voip/ @balloob @synesthesiam @jaminh
/tests/components/voip/ @synesthesiam @jaminh /tests/components/voip/ @balloob @synesthesiam @jaminh
/homeassistant/components/volumio/ @OnFreund /homeassistant/components/volumio/ @OnFreund
/tests/components/volumio/ @OnFreund /tests/components/volumio/ @OnFreund
/homeassistant/components/volvo/ @thomasddn
/tests/components/volvo/ @thomasddn
/homeassistant/components/volvooncall/ @molobrakos /homeassistant/components/volvooncall/ @molobrakos
/tests/components/volvooncall/ @molobrakos /tests/components/volvooncall/ @molobrakos
/homeassistant/components/vulcan/ @Antoni-Czaplicki
/tests/components/vulcan/ @Antoni-Czaplicki
/homeassistant/components/wake_on_lan/ @ntilley905 /homeassistant/components/wake_on_lan/ @ntilley905
/tests/components/wake_on_lan/ @ntilley905 /tests/components/wake_on_lan/ @ntilley905
/homeassistant/components/wake_word/ @home-assistant/core @synesthesiam /homeassistant/components/wake_word/ @home-assistant/core @synesthesiam
@@ -1768,8 +1756,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/wirelesstag/ @sergeymaysak /homeassistant/components/wirelesstag/ @sergeymaysak
/homeassistant/components/withings/ @joostlek /homeassistant/components/withings/ @joostlek
/tests/components/withings/ @joostlek /tests/components/withings/ @joostlek
/homeassistant/components/wiz/ @sbidy @arturpragacz /homeassistant/components/wiz/ @sbidy
/tests/components/wiz/ @sbidy @arturpragacz /tests/components/wiz/ @sbidy
/homeassistant/components/wled/ @frenck /homeassistant/components/wled/ @frenck
/tests/components/wled/ @frenck /tests/components/wled/ @frenck
/homeassistant/components/wmspro/ @mback2k /homeassistant/components/wmspro/ @mback2k
@@ -1782,8 +1770,8 @@ build.json @home-assistant/supervisor
/tests/components/worldclock/ @fabaff /tests/components/worldclock/ @fabaff
/homeassistant/components/ws66i/ @ssaenger /homeassistant/components/ws66i/ @ssaenger
/tests/components/ws66i/ @ssaenger /tests/components/ws66i/ @ssaenger
/homeassistant/components/wyoming/ @synesthesiam /homeassistant/components/wyoming/ @balloob @synesthesiam
/tests/components/wyoming/ @synesthesiam /tests/components/wyoming/ @balloob @synesthesiam
/homeassistant/components/xbox/ @hunterjm /homeassistant/components/xbox/ @hunterjm
/tests/components/xbox/ @hunterjm /tests/components/xbox/ @hunterjm
/homeassistant/components/xiaomi_aqara/ @danielhiversen @syssi /homeassistant/components/xiaomi_aqara/ @danielhiversen @syssi


@@ -14,8 +14,5 @@ Still interested? Then you should take a peek at the [developer documentation](h
## Feature suggestions ## Feature suggestions
If you want to suggest a new feature for Home Assistant (e.g. new integrations), please [start a discussion](https://github.com/orgs/home-assistant/discussions) on GitHub. If you want to suggest a new feature for Home Assistant (e.g., new integrations), please open a thread in our [Community Forum: Feature Requests](https://community.home-assistant.io/c/feature-requests).
We use [GitHub for tracking issues](https://github.com/home-assistant/core/issues), not for tracking feature requests.
## Issue Tracker
If you want to report an issue, please [create an issue](https://github.com/home-assistant/core/issues) on GitHub.

Dockerfile generated

@@ -31,7 +31,7 @@ RUN \
&& go2rtc --version && go2rtc --version
# Install uv # Install uv
RUN pip3 install uv==0.8.9 RUN pip3 install uv==0.7.1
WORKDIR /usr/src WORKDIR /usr/src


@@ -3,7 +3,8 @@ FROM mcr.microsoft.com/vscode/devcontainers/base:debian
SHELL ["/bin/bash", "-o", "pipefail", "-c"] SHELL ["/bin/bash", "-o", "pipefail", "-c"]
RUN \ RUN \
apt-get update \ curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \
&& apt-get update \
&& DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
# Additional library needed by some tests and accordingly by VScode Tests Discovery # Additional library needed by some tests and accordingly by VScode Tests Discovery
bluez \ bluez \


@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant image: ghcr.io/home-assistant/{arch}-homeassistant
build_from: build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.0 aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.05.0
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.0 armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.05.0
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.0 armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.05.0
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.0 amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.05.0
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.0 i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.05.0
codenotary: codenotary:
signer: notary@home-assistant.io signer: notary@home-assistant.io
base_image: notary@home-assistant.io base_image: notary@home-assistant.io


@@ -187,42 +187,36 @@ def main() -> int:
from . import config, runner # noqa: PLC0415 from . import config, runner # noqa: PLC0415
# Ensure only one instance runs per config directory safe_mode = config.safe_mode_enabled(config_dir)
with runner.ensure_single_execution(config_dir) as single_execution_lock:
# Check if another instance is already running
if single_execution_lock.exit_code is not None:
return single_execution_lock.exit_code
safe_mode = config.safe_mode_enabled(config_dir) runtime_conf = runner.RuntimeConfig(
config_dir=config_dir,
verbose=args.verbose,
log_rotate_days=args.log_rotate_days,
log_file=args.log_file,
log_no_color=args.log_no_color,
skip_pip=args.skip_pip,
skip_pip_packages=args.skip_pip_packages,
recovery_mode=args.recovery_mode,
debug=args.debug,
open_ui=args.open_ui,
safe_mode=safe_mode,
)
runtime_conf = runner.RuntimeConfig( fault_file_name = os.path.join(config_dir, FAULT_LOG_FILENAME)
config_dir=config_dir, with open(fault_file_name, mode="a", encoding="utf8") as fault_file:
verbose=args.verbose, faulthandler.enable(fault_file)
log_rotate_days=args.log_rotate_days, exit_code = runner.run(runtime_conf)
log_file=args.log_file, faulthandler.disable()
log_no_color=args.log_no_color,
skip_pip=args.skip_pip,
skip_pip_packages=args.skip_pip_packages,
recovery_mode=args.recovery_mode,
debug=args.debug,
open_ui=args.open_ui,
safe_mode=safe_mode,
)
fault_file_name = os.path.join(config_dir, FAULT_LOG_FILENAME) # It's possible for the fault file to disappear, so suppress obvious errors
with open(fault_file_name, mode="a", encoding="utf8") as fault_file: with suppress(FileNotFoundError):
faulthandler.enable(fault_file) if os.path.getsize(fault_file_name) == 0:
exit_code = runner.run(runtime_conf) os.remove(fault_file_name)
faulthandler.disable()
# It's possible for the fault file to disappear, so suppress obvious errors check_threads()
with suppress(FileNotFoundError):
if os.path.getsize(fault_file_name) == 0:
os.remove(fault_file_name)
check_threads() return exit_code
return exit_code
if __name__ == "__main__": if __name__ == "__main__":
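
One side of the hunk above introduces a `runner.ensure_single_execution(config_dir)` guard so that only one instance runs per config directory. The sketch below illustrates the general idea with an advisory file lock; it is hypothetical, not the actual Home Assistant implementation, and the helper name and lock filename are invented for illustration.

```python
# Hypothetical sketch of a single-instance guard via an advisory file lock.
# Not the actual runner.ensure_single_execution implementation.
from __future__ import annotations

from collections.abc import Iterator
from contextlib import contextmanager
import fcntl
import os


@contextmanager
def single_instance_lock(config_dir: str) -> Iterator[bool]:
    """Yield True if this process holds the lock, False if another instance does."""
    lock_path = os.path.join(config_dir, ".single_instance.lock")  # illustrative name
    fd = os.open(lock_path, os.O_RDWR | os.O_CREAT, 0o644)
    try:
        try:
            fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except BlockingIOError:
            # Another instance already holds the lock.
            yield False
            return
        yield True
    finally:
        os.close(fd)
```

A caller would mirror the shape of the diff: enter the guard early in `main()`, and return an exit code immediately when the lock is already held.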


@@ -120,9 +120,6 @@ class AuthStore:
new_user = models.User(**kwargs) new_user = models.User(**kwargs)
while new_user.id in self._users:
new_user = models.User(**kwargs)
self._users[new_user.id] = new_user self._users[new_user.id] = new_user
if credentials is None: if credentials is None:


@@ -27,7 +27,7 @@ from . import (
SetupFlow, SetupFlow,
) )
REQUIREMENTS = ["pyotp==2.9.0"] REQUIREMENTS = ["pyotp==2.8.0"]
CONF_MESSAGE = "message" CONF_MESSAGE = "message"


@@ -20,7 +20,7 @@ from . import (
SetupFlow, SetupFlow,
) )
REQUIREMENTS = ["pyotp==2.9.0", "PyQRCode==1.2.1"] REQUIREMENTS = ["pyotp==2.8.0", "PyQRCode==1.2.1"]
CONFIG_SCHEMA = MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend({}, extra=vol.PREVENT_EXTRA) CONFIG_SCHEMA = MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend({}, extra=vol.PREVENT_EXTRA)
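
Both MFA hunks above move the `pyotp` pin between 2.8.0 and 2.9.0. For orientation, basic TOTP provisioning and verification with pyotp looks roughly like the sketch below; it is a generic illustration, not code taken from these auth modules.

```python
# Generic pyotp usage sketch; not taken from the Home Assistant MFA modules.
import pyotp

# Per-user shared secret, normally generated once and stored server-side.
secret = pyotp.random_base32()
totp = pyotp.TOTP(secret)

# URI that authenticator apps can import, typically rendered as a QR code
# (which is what the PyQRCode requirement in the second hunk is used for).
uri = totp.provisioning_uri(name="user@example.com", issuer_name="Home Assistant")

code = totp.now()         # current one-time password
assert totp.verify(code)  # verify against the current time step
```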


@@ -33,10 +33,7 @@ class AuthFlowContext(FlowContext, total=False):
redirect_uri: str redirect_uri: str
class AuthFlowResult(FlowResult[AuthFlowContext, tuple[str, str]], total=False): AuthFlowResult = FlowResult[AuthFlowContext, tuple[str, str]]
"""Typed result dict for auth flow."""
result: Credentials # Only present if type is CREATE_ENTRY
@attr.s(slots=True) @attr.s(slots=True)


@@ -332,9 +332,6 @@ async def async_setup_hass(
if not is_virtual_env(): if not is_virtual_env():
await async_mount_local_lib_path(runtime_config.config_dir) await async_mount_local_lib_path(runtime_config.config_dir)
if hass.config.safe_mode:
_LOGGER.info("Starting in safe mode")
basic_setup_success = ( basic_setup_success = (
await async_from_config_dict(config_dict, hass) is not None await async_from_config_dict(config_dict, hass) is not None
) )
@@ -387,6 +384,8 @@ async def async_setup_hass(
{"recovery_mode": {}, "http": http_conf}, {"recovery_mode": {}, "http": http_conf},
hass, hass,
) )
elif hass.config.safe_mode:
_LOGGER.info("Starting in safe mode")
if runtime_config.open_ui: if runtime_config.open_ui:
hass.add_job(open_hass_ui, hass) hass.add_job(open_hass_ui, hass)
@@ -695,10 +694,10 @@ async def async_mount_local_lib_path(config_dir: str) -> str:
def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]: def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]:
"""Get domains of components to set up.""" """Get domains of components to set up."""
# The common config section [homeassistant] could be filtered here, # Filter out the repeating and common config section [homeassistant]
# but that is not necessary, since it corresponds to the core integration, domains = {
# that is always unconditionally loaded. domain for key in config if (domain := cv.domain_key(key)) != core.DOMAIN
domains = {cv.domain_key(key) for key in config} }
# Add config entry and default domains # Add config entry and default domains
if not hass.config.recovery_mode: if not hass.config.recovery_mode:
@@ -726,28 +725,34 @@ async def _async_resolve_domains_and_preload(
together with all their dependencies. together with all their dependencies.
""" """
domains_to_setup = _get_domains(hass, config) domains_to_setup = _get_domains(hass, config)
platform_integrations = conf_util.extract_platform_integrations(
# Also process all base platforms since we do not require the manifest config, BASE_PLATFORMS
# to list them as dependencies. )
# We want to later avoid lock contention when multiple integrations try to load # Ensure base platforms that have platform integrations are added to `domains`,
# their manifests at once. # so they can be setup first instead of discovering them later when a config
# entry setup task notices that it's needed and there is already a long line
# to use the import executor.
# #
# Additionally process integrations that are defined under base platforms
# to speed things up.
# For example if we have # For example if we have
# sensor: # sensor:
# - platform: template # - platform: template
# #
# `template` has to be loaded to validate the config for sensor. # `template` has to be loaded to validate the config for sensor
# The more platforms under `sensor:`, the longer # so we want to start loading `sensor` as soon as we know
# it will be needed. The more platforms under `sensor:`, the longer
# it will take to finish setup for `sensor` because each of these # it will take to finish setup for `sensor` because each of these
# platforms has to be imported before we can validate the config. # platforms has to be imported before we can validate the config.
# #
# Thankfully we are migrating away from the platform pattern # Thankfully we are migrating away from the platform pattern
# so this will be less of a problem in the future. # so this will be less of a problem in the future.
platform_integrations = conf_util.extract_platform_integrations( domains_to_setup.update(platform_integrations)
config, BASE_PLATFORMS
) # Additionally process base platforms since we do not require the manifest
# to list them as dependencies.
# We want to later avoid lock contention when multiple integrations try to load
# their manifests at once.
# Also process integrations that are defined under base platforms
# to speed things up.
additional_domains_to_process = { additional_domains_to_process = {
*BASE_PLATFORMS, *BASE_PLATFORMS,
*chain.from_iterable(platform_integrations.values()), *chain.from_iterable(platform_integrations.values()),
@@ -865,9 +870,9 @@ async def _async_set_up_integrations(
domains = set(integrations) & all_domains domains = set(integrations) & all_domains
_LOGGER.info( _LOGGER.info(
"Domains to be set up: %s\nDependencies: %s", "Domains to be set up: %s | %s",
domains or "{}", domains,
(all_domains - domains) or "{}", all_domains - domains,
) )
async_set_domains_to_be_loaded(hass, all_domains) async_set_domains_to_be_loaded(hass, all_domains)
@@ -908,13 +913,12 @@ async def _async_set_up_integrations(
stage_all_domains = stage_domains | stage_dep_domains stage_all_domains = stage_domains | stage_dep_domains
_LOGGER.info( _LOGGER.info(
"Setting up stage %s: %s; already set up: %s\n" "Setting up stage %s: %s | %s\nDependencies: %s | %s",
"Dependencies: %s; already set up: %s",
name, name,
stage_domains, stage_domains,
(stage_domains_unfiltered - stage_domains) or "{}", stage_domains_unfiltered - stage_domains,
stage_dep_domains or "{}", stage_dep_domains,
(stage_dep_domains_unfiltered - stage_dep_domains) or "{}", stage_dep_domains_unfiltered - stage_dep_domains,
) )
if timeout is None: if timeout is None:


@@ -1,5 +0,0 @@
{
"domain": "frient",
"name": "Frient",
"iot_standards": ["zigbee"]
}


@@ -1,5 +1,5 @@
{ {
"domain": "fritzbox", "domain": "fritzbox",
"name": "FRITZ!", "name": "FRITZ!Box",
"integrations": ["fritz", "fritzbox", "fritzbox_callmonitor"] "integrations": ["fritz", "fritzbox", "fritzbox_callmonitor"]
} }


@@ -1,5 +1,5 @@
{ {
"domain": "third_reality", "domain": "third_reality",
"name": "Third Reality", "name": "Third Reality",
"iot_standards": ["matter", "zigbee"] "iot_standards": ["zigbee"]
} }


@@ -1,5 +1,5 @@
{ {
"domain": "ubiquiti", "domain": "ubiquiti",
"name": "Ubiquiti", "name": "Ubiquiti",
"integrations": ["airos", "unifi", "unifi_direct", "unifiled", "unifiprotect"] "integrations": ["unifi", "unifi_direct", "unifiled", "unifiprotect"]
} }


@@ -3,10 +3,8 @@
import logging import logging
from typing import Any from typing import Any
from aiohttp import web
import voluptuous as vol import voluptuous as vol
from homeassistant.components.http import KEY_HASS, HomeAssistantView
from homeassistant.config_entries import ConfigEntry from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ENTITY_ID, CONF_DESCRIPTION, CONF_SELECTOR from homeassistant.const import ATTR_ENTITY_ID, CONF_DESCRIPTION, CONF_SELECTOR
from homeassistant.core import ( from homeassistant.core import (
@@ -22,30 +20,19 @@ from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.typing import UNDEFINED, ConfigType, UndefinedType from homeassistant.helpers.typing import UNDEFINED, ConfigType, UndefinedType
from .const import ( from .const import (
ATTR_ATTACHMENTS,
ATTR_INSTRUCTIONS, ATTR_INSTRUCTIONS,
ATTR_REQUIRED, ATTR_REQUIRED,
ATTR_STRUCTURE, ATTR_STRUCTURE,
ATTR_TASK_NAME, ATTR_TASK_NAME,
DATA_COMPONENT, DATA_COMPONENT,
DATA_IMAGES,
DATA_PREFERENCES, DATA_PREFERENCES,
DOMAIN, DOMAIN,
SERVICE_GENERATE_DATA, SERVICE_GENERATE_DATA,
SERVICE_GENERATE_IMAGE,
AITaskEntityFeature, AITaskEntityFeature,
) )
from .entity import AITaskEntity from .entity import AITaskEntity
from .http import async_setup as async_setup_http from .http import async_setup as async_setup_http
from .task import ( from .task import GenDataTask, GenDataTaskResult, async_generate_data
GenDataTask,
GenDataTaskResult,
GenImageTask,
GenImageTaskResult,
ImageData,
async_generate_data,
async_generate_image,
)
__all__ = [ __all__ = [
"DOMAIN", "DOMAIN",
@@ -53,11 +40,7 @@ __all__ = [
"AITaskEntityFeature", "AITaskEntityFeature",
"GenDataTask", "GenDataTask",
"GenDataTaskResult", "GenDataTaskResult",
"GenImageTask",
"GenImageTaskResult",
"ImageData",
"async_generate_data", "async_generate_data",
"async_generate_image",
"async_setup", "async_setup",
"async_setup_entry", "async_setup_entry",
"async_unload_entry", "async_unload_entry",
@@ -94,10 +77,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
entity_component = EntityComponent[AITaskEntity](_LOGGER, DOMAIN, hass) entity_component = EntityComponent[AITaskEntity](_LOGGER, DOMAIN, hass)
hass.data[DATA_COMPONENT] = entity_component hass.data[DATA_COMPONENT] = entity_component
hass.data[DATA_PREFERENCES] = AITaskPreferences(hass) hass.data[DATA_PREFERENCES] = AITaskPreferences(hass)
hass.data[DATA_IMAGES] = {}
await hass.data[DATA_PREFERENCES].async_load() await hass.data[DATA_PREFERENCES].async_load()
async_setup_http(hass) async_setup_http(hass)
hass.http.register_view(ImageView)
hass.services.async_register( hass.services.async_register(
DOMAIN, DOMAIN,
SERVICE_GENERATE_DATA, SERVICE_GENERATE_DATA,
@@ -111,26 +92,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
vol.Schema({str: STRUCTURE_FIELD_SCHEMA}), vol.Schema({str: STRUCTURE_FIELD_SCHEMA}),
_validate_structure_fields, _validate_structure_fields,
), ),
vol.Optional(ATTR_ATTACHMENTS): vol.All(
cv.ensure_list, [selector.MediaSelector({"accept": ["*/*"]})]
),
}
),
supports_response=SupportsResponse.ONLY,
job_type=HassJobType.Coroutinefunction,
)
hass.services.async_register(
DOMAIN,
SERVICE_GENERATE_IMAGE,
async_service_generate_image,
schema=vol.Schema(
{
vol.Required(ATTR_TASK_NAME): cv.string,
vol.Required(ATTR_ENTITY_ID): cv.entity_id,
vol.Required(ATTR_INSTRUCTIONS): cv.string,
vol.Optional(ATTR_ATTACHMENTS): vol.All(
cv.ensure_list, [selector.MediaSelector({"accept": ["*/*"]})]
),
} }
), ),
supports_response=SupportsResponse.ONLY, supports_response=SupportsResponse.ONLY,
@@ -150,16 +111,11 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_service_generate_data(call: ServiceCall) -> ServiceResponse: async def async_service_generate_data(call: ServiceCall) -> ServiceResponse:
"""Run the data task service.""" """Run the run task service."""
result = await async_generate_data(hass=call.hass, **call.data) result = await async_generate_data(hass=call.hass, **call.data)
return result.as_dict() return result.as_dict()
async def async_service_generate_image(call: ServiceCall) -> ServiceResponse:
"""Run the image task service."""
return await async_generate_image(hass=call.hass, **call.data)
class AITaskPreferences: class AITaskPreferences:
"""AI Task preferences.""" """AI Task preferences."""
@@ -204,28 +160,3 @@ class AITaskPreferences:
def as_dict(self) -> dict[str, str | None]: def as_dict(self) -> dict[str, str | None]:
"""Get the current preferences.""" """Get the current preferences."""
return {key: getattr(self, key) for key in self.KEYS} return {key: getattr(self, key) for key in self.KEYS}
class ImageView(HomeAssistantView):
"""View to generated images."""
url = f"/api/{DOMAIN}/images/{{filename}}"
name = f"api:{DOMAIN}/images"
async def get(
self,
request: web.Request,
filename: str,
) -> web.Response:
"""Serve image."""
hass = request.app[KEY_HASS]
image_storage = hass.data[DATA_IMAGES]
image_data = image_storage.get(filename)
if image_data is None:
raise web.HTTPNotFound
return web.Response(
body=image_data.data,
content_type=image_data.mime_type,
)


@@ -12,24 +12,17 @@ if TYPE_CHECKING:
from . import AITaskPreferences from . import AITaskPreferences
from .entity import AITaskEntity from .entity import AITaskEntity
from .task import ImageData
DOMAIN = "ai_task" DOMAIN = "ai_task"
DATA_COMPONENT: HassKey[EntityComponent[AITaskEntity]] = HassKey(DOMAIN) DATA_COMPONENT: HassKey[EntityComponent[AITaskEntity]] = HassKey(DOMAIN)
DATA_PREFERENCES: HassKey[AITaskPreferences] = HassKey(f"{DOMAIN}_preferences") DATA_PREFERENCES: HassKey[AITaskPreferences] = HassKey(f"{DOMAIN}_preferences")
DATA_IMAGES: HassKey[dict[str, ImageData]] = HassKey(f"{DOMAIN}_images")
IMAGE_EXPIRY_TIME = 60 * 60 # 1 hour
MAX_IMAGES = 20
SERVICE_GENERATE_DATA = "generate_data" SERVICE_GENERATE_DATA = "generate_data"
SERVICE_GENERATE_IMAGE = "generate_image"
ATTR_INSTRUCTIONS: Final = "instructions" ATTR_INSTRUCTIONS: Final = "instructions"
ATTR_TASK_NAME: Final = "task_name" ATTR_TASK_NAME: Final = "task_name"
ATTR_STRUCTURE: Final = "structure" ATTR_STRUCTURE: Final = "structure"
ATTR_REQUIRED: Final = "required" ATTR_REQUIRED: Final = "required"
ATTR_ATTACHMENTS: Final = "attachments"
DEFAULT_SYSTEM_PROMPT = ( DEFAULT_SYSTEM_PROMPT = (
"You are a Home Assistant expert and help users with their tasks." "You are a Home Assistant expert and help users with their tasks."
@@ -41,9 +34,3 @@ class AITaskEntityFeature(IntFlag):
GENERATE_DATA = 1 GENERATE_DATA = 1
"""Generate data based on instructions.""" """Generate data based on instructions."""
SUPPORT_ATTACHMENTS = 2
"""Support attachments with generate data."""
GENERATE_IMAGE = 4
"""Generate images based on instructions."""


@@ -13,12 +13,12 @@ from homeassistant.components.conversation import (
) )
from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
from homeassistant.helpers import llm from homeassistant.helpers import llm
from homeassistant.helpers.chat_session import ChatSession from homeassistant.helpers.chat_session import async_get_chat_session
from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.util import dt as dt_util from homeassistant.util import dt as dt_util
from .const import DEFAULT_SYSTEM_PROMPT, DOMAIN, AITaskEntityFeature from .const import DEFAULT_SYSTEM_PROMPT, DOMAIN, AITaskEntityFeature
from .task import GenDataTask, GenDataTaskResult, GenImageTask, GenImageTaskResult from .task import GenDataTask, GenDataTaskResult
class AITaskEntity(RestoreEntity): class AITaskEntity(RestoreEntity):
@@ -56,12 +56,12 @@ class AITaskEntity(RestoreEntity):
@contextlib.asynccontextmanager @contextlib.asynccontextmanager
async def _async_get_ai_task_chat_log( async def _async_get_ai_task_chat_log(
self, self,
session: ChatSession, task: GenDataTask,
task: GenDataTask | GenImageTask,
) -> AsyncGenerator[ChatLog]: ) -> AsyncGenerator[ChatLog]:
"""Context manager used to manage the ChatLog used during an AI Task.""" """Context manager used to manage the ChatLog used during an AI Task."""
# pylint: disable-next=contextmanager-generator-missing-cleanup # pylint: disable-next=contextmanager-generator-missing-cleanup
with ( with (
async_get_chat_session(self.hass) as session,
async_get_chat_log( async_get_chat_log(
self.hass, self.hass,
session, session,
@@ -79,22 +79,19 @@ class AITaskEntity(RestoreEntity):
user_llm_prompt=DEFAULT_SYSTEM_PROMPT, user_llm_prompt=DEFAULT_SYSTEM_PROMPT,
) )
chat_log.async_add_user_content( chat_log.async_add_user_content(UserContent(task.instructions))
UserContent(task.instructions, attachments=task.attachments)
)
yield chat_log yield chat_log
@final @final
async def internal_async_generate_data( async def internal_async_generate_data(
self, self,
session: ChatSession,
task: GenDataTask, task: GenDataTask,
) -> GenDataTaskResult: ) -> GenDataTaskResult:
"""Run a gen data task.""" """Run a gen data task."""
self.__last_activity = dt_util.utcnow().isoformat() self.__last_activity = dt_util.utcnow().isoformat()
self.async_write_ha_state() self.async_write_ha_state()
async with self._async_get_ai_task_chat_log(session, task) as chat_log: async with self._async_get_ai_task_chat_log(task) as chat_log:
return await self._async_generate_data(task, chat_log) return await self._async_generate_data(task, chat_log)
async def _async_generate_data( async def _async_generate_data(
@@ -104,23 +101,3 @@ class AITaskEntity(RestoreEntity):
) -> GenDataTaskResult: ) -> GenDataTaskResult:
"""Handle a gen data task.""" """Handle a gen data task."""
raise NotImplementedError raise NotImplementedError
@final
async def internal_async_generate_image(
self,
session: ChatSession,
task: GenImageTask,
) -> GenImageTaskResult:
"""Run a gen image task."""
self.__last_activity = dt_util.utcnow().isoformat()
self.async_write_ha_state()
async with self._async_get_ai_task_chat_log(session, task) as chat_log:
return await self._async_generate_image(task, chat_log)
async def _async_generate_image(
self,
task: GenImageTask,
chat_log: ChatLog,
) -> GenImageTaskResult:
"""Handle a gen image task."""
raise NotImplementedError


@@ -1,15 +1,7 @@
{ {
"entity_component": {
"_": {
"default": "mdi:star-four-points"
}
},
"services": { "services": {
"generate_data": { "generate_data": {
"service": "mdi:file-star-four-points-outline" "service": "mdi:file-star-four-points-outline"
},
"generate_image": {
"service": "mdi:star-four-points-box-outline"
} }
} }
} }

View File

@@ -1,10 +1,9 @@
{ {
"domain": "ai_task", "domain": "ai_task",
"name": "AI Task", "name": "AI Task",
"after_dependencies": ["camera", "http"],
"codeowners": ["@home-assistant/core"], "codeowners": ["@home-assistant/core"],
"dependencies": ["conversation", "media_source"], "dependencies": ["conversation"],
"documentation": "https://www.home-assistant.io/integrations/ai_task", "documentation": "https://www.home-assistant.io/integrations/ai_task",
"integration_type": "entity", "integration_type": "system",
"quality_scale": "internal" "quality_scale": "internal"
} }


@@ -1,90 +0,0 @@
"""Expose images as media sources."""
from __future__ import annotations
from datetime import timedelta
import logging
from homeassistant.components.http.auth import async_sign_path
from homeassistant.components.media_player import BrowseError, MediaClass
from homeassistant.components.media_source import (
BrowseMediaSource,
MediaSource,
MediaSourceItem,
PlayMedia,
Unresolvable,
)
from homeassistant.core import HomeAssistant
from .const import DATA_IMAGES, DOMAIN, IMAGE_EXPIRY_TIME
_LOGGER = logging.getLogger(__name__)
async def async_get_media_source(hass: HomeAssistant) -> ImageMediaSource:
"""Set up image media source."""
_LOGGER.debug("Setting up image media source")
return ImageMediaSource(hass)
class ImageMediaSource(MediaSource):
"""Provide images as media sources."""
name: str = "AI Generated Images"
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize ImageMediaSource."""
super().__init__(DOMAIN)
self.hass = hass
async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia:
"""Resolve media to a url."""
image_storage = self.hass.data[DATA_IMAGES]
image = image_storage.get(item.identifier)
if image is None:
raise Unresolvable(f"Could not resolve media item: {item.identifier}")
return PlayMedia(
async_sign_path(
self.hass,
f"/api/{DOMAIN}/images/{item.identifier}",
timedelta(seconds=IMAGE_EXPIRY_TIME or 1800),
),
image.mime_type,
)
async def async_browse_media(
self,
item: MediaSourceItem,
) -> BrowseMediaSource:
"""Return media."""
if item.identifier:
raise BrowseError("Unknown item")
image_storage = self.hass.data[DATA_IMAGES]
children = [
BrowseMediaSource(
domain=DOMAIN,
identifier=filename,
media_class=MediaClass.IMAGE,
media_content_type=image.mime_type,
title=image.title or filename,
can_play=True,
can_expand=False,
)
for filename, image in image_storage.items()
]
return BrowseMediaSource(
domain=DOMAIN,
identifier=None,
media_class=MediaClass.APP,
media_content_type="",
title="AI Generated Images",
can_play=False,
can_expand=True,
children_media_class=MediaClass.IMAGE,
children=children,
)


@@ -10,50 +10,16 @@ generate_data:
required: true required: true
selector: selector:
text: text:
multiline: true
entity_id: entity_id:
required: false required: false
selector: selector:
entity: entity:
filter: domain: ai_task
domain: ai_task supported_features:
supported_features: - ai_task.AITaskEntityFeature.GENERATE_DATA
- ai_task.AITaskEntityFeature.GENERATE_DATA
structure: structure:
advanced: true
required: false required: false
example: '{ "name": { "selector": { "text": }, "description": "Name of the user", "required": "True" } } }, "age": { "selector": { "number": }, "description": "Age of the user" } }' example: '{ "name": { "selector": { "text": }, "description": "Name of the user", "required": "True" } } }, "age": { "selector": { "number": }, "description": "Age of the user" } }'
selector: selector:
object: object:
attachments:
required: false
selector:
media:
accept:
- "*"
generate_image:
fields:
task_name:
example: "picture of a dog"
required: true
selector:
text:
instructions:
example: "Generate a high quality square image of a dog on transparent background"
required: true
selector:
text:
multiline: true
entity_id:
required: true
selector:
entity:
filter:
domain: ai_task
supported_features:
- ai_task.AITaskEntityFeature.GENERATE_IMAGE
attachments:
required: false
selector:
media:
accept:
- "*"


@@ -19,32 +19,6 @@
"structure": { "structure": {
"name": "Structured output", "name": "Structured output",
"description": "When set, the AI Task will output fields with this in structure. The structure is a dictionary where the keys are the field names and the values contain a 'description', a 'selector', and an optional 'required' field." "description": "When set, the AI Task will output fields with this in structure. The structure is a dictionary where the keys are the field names and the values contain a 'description', a 'selector', and an optional 'required' field."
},
"attachments": {
"name": "Attachments",
"description": "List of files to attach for multi-modal AI analysis."
}
}
},
"generate_image": {
"name": "Generate image",
"description": "Uses AI to generate image.",
"fields": {
"task_name": {
"name": "Task name",
"description": "Name of the task."
},
"instructions": {
"name": "Instructions",
"description": "Instructions that explains the image to be generated."
},
"entity_id": {
"name": "Entity ID",
"description": "Entity ID to run the task on."
},
"attachments": {
"name": "Attachments",
"description": "List of files to attach for using as references."
} }
} }
} }


@@ -3,109 +3,14 @@
from __future__ import annotations from __future__ import annotations
from dataclasses import dataclass from dataclasses import dataclass
from datetime import datetime, timedelta
from functools import partial
import mimetypes
from pathlib import Path
import tempfile
from typing import Any from typing import Any
import voluptuous as vol import voluptuous as vol
from homeassistant.components import camera, conversation, media_source from homeassistant.core import HomeAssistant
from homeassistant.components.http.auth import async_sign_path
from homeassistant.core import HomeAssistant, ServiceResponse, callback
from homeassistant.exceptions import HomeAssistantError from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.chat_session import ChatSession, async_get_chat_session
from homeassistant.helpers.event import async_call_later
from homeassistant.helpers.network import get_url
from homeassistant.util import RE_SANITIZE_FILENAME, slugify
from .const import ( from .const import DATA_COMPONENT, DATA_PREFERENCES, AITaskEntityFeature
DATA_COMPONENT,
DATA_IMAGES,
DATA_PREFERENCES,
DOMAIN,
IMAGE_EXPIRY_TIME,
MAX_IMAGES,
AITaskEntityFeature,
)
def _save_camera_snapshot(image: camera.Image) -> Path:
"""Save camera snapshot to temp file."""
with tempfile.NamedTemporaryFile(
mode="wb",
suffix=mimetypes.guess_extension(image.content_type, False),
delete=False,
) as temp_file:
temp_file.write(image.content)
return Path(temp_file.name)
async def _resolve_attachments(
hass: HomeAssistant,
session: ChatSession,
attachments: list[dict] | None = None,
) -> list[conversation.Attachment]:
"""Resolve attachments for a task."""
resolved_attachments: list[conversation.Attachment] = []
created_files: list[Path] = []
for attachment in attachments or []:
media_content_id = attachment["media_content_id"]
# Special case for camera media sources
if media_content_id.startswith("media-source://camera/"):
# Extract entity_id from the media content ID
entity_id = media_content_id.removeprefix("media-source://camera/")
# Get snapshot from camera
image = await camera.async_get_image(hass, entity_id)
temp_filename = await hass.async_add_executor_job(
_save_camera_snapshot, image
)
created_files.append(temp_filename)
resolved_attachments.append(
conversation.Attachment(
media_content_id=media_content_id,
mime_type=image.content_type,
path=temp_filename,
)
)
else:
# Handle regular media sources
media = await media_source.async_resolve_media(hass, media_content_id, None)
if media.path is None:
raise HomeAssistantError(
"Only local attachments are currently supported"
)
resolved_attachments.append(
conversation.Attachment(
media_content_id=media_content_id,
mime_type=media.mime_type,
path=media.path,
)
)
if not created_files:
return resolved_attachments
def cleanup_files() -> None:
"""Cleanup temporary files."""
for file in created_files:
file.unlink(missing_ok=True)
@callback
def cleanup_files_callback() -> None:
"""Cleanup temporary files."""
hass.async_add_executor_job(cleanup_files)
session.async_on_cleanup(cleanup_files_callback)
return resolved_attachments
async def async_generate_data( async def async_generate_data(
@@ -115,9 +20,8 @@ async def async_generate_data(
entity_id: str | None = None, entity_id: str | None = None,
instructions: str, instructions: str,
structure: vol.Schema | None = None, structure: vol.Schema | None = None,
attachments: list[dict] | None = None,
) -> GenDataTaskResult: ) -> GenDataTaskResult:
"""Run a data generation task in the AI Task integration.""" """Run a task in the AI Task integration."""
if entity_id is None: if entity_id is None:
entity_id = hass.data[DATA_PREFERENCES].gen_data_entity_id entity_id = hass.data[DATA_PREFERENCES].gen_data_entity_id
@@ -133,122 +37,14 @@ async def async_generate_data(
f"AI Task entity {entity_id} does not support generating data" f"AI Task entity {entity_id} does not support generating data"
) )
if ( return await entity.internal_async_generate_data(
attachments GenDataTask(
and AITaskEntityFeature.SUPPORT_ATTACHMENTS not in entity.supported_features name=task_name,
): instructions=instructions,
raise HomeAssistantError( structure=structure,
f"AI Task entity {entity_id} does not support attachments"
) )
with async_get_chat_session(hass) as session:
resolved_attachments = await _resolve_attachments(hass, session, attachments)
return await entity.internal_async_generate_data(
session,
GenDataTask(
name=task_name,
instructions=instructions,
structure=structure,
attachments=resolved_attachments or None,
),
)
def _cleanup_images(image_storage: dict[str, ImageData], num_to_remove: int) -> None:
"""Remove old images to keep the storage size under the limit."""
if num_to_remove <= 0:
return
if num_to_remove >= len(image_storage):
image_storage.clear()
return
sorted_images = sorted(
image_storage.items(),
key=lambda item: item[1].timestamp,
) )
for filename, _ in sorted_images[:num_to_remove]:
image_storage.pop(filename, None)
async def async_generate_image(
hass: HomeAssistant,
*,
task_name: str,
entity_id: str,
instructions: str,
attachments: list[dict] | None = None,
) -> ServiceResponse:
"""Run an image generation task in the AI Task integration."""
entity = hass.data[DATA_COMPONENT].get_entity(entity_id)
if entity is None:
raise HomeAssistantError(f"AI Task entity {entity_id} not found")
if AITaskEntityFeature.GENERATE_IMAGE not in entity.supported_features:
raise HomeAssistantError(
f"AI Task entity {entity_id} does not support generating images"
)
if (
attachments
and AITaskEntityFeature.SUPPORT_ATTACHMENTS not in entity.supported_features
):
raise HomeAssistantError(
f"AI Task entity {entity_id} does not support attachments"
)
with async_get_chat_session(hass) as session:
resolved_attachments = await _resolve_attachments(hass, session, attachments)
task_result = await entity.internal_async_generate_image(
session,
GenImageTask(
name=task_name,
instructions=instructions,
attachments=resolved_attachments or None,
),
)
service_result = task_result.as_dict()
image_data = service_result.pop("image_data")
if service_result.get("revised_prompt") is None:
service_result["revised_prompt"] = instructions
image_storage = hass.data[DATA_IMAGES]
if len(image_storage) + 1 > MAX_IMAGES:
_cleanup_images(image_storage, len(image_storage) + 1 - MAX_IMAGES)
current_time = datetime.now()
ext = mimetypes.guess_extension(task_result.mime_type, False) or ".png"
sanitized_task_name = RE_SANITIZE_FILENAME.sub("", slugify(task_name))
filename = f"{current_time.strftime('%Y-%m-%d_%H%M%S')}_{sanitized_task_name}{ext}"
image_storage[filename] = ImageData(
data=image_data,
timestamp=int(current_time.timestamp()),
mime_type=task_result.mime_type,
title=service_result["revised_prompt"],
)
def _purge_image(filename: str, now: datetime) -> None:
"""Remove image from storage."""
image_storage.pop(filename, None)
if IMAGE_EXPIRY_TIME > 0:
async_call_later(hass, IMAGE_EXPIRY_TIME, partial(_purge_image, filename))
service_result["url"] = get_url(hass) + async_sign_path(
hass,
f"/api/{DOMAIN}/images/{filename}",
timedelta(seconds=IMAGE_EXPIRY_TIME or 1800),
)
service_result["media_source_id"] = f"media-source://{DOMAIN}/images/{filename}"
return service_result
@dataclass(slots=True) @dataclass(slots=True)
class GenDataTask: class GenDataTask:
@@ -263,9 +59,6 @@ class GenDataTask:
structure: vol.Schema | None = None structure: vol.Schema | None = None
"""Optional structure for the data to be generated.""" """Optional structure for the data to be generated."""
attachments: list[conversation.Attachment] | None = None
"""List of attachments to go along the instructions."""
def __str__(self) -> str: def __str__(self) -> str:
"""Return task as a string.""" """Return task as a string."""
return f"<GenDataTask {self.name}: {id(self)}>" return f"<GenDataTask {self.name}: {id(self)}>"
@@ -287,80 +80,3 @@ class GenDataTaskResult:
"conversation_id": self.conversation_id, "conversation_id": self.conversation_id,
"data": self.data, "data": self.data,
} }
@dataclass(slots=True)
class GenImageTask:
"""Gen image task to be processed."""
name: str
"""Name of the task."""
instructions: str
"""Instructions on what needs to be done."""
attachments: list[conversation.Attachment] | None = None
"""List of attachments to go along the instructions."""
def __str__(self) -> str:
"""Return task as a string."""
return f"<GenImageTask {self.name}: {id(self)}>"
@dataclass(slots=True)
class GenImageTaskResult:
"""Result of gen image task."""
image_data: bytes
"""Raw image data generated by the model."""
conversation_id: str
"""Unique identifier for the conversation."""
mime_type: str
"""MIME type of the generated image."""
width: int | None = None
"""Width of the generated image, if available."""
height: int | None = None
"""Height of the generated image, if available."""
model: str | None = None
"""Model used to generate the image, if available."""
revised_prompt: str | None = None
"""Revised prompt used to generate the image, if applicable."""
def as_dict(self) -> dict[str, Any]:
"""Return result as a dict."""
return {
"image_data": self.image_data,
"conversation_id": self.conversation_id,
"mime_type": self.mime_type,
"width": self.width,
"height": self.height,
"model": self.model,
"revised_prompt": self.revised_prompt,
}
@dataclass(slots=True)
class ImageData:
"""Image data for stored generated images."""
data: bytes
"""Raw image data."""
timestamp: int
"""Timestamp when the image was generated, as a Unix timestamp."""
mime_type: str
"""MIME type of the image."""
title: str
"""Title of the image, usually the prompt used to generate it."""
def __str__(self) -> str:
"""Return image data as a string."""
return f"<ImageData {self.title}: {id(self)}>"


@@ -6,7 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/airgradient", "documentation": "https://www.home-assistant.io/integrations/airgradient",
"integration_type": "device", "integration_type": "device",
"iot_class": "local_polling", "iot_class": "local_polling",
"quality_scale": "platinum",
"requirements": ["airgradient==0.9.2"], "requirements": ["airgradient==0.9.2"],
"zeroconf": ["_airgradient._tcp.local."] "zeroconf": ["_airgradient._tcp.local."]
} }

View File

@@ -14,9 +14,9 @@ rules:
status: exempt status: exempt
comment: | comment: |
This integration does not provide additional actions. This integration does not provide additional actions.
docs-high-level-description: done docs-high-level-description: todo
docs-installation-instructions: done docs-installation-instructions: todo
docs-removal-instructions: done docs-removal-instructions: todo
entity-event-setup: entity-event-setup:
status: exempt status: exempt
comment: | comment: |
@@ -34,7 +34,7 @@ rules:
docs-configuration-parameters: docs-configuration-parameters:
status: exempt status: exempt
comment: No options to configure comment: No options to configure
docs-installation-parameters: done docs-installation-parameters: todo
entity-unavailable: done entity-unavailable: done
integration-owner: done integration-owner: done
log-when-unavailable: done log-when-unavailable: done
@@ -43,19 +43,23 @@ rules:
status: exempt status: exempt
comment: | comment: |
This integration does not require authentication. This integration does not require authentication.
test-coverage: done test-coverage: todo
# Gold # Gold
devices: done devices: done
diagnostics: done diagnostics: done
discovery-update-info: done discovery-update-info:
discovery: done status: todo
docs-data-update: done comment: DHCP is still possible
docs-examples: done discovery:
docs-known-limitations: done status: todo
docs-supported-devices: done comment: DHCP is still possible
docs-supported-functions: done docs-data-update: todo
docs-troubleshooting: done docs-examples: todo
docs-use-cases: done docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices: dynamic-devices:
status: exempt status: exempt
comment: | comment: |

View File

@@ -61,7 +61,7 @@
"display_pm_standard": { "display_pm_standard": {
"name": "Display PM standard", "name": "Display PM standard",
"state": { "state": {
"ugm3": "μg/m³", "ugm3": "µg/m³",
"us_aqi": "US AQI" "us_aqi": "US AQI"
} }
}, },

View File

@@ -45,6 +45,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirNowConfigEntry) -> bo
# Store Entity and Initialize Platforms # Store Entity and Initialize Platforms
entry.runtime_data = coordinator entry.runtime_data = coordinator
# Listen for option changes
entry.async_on_unload(entry.add_update_listener(update_listener))
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
# Clean up unused device entries with no entities # Clean up unused device entries with no entities
@@ -85,3 +88,8 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: AirNowConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: AirNowConfigEntry) -> bool:
"""Unload a config entry.""" """Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)

View File

@@ -13,7 +13,7 @@ from homeassistant.config_entries import (
ConfigEntry, ConfigEntry,
ConfigFlow, ConfigFlow,
ConfigFlowResult, ConfigFlowResult,
OptionsFlowWithReload, OptionsFlow,
) )
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS
from homeassistant.core import HomeAssistant, callback from homeassistant.core import HomeAssistant, callback
@@ -126,7 +126,7 @@ class AirNowConfigFlow(ConfigFlow, domain=DOMAIN):
return AirNowOptionsFlowHandler() return AirNowOptionsFlowHandler()
class AirNowOptionsFlowHandler(OptionsFlowWithReload): class AirNowOptionsFlowHandler(OptionsFlow):
"""Handle an options flow for AirNow.""" """Handle an options flow for AirNow."""
async def async_step_init( async def async_step_init(

View File

@@ -1,45 +0,0 @@
"""The Ubiquiti airOS integration."""
from __future__ import annotations
from airos.airos8 import AirOS8
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .coordinator import AirOSConfigEntry, AirOSDataUpdateCoordinator
_PLATFORMS: list[Platform] = [
Platform.BINARY_SENSOR,
Platform.SENSOR,
]
async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
"""Set up Ubiquiti airOS from a config entry."""
# By default airOS 8 comes with self-signed SSL certificates,
# with no option in the web UI to change or upload a custom certificate.
session = async_get_clientsession(hass, verify_ssl=False)
airos_device = AirOS8(
host=entry.data[CONF_HOST],
username=entry.data[CONF_USERNAME],
password=entry.data[CONF_PASSWORD],
session=session,
)
coordinator = AirOSDataUpdateCoordinator(hass, entry, airos_device)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)

View File

@@ -1,106 +0,0 @@
"""AirOS Binary Sensor component for Home Assistant."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
import logging
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import AirOS8Data, AirOSConfigEntry, AirOSDataUpdateCoordinator
from .entity import AirOSEntity
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class AirOSBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Describe an AirOS binary sensor."""
value_fn: Callable[[AirOS8Data], bool]
BINARY_SENSORS: tuple[AirOSBinarySensorEntityDescription, ...] = (
AirOSBinarySensorEntityDescription(
key="portfw",
translation_key="port_forwarding",
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.portfw,
),
AirOSBinarySensorEntityDescription(
key="dhcp_client",
translation_key="dhcp_client",
device_class=BinarySensorDeviceClass.RUNNING,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.services.dhcpc,
),
AirOSBinarySensorEntityDescription(
key="dhcp_server",
translation_key="dhcp_server",
device_class=BinarySensorDeviceClass.RUNNING,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.services.dhcpd,
entity_registry_enabled_default=False,
),
AirOSBinarySensorEntityDescription(
key="dhcp6_server",
translation_key="dhcp6_server",
device_class=BinarySensorDeviceClass.RUNNING,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.services.dhcp6d_stateful,
entity_registry_enabled_default=False,
),
AirOSBinarySensorEntityDescription(
key="pppoe",
translation_key="pppoe",
device_class=BinarySensorDeviceClass.CONNECTIVITY,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.services.pppoe,
entity_registry_enabled_default=False,
),
)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: AirOSConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the AirOS binary sensors from a config entry."""
coordinator = config_entry.runtime_data
async_add_entities(
AirOSBinarySensor(coordinator, description) for description in BINARY_SENSORS
)
class AirOSBinarySensor(AirOSEntity, BinarySensorEntity):
"""Representation of a binary sensor."""
entity_description: AirOSBinarySensorEntityDescription
def __init__(
self,
coordinator: AirOSDataUpdateCoordinator,
description: AirOSBinarySensorEntityDescription,
) -> None:
"""Initialize the binary sensor."""
super().__init__(coordinator)
self.entity_description = description
self._attr_unique_id = f"{coordinator.data.host.device_id}_{description.key}"
@property
def is_on(self) -> bool:
"""Return the state of the binary sensor."""
return self.entity_description.value_fn(self.coordinator.data)

View File

@@ -1,82 +0,0 @@
"""Config flow for the Ubiquiti airOS integration."""
from __future__ import annotations
import logging
from typing import Any
from airos.exceptions import (
AirOSConnectionAuthenticationError,
AirOSConnectionSetupError,
AirOSDataMissingError,
AirOSDeviceConnectionError,
AirOSKeyDataMissingError,
)
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DOMAIN
from .coordinator import AirOS8
_LOGGER = logging.getLogger(__name__)
STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST): str,
vol.Required(CONF_USERNAME, default="ubnt"): str,
vol.Required(CONF_PASSWORD): str,
}
)
class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Ubiquiti airOS."""
VERSION = 1
async def async_step_user(
self,
user_input: dict[str, Any] | None = None,
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
# By default airOS 8 comes with self-signed SSL certificates,
# with no option in the web UI to change or upload a custom certificate.
session = async_get_clientsession(self.hass, verify_ssl=False)
airos_device = AirOS8(
host=user_input[CONF_HOST],
username=user_input[CONF_USERNAME],
password=user_input[CONF_PASSWORD],
session=session,
)
try:
await airos_device.login()
airos_data = await airos_device.status()
except (
AirOSConnectionSetupError,
AirOSDeviceConnectionError,
):
errors["base"] = "cannot_connect"
except (AirOSConnectionAuthenticationError, AirOSDataMissingError):
errors["base"] = "invalid_auth"
except AirOSKeyDataMissingError:
errors["base"] = "key_data_missing"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(airos_data.derived.mac)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=airos_data.host.hostname, data=user_input
)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
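A minimal happy-path test sketch for a flow like this, written in the usual Home Assistant test style and assuming hypothetical fixtures that mock the device client and entry setup (credentials are placeholders):

from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType


async def test_user_flow_creates_entry(
    hass: HomeAssistant, mock_airos_device, mock_setup_entry  # hypothetical fixtures
) -> None:
    """Show the form, submit placeholder credentials, expect an entry."""
    result = await hass.config_entries.flow.async_init(
        "airos", context={"source": SOURCE_USER}
    )
    assert result["type"] is FlowResultType.FORM

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_HOST: "192.168.1.2", CONF_USERNAME: "ubnt", CONF_PASSWORD: "placeholder"},
    )
    assert result["type"] is FlowResultType.CREATE_ENTRY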

View File

@@ -1,9 +0,0 @@
"""Constants for the Ubiquiti airOS integration."""
from datetime import timedelta
DOMAIN = "airos"
SCAN_INTERVAL = timedelta(minutes=1)
MANUFACTURER = "Ubiquiti"

View File

@@ -1,70 +0,0 @@
"""DataUpdateCoordinator for AirOS."""
from __future__ import annotations
import logging
from airos.airos8 import AirOS8, AirOS8Data
from airos.exceptions import (
AirOSConnectionAuthenticationError,
AirOSConnectionSetupError,
AirOSDataMissingError,
AirOSDeviceConnectionError,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN, SCAN_INTERVAL
_LOGGER = logging.getLogger(__name__)
type AirOSConfigEntry = ConfigEntry[AirOSDataUpdateCoordinator]
class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOS8Data]):
"""Class to manage fetching AirOS data from single endpoint."""
config_entry: AirOSConfigEntry
def __init__(
self, hass: HomeAssistant, config_entry: AirOSConfigEntry, airos_device: AirOS8
) -> None:
"""Initialize the coordinator."""
self.airos_device = airos_device
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=SCAN_INTERVAL,
)
async def _async_update_data(self) -> AirOS8Data:
"""Fetch data from AirOS."""
try:
await self.airos_device.login()
return await self.airos_device.status()
except (AirOSConnectionAuthenticationError,) as err:
_LOGGER.exception("Error authenticating with airOS device")
raise ConfigEntryError(
translation_domain=DOMAIN, translation_key="invalid_auth"
) from err
except (
AirOSConnectionSetupError,
AirOSDeviceConnectionError,
TimeoutError,
) as err:
_LOGGER.error("Error connecting to airOS device: %s", err)
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="cannot_connect",
) from err
except (AirOSDataMissingError,) as err:
_LOGGER.error("Expected data not returned by airOS device: %s", err)
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="error_data_missing",
) from err

View File

@@ -1,33 +0,0 @@
"""Diagnostics support for airOS."""
from __future__ import annotations
from typing import Any
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_HOST, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from .coordinator import AirOSConfigEntry
IP_REDACT = ["addr", "ipaddr", "ip6addr", "lastip"] # IP related
HW_REDACT = ["apmac", "hwaddr", "mac"] # MAC address
TO_REDACT_HA = [CONF_HOST, CONF_PASSWORD]
TO_REDACT_AIROS = [
"hostname", # Prevent leaking device naming
"essid", # Network SSID
"lat", # GPS latitude to prevent exposing location data.
"lon", # GPS longitude to prevent exposing location data.
*HW_REDACT,
*IP_REDACT,
]
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: AirOSConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
return {
"entry_data": async_redact_data(entry.data, TO_REDACT_HA),
"data": async_redact_data(entry.runtime_data.data.to_dict(), TO_REDACT_AIROS),
}
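For reference, async_redact_data walks nested mappings and swaps the listed keys for a redaction placeholder; a tiny self-contained example with made-up values:

from homeassistant.components.diagnostics import async_redact_data

sample = {
    "host": {"hostname": "bridge-ap"},
    "wireless": {"essid": "backhaul", "frequency": 5805},
}
redacted = async_redact_data(sample, ["hostname", "essid"])
# "hostname" and "essid" are replaced with the redaction placeholder; "frequency" passes through unchanged.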

View File

@@ -1,36 +0,0 @@
"""Generic AirOS Entity Class."""
from __future__ import annotations
from homeassistant.const import CONF_HOST
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, MANUFACTURER
from .coordinator import AirOSDataUpdateCoordinator
class AirOSEntity(CoordinatorEntity[AirOSDataUpdateCoordinator]):
"""Represent a AirOS Entity."""
_attr_has_entity_name = True
def __init__(self, coordinator: AirOSDataUpdateCoordinator) -> None:
"""Initialise the gateway."""
super().__init__(coordinator)
airos_data = self.coordinator.data
configuration_url: str | None = (
f"https://{coordinator.config_entry.data[CONF_HOST]}"
)
self._attr_device_info = DeviceInfo(
connections={(CONNECTION_NETWORK_MAC, airos_data.derived.mac)},
configuration_url=configuration_url,
identifiers={(DOMAIN, str(airos_data.host.device_id))},
manufacturer=MANUFACTURER,
model=airos_data.host.devmodel,
name=airos_data.host.hostname,
sw_version=airos_data.host.fwversion,
)

View File

@@ -1,10 +0,0 @@
{
"domain": "airos",
"name": "Ubiquiti airOS",
"codeowners": ["@CoMPaTech"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/airos",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["airos==0.5.1"]
}

View File

@@ -1,70 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: airOS does not have actions
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: airOS does not have actions
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: local_polling without events
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: airOS does not have actions
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done
entity-unavailable: todo
integration-owner: done
log-when-unavailable: todo
parallel-updates: todo
reauthentication-flow: todo
test-coverage: done
# Gold
devices: done
diagnostics: done
discovery-update-info: todo
discovery: todo
docs-data-update: done
docs-examples: todo
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: todo
docs-troubleshooting: done
docs-use-cases: todo
dynamic-devices: todo
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
exception-translations: done
icon-translations:
status: exempt
comment: no (custom) icons used or envisioned
reconfiguration-flow: todo
repair-issues: todo
stale-devices: todo
# Platinum
async-dependency: done
inject-websession: done
strict-typing: done

View File

@@ -1,194 +0,0 @@
"""AirOS Sensor component for Home Assistant."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
import logging
from airos.data import DerivedWirelessMode, DerivedWirelessRole, NetRole
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.const import (
PERCENTAGE,
SIGNAL_STRENGTH_DECIBELS,
UnitOfDataRate,
UnitOfFrequency,
UnitOfLength,
UnitOfTime,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from .coordinator import AirOS8Data, AirOSConfigEntry, AirOSDataUpdateCoordinator
from .entity import AirOSEntity
_LOGGER = logging.getLogger(__name__)
NETROLE_OPTIONS = [mode.value for mode in NetRole]
WIRELESS_MODE_OPTIONS = [mode.value for mode in DerivedWirelessMode]
WIRELESS_ROLE_OPTIONS = [mode.value for mode in DerivedWirelessRole]
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class AirOSSensorEntityDescription(SensorEntityDescription):
"""Describe an AirOS sensor."""
value_fn: Callable[[AirOS8Data], StateType]
SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
AirOSSensorEntityDescription(
key="host_cpuload",
translation_key="host_cpuload",
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=1,
value_fn=lambda data: data.host.cpuload,
entity_registry_enabled_default=False,
),
AirOSSensorEntityDescription(
key="host_netrole",
translation_key="host_netrole",
device_class=SensorDeviceClass.ENUM,
value_fn=lambda data: data.host.netrole.value,
options=NETROLE_OPTIONS,
),
AirOSSensorEntityDescription(
key="wireless_frequency",
translation_key="wireless_frequency",
native_unit_of_measurement=UnitOfFrequency.MEGAHERTZ,
device_class=SensorDeviceClass.FREQUENCY,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda data: data.wireless.frequency,
),
AirOSSensorEntityDescription(
key="wireless_essid",
translation_key="wireless_essid",
value_fn=lambda data: data.wireless.essid,
),
AirOSSensorEntityDescription(
key="wireless_antenna_gain",
translation_key="wireless_antenna_gain",
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS,
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda data: data.wireless.antenna_gain,
),
AirOSSensorEntityDescription(
key="wireless_throughput_tx",
translation_key="wireless_throughput_tx",
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
value_fn=lambda data: data.wireless.throughput.tx,
),
AirOSSensorEntityDescription(
key="wireless_throughput_rx",
translation_key="wireless_throughput_rx",
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
value_fn=lambda data: data.wireless.throughput.rx,
),
AirOSSensorEntityDescription(
key="wireless_polling_dl_capacity",
translation_key="wireless_polling_dl_capacity",
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
value_fn=lambda data: data.wireless.polling.dl_capacity,
),
AirOSSensorEntityDescription(
key="wireless_polling_ul_capacity",
translation_key="wireless_polling_ul_capacity",
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
value_fn=lambda data: data.wireless.polling.ul_capacity,
),
AirOSSensorEntityDescription(
key="host_uptime",
translation_key="host_uptime",
native_unit_of_measurement=UnitOfTime.SECONDS,
device_class=SensorDeviceClass.DURATION,
suggested_display_precision=0,
suggested_unit_of_measurement=UnitOfTime.DAYS,
value_fn=lambda data: data.host.uptime,
entity_registry_enabled_default=False,
),
AirOSSensorEntityDescription(
key="wireless_distance",
translation_key="wireless_distance",
native_unit_of_measurement=UnitOfLength.METERS,
device_class=SensorDeviceClass.DISTANCE,
suggested_display_precision=1,
suggested_unit_of_measurement=UnitOfLength.KILOMETERS,
value_fn=lambda data: data.wireless.distance,
),
AirOSSensorEntityDescription(
key="wireless_mode",
translation_key="wireless_mode",
device_class=SensorDeviceClass.ENUM,
value_fn=lambda data: data.derived.mode.value,
options=WIRELESS_MODE_OPTIONS,
entity_registry_enabled_default=False,
),
AirOSSensorEntityDescription(
key="wireless_role",
translation_key="wireless_role",
device_class=SensorDeviceClass.ENUM,
value_fn=lambda data: data.derived.role.value,
options=WIRELESS_ROLE_OPTIONS,
entity_registry_enabled_default=False,
),
)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: AirOSConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the AirOS sensors from a config entry."""
coordinator = config_entry.runtime_data
async_add_entities(AirOSSensor(coordinator, description) for description in SENSORS)
class AirOSSensor(AirOSEntity, SensorEntity):
"""Representation of a Sensor."""
entity_description: AirOSSensorEntityDescription
def __init__(
self,
coordinator: AirOSDataUpdateCoordinator,
description: AirOSSensorEntityDescription,
) -> None:
"""Initialize the sensor."""
super().__init__(coordinator)
self.entity_description = description
self._attr_unique_id = f"{coordinator.data.derived.mac}_{description.key}"
@property
def native_value(self) -> StateType:
"""Return the state of the sensor."""
return self.entity_description.value_fn(self.coordinator.data)

View File

@@ -1,117 +0,0 @@
{
"config": {
"flow_title": "Ubiquiti airOS device",
"step": {
"user": {
"data": {
"host": "[%key:common::config_flow::data::host%]",
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"host": "IP address or hostname of the airOS device",
"username": "Administrator username for the airOS device, normally 'ubnt'",
"password": "Password configured through the UISP app or web interface"
}
}
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"key_data_missing": "Expected data not returned from the device, check the documentation for supported devices",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
}
},
"entity": {
"binary_sensor": {
"port_forwarding": {
"name": "Port forwarding"
},
"dhcp_client": {
"name": "DHCP client"
},
"dhcp_server": {
"name": "DHCP server"
},
"dhcp6_server": {
"name": "DHCPv6 server"
},
"pppoe": {
"name": "PPPoE link"
}
},
"sensor": {
"host_cpuload": {
"name": "CPU load"
},
"host_netrole": {
"name": "Network role",
"state": {
"bridge": "Bridge",
"router": "Router"
}
},
"wireless_frequency": {
"name": "Wireless frequency"
},
"wireless_essid": {
"name": "Wireless SSID"
},
"wireless_antenna_gain": {
"name": "Antenna gain"
},
"wireless_throughput_tx": {
"name": "Throughput transmit (actual)"
},
"wireless_throughput_rx": {
"name": "Throughput receive (actual)"
},
"wireless_polling_dl_capacity": {
"name": "Download capacity"
},
"wireless_polling_ul_capacity": {
"name": "Upload capacity"
},
"wireless_remote_hostname": {
"name": "Remote hostname"
},
"host_uptime": {
"name": "Uptime"
},
"wireless_distance": {
"name": "Wireless distance"
},
"wireless_role": {
"name": "Wireless role",
"state": {
"access_point": "Access point",
"station": "Station"
}
},
"wireless_mode": {
"name": "Wireless mode",
"state": {
"point_to_point": "Point-to-point",
"point_to_multipoint": "Point-to-multipoint"
}
}
}
},
"exceptions": {
"invalid_auth": {
"message": "[%key:common::config_flow::error::invalid_auth%]"
},
"cannot_connect": {
"message": "[%key:common::config_flow::error::cannot_connect%]"
},
"key_data_missing": {
"message": "Key data not returned from device"
},
"error_data_missing": {
"message": "Data incomplete or missing"
}
}
}

View File

@@ -9,7 +9,7 @@ from homeassistant.core import HomeAssistant
from .const import CONF_CLIP_NEGATIVE, CONF_RETURN_AVERAGE from .const import CONF_CLIP_NEGATIVE, CONF_RETURN_AVERAGE
from .coordinator import AirQCoordinator from .coordinator import AirQCoordinator
PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR] PLATFORMS: list[Platform] = [Platform.SENSOR]
AirQConfigEntry = ConfigEntry[AirQCoordinator] AirQConfigEntry = ConfigEntry[AirQCoordinator]

View File

@@ -6,5 +6,6 @@ CONF_RETURN_AVERAGE: Final = "return_average"
CONF_CLIP_NEGATIVE: Final = "clip_negatives" CONF_CLIP_NEGATIVE: Final = "clip_negatives"
DOMAIN: Final = "airq" DOMAIN: Final = "airq"
MANUFACTURER: Final = "CorantGmbH" MANUFACTURER: Final = "CorantGmbH"
CONCENTRATION_GRAMS_PER_CUBIC_METER: Final = "g/m³"
ACTIVITY_BECQUEREL_PER_CUBIC_METER: Final = "Bq/m³" ACTIVITY_BECQUEREL_PER_CUBIC_METER: Final = "Bq/m³"
UPDATE_INTERVAL: float = 10.0 UPDATE_INTERVAL: float = 10.0

View File

@@ -75,7 +75,6 @@ class AirQCoordinator(DataUpdateCoordinator):
return_average=self.return_average, return_average=self.return_average,
clip_negative_values=self.clip_negative, clip_negative_values=self.clip_negative,
) )
data["brightness"] = await self.airq.get_current_brightness()
if warming_up_sensors := identify_warming_up_sensors(data): if warming_up_sensors := identify_warming_up_sensors(data):
_LOGGER.debug( _LOGGER.debug(
"Following sensors are still warming up: %s", warming_up_sensors "Following sensors are still warming up: %s", warming_up_sensors

View File

@@ -4,6 +4,9 @@
"health_index": { "health_index": {
"default": "mdi:heart-pulse" "default": "mdi:heart-pulse"
}, },
"absolute_humidity": {
"default": "mdi:water"
},
"oxygen": { "oxygen": {
"default": "mdi:leaf" "default": "mdi:leaf"
}, },

View File

@@ -1,85 +0,0 @@
"""Definition of air-Q number platform used to control the LED strips."""
from __future__ import annotations
from collections.abc import Awaitable, Callable
from dataclasses import dataclass
import logging
from aioairq.core import AirQ
from homeassistant.components.number import NumberEntity, NumberEntityDescription
from homeassistant.const import PERCENTAGE
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import AirQConfigEntry, AirQCoordinator
_LOGGER = logging.getLogger(__name__)
@dataclass(frozen=True, kw_only=True)
class AirQBrightnessDescription(NumberEntityDescription):
"""Describes AirQ number entity responsible for brightness control."""
value: Callable[[dict], float]
set_value: Callable[[AirQ, float], Awaitable[None]]
AIRQ_LED_BRIGHTNESS = AirQBrightnessDescription(
key="airq_led_brightness",
translation_key="airq_led_brightness",
native_min_value=0.0,
native_max_value=100.0,
native_step=1.0,
native_unit_of_measurement=PERCENTAGE,
value=lambda data: data["brightness"],
set_value=lambda device, value: device.set_current_brightness(value),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: AirQConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up number entities: a single entity for the LEDs."""
coordinator = entry.runtime_data
entities = [AirQLEDBrightness(coordinator, AIRQ_LED_BRIGHTNESS)]
async_add_entities(entities)
class AirQLEDBrightness(CoordinatorEntity[AirQCoordinator], NumberEntity):
"""Representation of the LEDs from a single AirQ."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: AirQCoordinator,
description: AirQBrightnessDescription,
) -> None:
"""Initialize a single sensor."""
super().__init__(coordinator)
self.entity_description: AirQBrightnessDescription = description
self._attr_device_info = coordinator.device_info
self._attr_unique_id = f"{coordinator.device_id}_{description.key}"
@property
def native_value(self) -> float:
"""Return the brightness of the LEDs in %."""
return self.entity_description.value(self.coordinator.data)
async def async_set_native_value(self, value: float) -> None:
"""Set the brightness of the LEDs to the value in %."""
_LOGGER.debug(
"Changing LED brighntess from %.0f%% to %.0f%%",
self.coordinator.data["brightness"],
value,
)
await self.entity_description.set_value(self.coordinator.airq, value)
await self.coordinator.async_request_refresh()
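Driving this entity from elsewhere goes through the standard number.set_value service; a small helper sketch (the entity ID and value passed in are up to the caller and purely illustrative):

from homeassistant.components.number import ATTR_VALUE, DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import HomeAssistant


async def set_led_brightness(hass: HomeAssistant, entity_id: str, percent: float) -> None:
    """Call number.set_value against the LED brightness entity."""
    await hass.services.async_call(
        NUMBER_DOMAIN,
        SERVICE_SET_VALUE,
        {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: percent},
        blocking=True,
    )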

View File

@@ -14,7 +14,6 @@ from homeassistant.components.sensor import (
SensorStateClass, SensorStateClass,
) )
from homeassistant.const import ( from homeassistant.const import (
CONCENTRATION_GRAMS_PER_CUBIC_METER,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER, CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
CONCENTRATION_PARTS_PER_BILLION, CONCENTRATION_PARTS_PER_BILLION,
@@ -29,7 +28,10 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import AirQConfigEntry, AirQCoordinator from . import AirQConfigEntry, AirQCoordinator
from .const import ACTIVITY_BECQUEREL_PER_CUBIC_METER from .const import (
ACTIVITY_BECQUEREL_PER_CUBIC_METER,
CONCENTRATION_GRAMS_PER_CUBIC_METER,
)
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -193,7 +195,7 @@ SENSOR_TYPES: list[AirQEntityDescription] = [
), ),
AirQEntityDescription( AirQEntityDescription(
key="humidity_abs", key="humidity_abs",
device_class=SensorDeviceClass.ABSOLUTE_HUMIDITY, translation_key="absolute_humidity",
native_unit_of_measurement=CONCENTRATION_GRAMS_PER_CUBIC_METER, native_unit_of_measurement=CONCENTRATION_GRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT, state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.get("humidity_abs"), value=lambda data: data.get("humidity_abs"),

View File

@@ -35,11 +35,6 @@
} }
}, },
"entity": { "entity": {
"number": {
"airq_led_brightness": {
"name": "LED brightness"
}
},
"sensor": { "sensor": {
"acetaldehyde": { "acetaldehyde": {
"name": "Acetaldehyde" "name": "Acetaldehyde"
@@ -98,6 +93,9 @@
"health_index": { "health_index": {
"name": "Health index" "name": "Health index"
}, },
"absolute_humidity": {
"name": "Absolute humidity"
},
"hydrogen": { "hydrogen": {
"name": "Hydrogen" "name": "Hydrogen"
}, },

View File

@@ -7,18 +7,21 @@ import logging
from airthings import Airthings from airthings import Airthings
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ID, Platform from homeassistant.const import CONF_ID, Platform
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import CONF_SECRET from .const import CONF_SECRET
from .coordinator import AirthingsConfigEntry, AirthingsDataUpdateCoordinator from .coordinator import AirthingsDataUpdateCoordinator
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
PLATFORMS: list[Platform] = [Platform.SENSOR] PLATFORMS: list[Platform] = [Platform.SENSOR]
SCAN_INTERVAL = timedelta(minutes=6) SCAN_INTERVAL = timedelta(minutes=6)
type AirthingsConfigEntry = ConfigEntry[AirthingsDataUpdateCoordinator]
async def async_setup_entry(hass: HomeAssistant, entry: AirthingsConfigEntry) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: AirthingsConfigEntry) -> bool:
"""Set up Airthings from a config entry.""" """Set up Airthings from a config entry."""
@@ -28,7 +31,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirthingsConfigEntry) ->
async_get_clientsession(hass), async_get_clientsession(hass),
) )
coordinator = AirthingsDataUpdateCoordinator(hass, airthings, entry) coordinator = AirthingsDataUpdateCoordinator(hass, airthings)
await coordinator.async_config_entry_first_refresh() await coordinator.async_config_entry_first_refresh()

View File

@@ -45,8 +45,6 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
) )
errors = {} errors = {}
await self.async_set_unique_id(user_input[CONF_ID])
self._abort_if_unique_id_configured()
try: try:
await airthings.get_token( await airthings.get_token(
@@ -62,6 +60,9 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.exception("Unexpected exception") _LOGGER.exception("Unexpected exception")
errors["base"] = "unknown" errors["base"] = "unknown"
else: else:
await self.async_set_unique_id(user_input[CONF_ID])
self._abort_if_unique_id_configured()
return self.async_create_entry(title="Airthings", data=user_input) return self.async_create_entry(title="Airthings", data=user_input)
return self.async_show_form( return self.async_show_form(

View File

@@ -5,7 +5,6 @@ import logging
from airthings import Airthings, AirthingsDevice, AirthingsError from airthings import Airthings, AirthingsDevice, AirthingsError
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -14,23 +13,15 @@ from .const import DOMAIN
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(minutes=6) SCAN_INTERVAL = timedelta(minutes=6)
type AirthingsConfigEntry = ConfigEntry[AirthingsDataUpdateCoordinator]
class AirthingsDataUpdateCoordinator(DataUpdateCoordinator[dict[str, AirthingsDevice]]): class AirthingsDataUpdateCoordinator(DataUpdateCoordinator[dict[str, AirthingsDevice]]):
"""Coordinator for Airthings data updates.""" """Coordinator for Airthings data updates."""
def __init__( def __init__(self, hass: HomeAssistant, airthings: Airthings) -> None:
self,
hass: HomeAssistant,
airthings: Airthings,
config_entry: AirthingsConfigEntry,
) -> None:
"""Initialize the coordinator.""" """Initialize the coordinator."""
super().__init__( super().__init__(
hass, hass,
_LOGGER, _LOGGER,
config_entry=config_entry,
name=DOMAIN, name=DOMAIN,
update_method=self._update_method, update_method=self._update_method,
update_interval=SCAN_INTERVAL, update_interval=SCAN_INTERVAL,

View File

@@ -150,7 +150,7 @@ async def async_setup_entry(
coordinator = entry.runtime_data coordinator = entry.runtime_data
entities = [ entities = [
AirthingsDeviceSensor( AirthingsHeaterEnergySensor(
coordinator, coordinator,
airthings_device, airthings_device,
SENSORS[sensor_types], SENSORS[sensor_types],
@@ -162,7 +162,7 @@ async def async_setup_entry(
async_add_entities(entities) async_add_entities(entities)
class AirthingsDeviceSensor( class AirthingsHeaterEnergySensor(
CoordinatorEntity[AirthingsDataUpdateCoordinator], SensorEntity CoordinatorEntity[AirthingsDataUpdateCoordinator], SensorEntity
): ):
"""Representation of a Airthings Sensor device.""" """Representation of a Airthings Sensor device."""

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airzone_cloud", "documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
"iot_class": "cloud_push", "iot_class": "cloud_push",
"loggers": ["aioairzone_cloud"], "loggers": ["aioairzone_cloud"],
"requirements": ["aioairzone-cloud==0.7.2"] "requirements": ["aioairzone-cloud==0.6.12"]
} }

View File

@@ -2,112 +2,39 @@
from __future__ import annotations from __future__ import annotations
from genie_partner_sdk.client import AladdinConnectClient from homeassistant.config_entries import ConfigEntry
from genie_partner_sdk.model import GarageDoor
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.helpers import ( from homeassistant.helpers import issue_registry as ir
aiohttp_client,
config_entry_oauth2_flow,
device_registry as dr,
)
from . import api DOMAIN = "aladdin_connect"
from .const import CONFIG_FLOW_MINOR_VERSION, CONFIG_FLOW_VERSION, DOMAIN
from .coordinator import AladdinConnectConfigEntry, AladdinConnectCoordinator
PLATFORMS: list[Platform] = [Platform.COVER, Platform.SENSOR]
async def async_setup_entry( async def async_setup_entry(hass: HomeAssistant, _: ConfigEntry) -> bool:
hass: HomeAssistant, entry: AladdinConnectConfigEntry """Set up Aladdin Connect from a config entry."""
) -> bool: ir.async_create_issue(
"""Set up Aladdin Connect Genie from a config entry.""" hass,
implementation = ( DOMAIN,
await config_entry_oauth2_flow.async_get_config_entry_implementation( DOMAIN,
hass, entry is_fixable=False,
) severity=ir.IssueSeverity.ERROR,
translation_key="integration_removed",
translation_placeholders={
"entries": "/config/integrations/integration/aladdin_connect",
},
) )
session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
client = AladdinConnectClient(
api.AsyncConfigEntryAuth(aiohttp_client.async_get_clientsession(hass), session)
)
sdk_doors = await client.get_doors()
# Convert SDK GarageDoor objects to integration GarageDoor objects
doors = [
GarageDoor(
{
"device_id": door.device_id,
"door_number": door.door_number,
"name": door.name,
"status": door.status,
"link_status": door.link_status,
"battery_level": door.battery_level,
}
)
for door in sdk_doors
]
entry.runtime_data = {
door.unique_id: AladdinConnectCoordinator(hass, entry, client, door)
for door in doors
}
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
remove_stale_devices(hass, entry)
return True return True
async def async_unload_entry( async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
hass: HomeAssistant, entry: AladdinConnectConfigEntry
) -> bool:
"""Unload a config entry.""" """Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
async def async_migrate_entry(
hass: HomeAssistant, config_entry: AladdinConnectConfigEntry
) -> bool:
"""Migrate old config."""
if config_entry.version < CONFIG_FLOW_VERSION:
config_entry.async_start_reauth(hass)
new_data = {**config_entry.data}
hass.config_entries.async_update_entry(
config_entry,
data=new_data,
version=CONFIG_FLOW_VERSION,
minor_version=CONFIG_FLOW_MINOR_VERSION,
)
return True return True
def remove_stale_devices( async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
hass: HomeAssistant, """Remove a config entry."""
config_entry: AladdinConnectConfigEntry, if not hass.config_entries.async_loaded_entries(DOMAIN):
) -> None: ir.async_delete_issue(hass, DOMAIN, DOMAIN)
"""Remove stale devices from device registry.""" # Remove any remaining disabled or ignored entries
device_registry = dr.async_get(hass) for _entry in hass.config_entries.async_entries(DOMAIN):
device_entries = dr.async_entries_for_config_entry( hass.async_create_task(hass.config_entries.async_remove(_entry.entry_id))
device_registry, config_entry.entry_id
)
all_device_ids = set(config_entry.runtime_data)
for device_entry in device_entries:
device_id: str | None = None
for identifier in device_entry.identifiers:
if identifier[0] == DOMAIN:
device_id = identifier[1]
break
if device_id and device_id not in all_device_ids:
device_registry.async_update_device(
device_entry.id, remove_config_entry_id=config_entry.entry_id
)

View File

@@ -1,33 +0,0 @@
"""API for Aladdin Connect Genie bound to Home Assistant OAuth."""
from typing import cast
from aiohttp import ClientSession
from genie_partner_sdk.auth import Auth
from homeassistant.helpers import config_entry_oauth2_flow
API_URL = "https://twdvzuefzh.execute-api.us-east-2.amazonaws.com/v1"
API_KEY = "k6QaiQmcTm2zfaNns5L1Z8duBtJmhDOW8JawlCC3"
class AsyncConfigEntryAuth(Auth):
"""Provide Aladdin Connect Genie authentication tied to an OAuth2 based config entry."""
def __init__(
self,
websession: ClientSession,
oauth_session: config_entry_oauth2_flow.OAuth2Session,
) -> None:
"""Initialize Aladdin Connect Genie auth."""
super().__init__(
websession, API_URL, oauth_session.token["access_token"], API_KEY
)
self._oauth_session = oauth_session
async def async_get_access_token(self) -> str:
"""Return a valid access token."""
if not self._oauth_session.valid_token:
await self._oauth_session.async_ensure_token_valid()
return cast(str, self._oauth_session.token["access_token"])

View File

@@ -1,14 +0,0 @@
"""application_credentials platform the Aladdin Connect Genie integration."""
from homeassistant.components.application_credentials import AuthorizationServer
from homeassistant.core import HomeAssistant
from .const import OAUTH2_AUTHORIZE, OAUTH2_TOKEN
async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer:
"""Return authorization server."""
return AuthorizationServer(
authorize_url=OAUTH2_AUTHORIZE,
token_url=OAUTH2_TOKEN,
)

View File

@@ -1,63 +1,11 @@
"""Config flow for Aladdin Connect Genie.""" """Config flow for Aladdin Connect integration."""
from collections.abc import Mapping from homeassistant.config_entries import ConfigFlow
import logging
from typing import Any
import jwt from . import DOMAIN
import voluptuous as vol
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
from homeassistant.helpers import config_entry_oauth2_flow
from .const import CONFIG_FLOW_MINOR_VERSION, CONFIG_FLOW_VERSION, DOMAIN
class OAuth2FlowHandler( class AladdinConnectConfigFlow(ConfigFlow, domain=DOMAIN):
config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN """Handle a config flow for Aladdin Connect."""
):
"""Config flow to handle Aladdin Connect Genie OAuth2 authentication."""
DOMAIN = DOMAIN VERSION = 1
VERSION = CONFIG_FLOW_VERSION
MINOR_VERSION = CONFIG_FLOW_MINOR_VERSION
async def async_step_reauth(
self, user_input: Mapping[str, Any]
) -> ConfigFlowResult:
"""Perform reauth upon API auth error or upgrade from v1 to v2."""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: Mapping[str, Any] | None = None
) -> ConfigFlowResult:
"""Dialog that informs the user that reauth is required."""
if user_input is None:
return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema({}),
)
return await self.async_step_user()
async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult:
"""Create an oauth config entry or update existing entry for reauth."""
# Extract the user ID from the JWT token's 'sub' field
token = jwt.decode(
data["token"]["access_token"], options={"verify_signature": False}
)
user_id = token["sub"]
await self.async_set_unique_id(user_id)
if self.source == SOURCE_REAUTH:
self._abort_if_unique_id_mismatch(reason="wrong_account")
return self.async_update_reload_and_abort(
self._get_reauth_entry(), data=data
)
self._abort_if_unique_id_configured()
return self.async_create_entry(title="Aladdin Connect", data=data)
@property
def logger(self) -> logging.Logger:
"""Return logger."""
return logging.getLogger(__name__)
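For context, reading the user ID out of an access token without verifying the signature (as the removed flow does) looks roughly like this with PyJWT; the token below is generated on the spot purely for demonstration:

import jwt

# Build a throwaway token just for the demo; the real flow receives one from the OAuth session.
access_token = jwt.encode({"sub": "user-123"}, "demo-secret", algorithm="HS256")
claims = jwt.decode(access_token, options={"verify_signature": False})
user_id = claims["sub"]  # "user-123"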

View File

@@ -1,14 +0,0 @@
"""Constants for the Aladdin Connect Genie integration."""
from typing import Final
from homeassistant.components.cover import CoverEntityFeature
DOMAIN = "aladdin_connect"
CONFIG_FLOW_VERSION = 2
CONFIG_FLOW_MINOR_VERSION = 1
OAUTH2_AUTHORIZE = "https://app.aladdinconnect.com/login.html"
OAUTH2_TOKEN = "https://twdvzuefzh.execute-api.us-east-2.amazonaws.com/v1/oauth2/token"
SUPPORTED_FEATURES: Final = CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE

View File

@@ -1,44 +0,0 @@
"""Coordinator for Aladdin Connect integration."""
from __future__ import annotations
from datetime import timedelta
import logging
from genie_partner_sdk.client import AladdinConnectClient
from genie_partner_sdk.model import GarageDoor
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
_LOGGER = logging.getLogger(__name__)
type AladdinConnectConfigEntry = ConfigEntry[dict[str, AladdinConnectCoordinator]]
SCAN_INTERVAL = timedelta(seconds=15)
class AladdinConnectCoordinator(DataUpdateCoordinator[GarageDoor]):
"""Coordinator for Aladdin Connect integration."""
def __init__(
self,
hass: HomeAssistant,
entry: AladdinConnectConfigEntry,
client: AladdinConnectClient,
garage_door: GarageDoor,
) -> None:
"""Initialize the coordinator."""
super().__init__(
hass,
logger=_LOGGER,
config_entry=entry,
name="Aladdin Connect Coordinator",
update_interval=SCAN_INTERVAL,
)
self.client = client
self.data = garage_door
async def _async_update_data(self) -> GarageDoor:
"""Fetch data from the Aladdin Connect API."""
await self.client.update_door(self.data.device_id, self.data.door_number)
return self.data

View File

@@ -1,62 +0,0 @@
"""Cover Entity for Genie Garage Door."""
from __future__ import annotations
from typing import Any
from homeassistant.components.cover import CoverDeviceClass, CoverEntity
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import SUPPORTED_FEATURES
from .coordinator import AladdinConnectConfigEntry, AladdinConnectCoordinator
from .entity import AladdinConnectEntity
async def async_setup_entry(
hass: HomeAssistant,
entry: AladdinConnectConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the cover platform."""
coordinators = entry.runtime_data
async_add_entities(
AladdinCoverEntity(coordinator) for coordinator in coordinators.values()
)
class AladdinCoverEntity(AladdinConnectEntity, CoverEntity):
"""Representation of Aladdin Connect cover."""
_attr_device_class = CoverDeviceClass.GARAGE
_attr_supported_features = SUPPORTED_FEATURES
_attr_name = None
def __init__(self, coordinator: AladdinConnectCoordinator) -> None:
"""Initialize the Aladdin Connect cover."""
super().__init__(coordinator)
self._attr_unique_id = coordinator.data.unique_id
async def async_open_cover(self, **kwargs: Any) -> None:
"""Issue open command to cover."""
await self.client.open_door(self._device_id, self._number)
async def async_close_cover(self, **kwargs: Any) -> None:
"""Issue close command to cover."""
await self.client.close_door(self._device_id, self._number)
@property
def is_closed(self) -> bool | None:
"""Update is closed attribute."""
return self.coordinator.data.status == "closed"
@property
def is_closing(self) -> bool | None:
"""Update is closing attribute."""
return self.coordinator.data.status == "closing"
@property
def is_opening(self) -> bool | None:
"""Update is opening attribute."""
return self.coordinator.data.status == "opening"

View File

@@ -1,32 +0,0 @@
"""Base class for Aladdin Connect entities."""
from genie_partner_sdk.client import AladdinConnectClient
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import AladdinConnectCoordinator
class AladdinConnectEntity(CoordinatorEntity[AladdinConnectCoordinator]):
"""Defines a base Aladdin Connect entity."""
_attr_has_entity_name = True
def __init__(self, coordinator: AladdinConnectCoordinator) -> None:
"""Initialize Aladdin Connect entity."""
super().__init__(coordinator)
device = coordinator.data
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, device.unique_id)},
manufacturer="Aladdin Connect",
name=device.name,
)
self._device_id = device.device_id
self._number = device.door_number
@property
def client(self) -> AladdinConnectClient:
"""Return the client for this entity."""
return self.coordinator.client

View File

@@ -1,11 +1,9 @@
{ {
"domain": "aladdin_connect", "domain": "aladdin_connect",
"name": "Aladdin Connect", "name": "Aladdin Connect",
"codeowners": ["@swcloudgenie"], "codeowners": [],
"config_flow": true,
"dependencies": ["application_credentials"],
"documentation": "https://www.home-assistant.io/integrations/aladdin_connect", "documentation": "https://www.home-assistant.io/integrations/aladdin_connect",
"integration_type": "hub", "integration_type": "system",
"iot_class": "cloud_polling", "iot_class": "cloud_polling",
"requirements": ["genie-partner-sdk==1.0.10"] "requirements": []
} }

View File

@@ -1,94 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: Integration does not register any service actions.
appropriate-polling: done
brands: done
common-modules: done
config-flow: done
config-flow-test-coverage: todo
dependency-transparency: done
docs-actions:
status: exempt
comment: Integration does not register any service actions.
docs-high-level-description: done
docs-installation-instructions:
status: todo
comment: Documentation needs to be created.
docs-removal-instructions:
status: todo
comment: Documentation needs to be created.
entity-event-setup:
status: exempt
comment: Integration does not subscribe to external events.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure:
status: todo
comment: Config flow does not currently test connection during setup.
test-before-setup: todo
unique-config-entry: done
# Silver
action-exceptions: todo
config-entry-unloading: done
docs-configuration-parameters:
status: todo
comment: Documentation needs to be created.
docs-installation-parameters:
status: todo
comment: Documentation needs to be created.
entity-unavailable: todo
integration-owner: done
log-when-unavailable: todo
parallel-updates: todo
reauthentication-flow: done
test-coverage:
status: todo
comment: Platform tests for cover and sensor need to be implemented to reach 95% coverage.
# Gold
devices: done
diagnostics: todo
discovery: todo
discovery-update-info: todo
docs-data-update:
status: todo
comment: Documentation needs to be created.
docs-examples:
status: todo
comment: Documentation needs to be created.
docs-known-limitations:
status: todo
comment: Documentation needs to be created.
docs-supported-devices:
status: todo
comment: Documentation needs to be created.
docs-supported-functions:
status: todo
comment: Documentation needs to be created.
docs-troubleshooting:
status: todo
comment: Documentation needs to be created.
docs-use-cases:
status: todo
comment: Documentation needs to be created.
dynamic-devices: todo
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
repair-issues: todo
stale-devices:
status: todo
comment: Stale devices can be done dynamically
# Platinum
async-dependency: todo
inject-websession: done
strict-typing: done

View File

@@ -1,77 +0,0 @@
"""Support for Aladdin Connect Genie sensors."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from genie_partner_sdk.model import GarageDoor
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.const import PERCENTAGE, EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import AladdinConnectConfigEntry, AladdinConnectCoordinator
from .entity import AladdinConnectEntity
@dataclass(frozen=True, kw_only=True)
class AladdinConnectSensorEntityDescription(SensorEntityDescription):
"""Sensor entity description for Aladdin Connect."""
value_fn: Callable[[GarageDoor], float | None]
SENSOR_TYPES: tuple[AladdinConnectSensorEntityDescription, ...] = (
AladdinConnectSensorEntityDescription(
key="battery_level",
device_class=SensorDeviceClass.BATTERY,
entity_registry_enabled_default=False,
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda garage_door: garage_door.battery_level,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: AladdinConnectConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Aladdin Connect sensor devices."""
coordinators = entry.runtime_data
async_add_entities(
AladdinConnectSensor(coordinator, description)
for coordinator in coordinators.values()
for description in SENSOR_TYPES
)
class AladdinConnectSensor(AladdinConnectEntity, SensorEntity):
"""A sensor implementation for Aladdin Connect device."""
entity_description: AladdinConnectSensorEntityDescription
def __init__(
self,
coordinator: AladdinConnectCoordinator,
entity_description: AladdinConnectSensorEntityDescription,
) -> None:
"""Initialize the Aladdin Connect sensor."""
super().__init__(coordinator)
self.entity_description = entity_description
self._attr_unique_id = f"{coordinator.data.unique_id}-{entity_description.key}"
@property
def native_value(self) -> float | None:
"""Return the state of the sensor."""
return self.entity_description.value_fn(self.coordinator.data)

View File

@@ -1,30 +1,8 @@
{ {
"config": { "issues": {
"step": { "integration_removed": {
"pick_implementation": { "title": "The Aladdin Connect integration has been removed",
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]" "description": "The Aladdin Connect integration has been removed from Home Assistant.\n\nTo resolve this issue, please remove the (now defunct) integration entries from your Home Assistant setup. [Click here to see your existing Aladdin Connect integration entries]({entries})."
},
"reauth_confirm": {
"title": "[%key:common::config_flow::title::reauth%]",
"description": "Aladdin Connect needs to re-authenticate your account"
}
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
"oauth_error": "[%key:common::config_flow::abort::oauth2_error%]",
"oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]",
"oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]",
"oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]",
"missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]",
"authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]",
"no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]",
"user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"wrong_account": "You are authenticated with a different account than the one set up. Please authenticate with the configured account."
},
"create_entry": {
"default": "[%key:common::config_flow::create_entry::authenticated%]"
} }
} }
} }

View File

@@ -1,7 +1,4 @@
"""Support for repeating alerts when conditions are met. """Support for repeating alerts when conditions are met."""
DEVELOPMENT OF THE ALERT INTEGRATION IS FROZEN.
"""
from __future__ import annotations from __future__ import annotations
@@ -66,10 +63,7 @@ CONFIG_SCHEMA = vol.Schema(
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Alert component. """Set up the Alert component."""
DEVELOPMENT OF THE ALERT INTEGRATION IS FROZEN.
"""
component = EntityComponent[AlertEntity](LOGGER, DOMAIN, hass) component = EntityComponent[AlertEntity](LOGGER, DOMAIN, hass)
entities: list[AlertEntity] = [] entities: list[AlertEntity] = []

View File

@@ -1,7 +1,4 @@
"""Support for repeating alerts when conditions are met. """Support for repeating alerts when conditions are met."""
DEVELOPMENT OF THE ALERT INTEGRATION IS FROZEN.
"""
from __future__ import annotations from __future__ import annotations
@@ -30,10 +27,7 @@ from .const import DOMAIN, LOGGER
class AlertEntity(Entity): class AlertEntity(Entity):
"""Representation of an alert. """Representation of an alert."""
DEVELOPMENT OF THE ALERT INTEGRATION IS FROZEN.
"""
_attr_should_poll = False _attr_should_poll = False

View File

@@ -1,7 +1,4 @@
"""Reproduce an Alert state. """Reproduce an Alert state."""
DEVELOPMENT OF THE ALERT INTEGRATION IS FROZEN.
"""
from __future__ import annotations from __future__ import annotations

View File

@@ -505,13 +505,8 @@ class ClimateCapabilities(AlexaEntity):
): ):
yield AlexaThermostatController(self.hass, self.entity) yield AlexaThermostatController(self.hass, self.entity)
yield AlexaTemperatureSensor(self.hass, self.entity) yield AlexaTemperatureSensor(self.hass, self.entity)
if ( if self.entity.domain == water_heater.DOMAIN and (
self.entity.domain == water_heater.DOMAIN supported_features & water_heater.WaterHeaterEntityFeature.OPERATION_MODE
and (
supported_features
& water_heater.WaterHeaterEntityFeature.OPERATION_MODE
)
and self.entity.attributes.get(water_heater.ATTR_OPERATION_LIST)
): ):
yield AlexaModeController( yield AlexaModeController(
self.entity, self.entity,
@@ -639,9 +634,7 @@ class FanCapabilities(AlexaEntity):
self.entity, instance=f"{fan.DOMAIN}.{fan.ATTR_OSCILLATING}" self.entity, instance=f"{fan.DOMAIN}.{fan.ATTR_OSCILLATING}"
) )
force_range_controller = False force_range_controller = False
if supported & fan.FanEntityFeature.PRESET_MODE and self.entity.attributes.get( if supported & fan.FanEntityFeature.PRESET_MODE:
fan.ATTR_PRESET_MODES
):
yield AlexaModeController( yield AlexaModeController(
self.entity, instance=f"{fan.DOMAIN}.{fan.ATTR_PRESET_MODE}" self.entity, instance=f"{fan.DOMAIN}.{fan.ATTR_PRESET_MODE}"
) )
@@ -679,11 +672,7 @@ class RemoteCapabilities(AlexaEntity):
yield AlexaPowerController(self.entity) yield AlexaPowerController(self.entity)
supported = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0) supported = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
activities = self.entity.attributes.get(remote.ATTR_ACTIVITY_LIST) or [] activities = self.entity.attributes.get(remote.ATTR_ACTIVITY_LIST) or []
if ( if activities and supported & remote.RemoteEntityFeature.ACTIVITY:
activities
and (supported & remote.RemoteEntityFeature.ACTIVITY)
and self.entity.attributes.get(remote.ATTR_ACTIVITY_LIST)
):
yield AlexaModeController( yield AlexaModeController(
self.entity, instance=f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}" self.entity, instance=f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}"
) )
@@ -703,9 +692,7 @@ class HumidifierCapabilities(AlexaEntity):
"""Yield the supported interfaces.""" """Yield the supported interfaces."""
yield AlexaPowerController(self.entity) yield AlexaPowerController(self.entity)
supported = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0) supported = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
if ( if supported & humidifier.HumidifierEntityFeature.MODES:
supported & humidifier.HumidifierEntityFeature.MODES
) and self.entity.attributes.get(humidifier.ATTR_AVAILABLE_MODES):
yield AlexaModeController( yield AlexaModeController(
self.entity, instance=f"{humidifier.DOMAIN}.{humidifier.ATTR_MODE}" self.entity, instance=f"{humidifier.DOMAIN}.{humidifier.ATTR_MODE}"
) )
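The conditionals touched in these hunks all rely on the same idiom: entity feature flags are `IntFlag` bitmasks, so a bitwise AND against `supported_features` tests whether a capability is advertised. A small illustrative helper (the function name is chosen here for illustration):

```python
# Illustrative helper showing the supported-features bitmask check used above.
from homeassistant.components import fan
from homeassistant.const import ATTR_SUPPORTED_FEATURES
from homeassistant.core import State


def supports_preset_mode(state: State) -> bool:
    """Return True if a fan state advertises the PRESET_MODE feature flag."""
    supported = state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
    return bool(supported & fan.FanEntityFeature.PRESET_MODE)
```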

View File

@@ -1,13 +1,9 @@
"""Alexa Devices integration.""" """Alexa Devices integration."""
from homeassistant.const import CONF_COUNTRY, Platform from homeassistant.const import Platform
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client, config_validation as cv
from homeassistant.helpers.typing import ConfigType
from .const import _LOGGER, CONF_LOGIN_DATA, COUNTRY_DOMAINS, DOMAIN
from .coordinator import AmazonConfigEntry, AmazonDevicesCoordinator from .coordinator import AmazonConfigEntry, AmazonDevicesCoordinator
from .services import async_setup_services
PLATFORMS = [ PLATFORMS = [
Platform.BINARY_SENSOR, Platform.BINARY_SENSOR,
@@ -16,20 +12,11 @@ PLATFORMS = [
Platform.SWITCH, Platform.SWITCH,
] ]
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Alexa Devices component."""
async_setup_services(hass)
return True
async def async_setup_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bool:
"""Set up Alexa Devices platform.""" """Set up Alexa Devices platform."""
session = aiohttp_client.async_create_clientsession(hass) coordinator = AmazonDevicesCoordinator(hass, entry)
coordinator = AmazonDevicesCoordinator(hass, entry, session)
await coordinator.async_config_entry_first_refresh() await coordinator.async_config_entry_first_refresh()
@@ -40,32 +27,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bo
return True return True
async def async_migrate_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bool:
"""Migrate old entry."""
if entry.version == 1 and entry.minor_version == 1:
_LOGGER.debug(
"Migrating from version %s.%s", entry.version, entry.minor_version
)
# Convert country in domain
country = entry.data[CONF_COUNTRY].lower()
domain = COUNTRY_DOMAINS.get(country, country)
# Add site to login data
new_data = entry.data.copy()
new_data[CONF_LOGIN_DATA]["site"] = f"https://www.amazon.{domain}"
hass.config_entries.async_update_entry(
entry, data=new_data, version=1, minor_version=2
)
_LOGGER.info(
"Migration to version %s.%s successful", entry.version, entry.minor_version
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bool:
"""Unload a config entry.""" """Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) coordinator = entry.runtime_data
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
await coordinator.api.close()
return unload_ok

View File

@@ -6,18 +6,14 @@ from collections.abc import Mapping
from typing import Any from typing import Any
from aioamazondevices.api import AmazonEchoApi from aioamazondevices.api import AmazonEchoApi
from aioamazondevices.exceptions import ( from aioamazondevices.exceptions import CannotAuthenticate, CannotConnect, WrongCountry
CannotAuthenticate,
CannotConnect,
CannotRetrieveData,
)
import voluptuous as vol import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_CODE, CONF_PASSWORD, CONF_USERNAME from homeassistant.const import CONF_CODE, CONF_COUNTRY, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client
import homeassistant.helpers.config_validation as cv import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.selector import CountrySelector
from .const import CONF_LOGIN_DATA, DOMAIN from .const import CONF_LOGIN_DATA, DOMAIN
@@ -27,33 +23,28 @@ STEP_REAUTH_DATA_SCHEMA = vol.Schema(
vol.Required(CONF_CODE): cv.string, vol.Required(CONF_CODE): cv.string,
} }
) )
STEP_RECONFIGURE = vol.Schema(
{
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_CODE): cv.string,
}
)
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Any]: async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Any]:
"""Validate the user input allows us to connect.""" """Validate the user input allows us to connect."""
session = aiohttp_client.async_create_clientsession(hass)
api = AmazonEchoApi( api = AmazonEchoApi(
session, data[CONF_COUNTRY],
data[CONF_USERNAME], data[CONF_USERNAME],
data[CONF_PASSWORD], data[CONF_PASSWORD],
) )
return await api.login_mode_interactive(data[CONF_CODE]) try:
data = await api.login_mode_interactive(data[CONF_CODE])
finally:
await api.close()
return data
class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN): class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Alexa Devices.""" """Handle a config flow for Alexa Devices."""
VERSION = 1
MINOR_VERSION = 2
async def async_step_user( async def async_step_user(
self, user_input: dict[str, Any] | None = None self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult: ) -> ConfigFlowResult:
@@ -64,10 +55,10 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
data = await validate_input(self.hass, user_input) data = await validate_input(self.hass, user_input)
except CannotConnect: except CannotConnect:
errors["base"] = "cannot_connect" errors["base"] = "cannot_connect"
except (CannotAuthenticate, TypeError): except CannotAuthenticate:
errors["base"] = "invalid_auth" errors["base"] = "invalid_auth"
except CannotRetrieveData: except WrongCountry:
errors["base"] = "cannot_retrieve_data" errors["base"] = "wrong_country"
else: else:
await self.async_set_unique_id(data["customer_info"]["user_id"]) await self.async_set_unique_id(data["customer_info"]["user_id"])
self._abort_if_unique_id_configured() self._abort_if_unique_id_configured()
@@ -82,6 +73,9 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
errors=errors, errors=errors,
data_schema=vol.Schema( data_schema=vol.Schema(
{ {
vol.Required(
CONF_COUNTRY, default=self.hass.config.country
): CountrySelector(),
vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string, vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_CODE): cv.string, vol.Required(CONF_CODE): cv.string,
@@ -110,10 +104,8 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
await validate_input(self.hass, {**reauth_entry.data, **user_input}) await validate_input(self.hass, {**reauth_entry.data, **user_input})
except CannotConnect: except CannotConnect:
errors["base"] = "cannot_connect" errors["base"] = "cannot_connect"
except (CannotAuthenticate, TypeError): except CannotAuthenticate:
errors["base"] = "invalid_auth" errors["base"] = "invalid_auth"
except CannotRetrieveData:
errors["base"] = "cannot_retrieve_data"
else: else:
return self.async_update_reload_and_abort( return self.async_update_reload_and_abort(
reauth_entry, reauth_entry,
@@ -130,47 +122,3 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
data_schema=STEP_REAUTH_DATA_SCHEMA, data_schema=STEP_REAUTH_DATA_SCHEMA,
errors=errors, errors=errors,
) )
async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reconfiguration of the device."""
reconfigure_entry = self._get_reconfigure_entry()
if not user_input:
return self.async_show_form(
step_id="reconfigure",
data_schema=STEP_RECONFIGURE,
)
updated_password = user_input[CONF_PASSWORD]
self._async_abort_entries_match(
{CONF_USERNAME: reconfigure_entry.data[CONF_USERNAME]}
)
errors: dict[str, str] = {}
try:
data = await validate_input(
self.hass, {**reconfigure_entry.data, **user_input}
)
except CannotConnect:
errors["base"] = "cannot_connect"
except CannotAuthenticate:
errors["base"] = "invalid_auth"
except CannotRetrieveData:
errors["base"] = "cannot_retrieve_data"
else:
return self.async_update_reload_and_abort(
reconfigure_entry,
data_updates={
CONF_PASSWORD: updated_password,
CONF_LOGIN_DATA: data,
},
)
return self.async_show_form(
step_id="reconfigure",
data_schema=STEP_RECONFIGURE,
errors=errors,
)
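This version of the flow maps `CannotConnect`, `CannotAuthenticate`, and `WrongCountry` to form errors. A hedged test sketch for one of those paths is below; the patch target, fixture-free setup, and field values are assumptions, since the flow module path and test fixtures are not shown in this diff.

```python
# Hedged test sketch for the invalid_auth error path; the patch target and
# submitted values are assumptions.
from unittest.mock import AsyncMock, patch

from aioamazondevices.exceptions import CannotAuthenticate

from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import CONF_CODE, CONF_COUNTRY, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType

DOMAIN = "alexa_devices"


async def test_user_flow_invalid_auth(hass: HomeAssistant) -> None:
    """Authentication errors should be reported on the user form."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    with patch(
        "homeassistant.components.alexa_devices.config_flow.validate_input",
        AsyncMock(side_effect=CannotAuthenticate),
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                CONF_COUNTRY: "US",
                CONF_USERNAME: "user@example.com",
                CONF_PASSWORD: "secret",
                CONF_CODE: "123456",
            },
        )
    assert result["type"] is FlowResultType.FORM
    assert result["errors"] == {"base": "invalid_auth"}
```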

View File

@@ -6,22 +6,3 @@ _LOGGER = logging.getLogger(__package__)
DOMAIN = "alexa_devices" DOMAIN = "alexa_devices"
CONF_LOGIN_DATA = "login_data" CONF_LOGIN_DATA = "login_data"
DEFAULT_DOMAIN = "com"
COUNTRY_DOMAINS = {
"ar": DEFAULT_DOMAIN,
"at": DEFAULT_DOMAIN,
"au": "com.au",
"be": "com.be",
"br": DEFAULT_DOMAIN,
"gb": "co.uk",
"il": DEFAULT_DOMAIN,
"jp": "co.jp",
"mx": "com.mx",
"no": DEFAULT_DOMAIN,
"nz": "com.au",
"pl": DEFAULT_DOMAIN,
"tr": "com.tr",
"us": DEFAULT_DOMAIN,
"za": "co.za",
}

View File

@@ -8,10 +8,9 @@ from aioamazondevices.exceptions import (
CannotConnect, CannotConnect,
CannotRetrieveData, CannotRetrieveData,
) )
from aiohttp import ClientSession
from homeassistant.config_entries import ConfigEntry from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.const import CONF_COUNTRY, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -32,7 +31,6 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
self, self,
hass: HomeAssistant, hass: HomeAssistant,
entry: AmazonConfigEntry, entry: AmazonConfigEntry,
session: ClientSession,
) -> None: ) -> None:
"""Initialize the scanner.""" """Initialize the scanner."""
super().__init__( super().__init__(
@@ -43,7 +41,7 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
update_interval=timedelta(seconds=SCAN_INTERVAL), update_interval=timedelta(seconds=SCAN_INTERVAL),
) )
self.api = AmazonEchoApi( self.api = AmazonEchoApi(
session, entry.data[CONF_COUNTRY],
entry.data[CONF_USERNAME], entry.data[CONF_USERNAME],
entry.data[CONF_PASSWORD], entry.data[CONF_PASSWORD],
entry.data[CONF_LOGIN_DATA], entry.data[CONF_LOGIN_DATA],
@@ -54,19 +52,9 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
try: try:
await self.api.login_mode_stored_data() await self.api.login_mode_stored_data()
return await self.api.get_devices_data() return await self.api.get_devices_data()
except CannotConnect as err: except (CannotConnect, CannotRetrieveData) as err:
raise UpdateFailed( raise UpdateFailed(f"Error occurred while updating {self.name}") from err
translation_domain=DOMAIN, except CannotAuthenticate as err:
translation_key="cannot_connect_with_error",
translation_placeholders={"error": repr(err)},
) from err
except CannotRetrieveData as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="cannot_retrieve_data_with_error",
translation_placeholders={"error": repr(err)},
) from err
except (CannotAuthenticate, TypeError) as err:
raise ConfigEntryAuthFailed( raise ConfigEntryAuthFailed(
translation_domain=DOMAIN, translation_domain=DOMAIN,
translation_key="invalid_auth", translation_key="invalid_auth",
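Both sides of this hunk follow the standard coordinator contract: transient failures raise `UpdateFailed` (entities become unavailable), while authentication problems raise `ConfigEntryAuthFailed` to start a reauth flow. A condensed sketch of that mapping, with the `AmazonDevice` import path and the `api` attribute assumed:

```python
# Condensed sketch of the coordinator error mapping shown above; the
# AmazonDevice/AmazonEchoApi import path and the api attribute are assumptions.
from aioamazondevices.api import AmazonDevice, AmazonEchoApi  # assumed path
from aioamazondevices.exceptions import (
    CannotAuthenticate,
    CannotConnect,
    CannotRetrieveData,
)

from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed


class ExampleCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
    """Illustrative coordinator; `api` is assumed to be set up in __init__."""

    api: AmazonEchoApi

    async def _async_update_data(self) -> dict[str, AmazonDevice]:
        try:
            await self.api.login_mode_stored_data()
            return await self.api.get_devices_data()
        except (CannotConnect, CannotRetrieveData) as err:
            # Transient: entities go unavailable until the next refresh succeeds.
            raise UpdateFailed(f"Error occurred while updating {self.name}") from err
        except CannotAuthenticate as err:
            # Credentials rejected: trigger the reauthentication flow.
            raise ConfigEntryAuthFailed("Invalid credentials") from err
```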

View File

@@ -38,13 +38,5 @@
} }
} }
} }
},
"services": {
"send_sound": {
"service": "mdi:cast-audio"
},
"send_text_command": {
"service": "mdi:microphone-message"
}
} }
} }

View File

@@ -7,6 +7,6 @@
"integration_type": "hub", "integration_type": "hub",
"iot_class": "cloud_polling", "iot_class": "cloud_polling",
"loggers": ["aioamazondevices"], "loggers": ["aioamazondevices"],
"quality_scale": "silver", "quality_scale": "bronze",
"requirements": ["aioamazondevices==6.0.0"] "requirements": ["aioamazondevices==3.2.3"]
} }

View File

@@ -28,39 +28,41 @@ rules:
# Silver # Silver
action-exceptions: done action-exceptions: done
config-entry-unloading: done config-entry-unloading: done
docs-configuration-parameters: done docs-configuration-parameters: todo
docs-installation-parameters: done docs-installation-parameters: todo
entity-unavailable: done entity-unavailable: done
integration-owner: done integration-owner: done
log-when-unavailable: done log-when-unavailable: done
parallel-updates: done parallel-updates: done
reauthentication-flow: done reauthentication-flow: done
test-coverage: done test-coverage:
status: todo
comment: all tests missing
# Gold # Gold
devices: done devices: done
diagnostics: done diagnostics: todo
discovery-update-info: discovery-update-info:
status: exempt status: exempt
comment: Network information not relevant comment: Network information not relevant
discovery: discovery:
status: exempt status: exempt
comment: There are a ton of mac address ranges in use, but also by kindles which are not supported by this integration comment: There are a ton of mac address ranges in use, but also by kindles which are not supported by this integration
docs-data-update: done docs-data-update: todo
docs-examples: done docs-examples: todo
docs-known-limitations: done docs-known-limitations: todo
docs-supported-devices: done docs-supported-devices: todo
docs-supported-functions: done docs-supported-functions: todo
docs-troubleshooting: done docs-troubleshooting: todo
docs-use-cases: done docs-use-cases: todo
dynamic-devices: todo dynamic-devices: todo
entity-category: done entity-category: done
entity-device-class: done entity-device-class: done
entity-disabled-by-default: done entity-disabled-by-default: done
entity-translations: done entity-translations: done
exception-translations: done exception-translations: todo
icon-translations: done icon-translations: done
reconfiguration-flow: done reconfiguration-flow: todo
repair-issues: repair-issues:
status: exempt status: exempt
comment: no known use cases for repair issues or flows, yet comment: no known use cases for repair issues or flows, yet
@@ -70,5 +72,5 @@ rules:
# Platinum # Platinum
async-dependency: done async-dependency: done
inject-websession: done inject-websession: todo
strict-typing: done strict-typing: done

View File

@@ -12,7 +12,6 @@ from homeassistant.components.sensor import (
SensorDeviceClass, SensorDeviceClass,
SensorEntity, SensorEntity,
SensorEntityDescription, SensorEntityDescription,
SensorStateClass,
) )
from homeassistant.const import LIGHT_LUX, UnitOfTemperature from homeassistant.const import LIGHT_LUX, UnitOfTemperature
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
@@ -42,13 +41,11 @@ SENSORS: Final = (
if device.sensors[_key].scale == "CELSIUS" if device.sensors[_key].scale == "CELSIUS"
else UnitOfTemperature.FAHRENHEIT else UnitOfTemperature.FAHRENHEIT
), ),
state_class=SensorStateClass.MEASUREMENT,
), ),
AmazonSensorEntityDescription( AmazonSensorEntityDescription(
key="illuminance", key="illuminance",
device_class=SensorDeviceClass.ILLUMINANCE, device_class=SensorDeviceClass.ILLUMINANCE,
native_unit_of_measurement=LIGHT_LUX, native_unit_of_measurement=LIGHT_LUX,
state_class=SensorStateClass.MEASUREMENT,
), ),
) )

View File

@@ -1,116 +0,0 @@
"""Support for services."""
from aioamazondevices.sounds import SOUNDS_LIST
import voluptuous as vol
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import ATTR_DEVICE_ID
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv, device_registry as dr
from .const import DOMAIN
from .coordinator import AmazonConfigEntry
ATTR_TEXT_COMMAND = "text_command"
ATTR_SOUND = "sound"
SERVICE_TEXT_COMMAND = "send_text_command"
SERVICE_SOUND_NOTIFICATION = "send_sound"
SCHEMA_SOUND_SERVICE = vol.Schema(
{
vol.Required(ATTR_SOUND): cv.string,
vol.Required(ATTR_DEVICE_ID): cv.string,
},
)
SCHEMA_CUSTOM_COMMAND = vol.Schema(
{
vol.Required(ATTR_TEXT_COMMAND): cv.string,
vol.Required(ATTR_DEVICE_ID): cv.string,
}
)
@callback
def async_get_entry_id_for_service_call(
call: ServiceCall,
) -> tuple[dr.DeviceEntry, AmazonConfigEntry]:
"""Get the entry ID related to a service call (by device ID)."""
device_registry = dr.async_get(call.hass)
device_id = call.data[ATTR_DEVICE_ID]
if (device_entry := device_registry.async_get(device_id)) is None:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="invalid_device_id",
translation_placeholders={"device_id": device_id},
)
for entry_id in device_entry.config_entries:
if (entry := call.hass.config_entries.async_get_entry(entry_id)) is None:
continue
if entry.domain == DOMAIN:
if entry.state is not ConfigEntryState.LOADED:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="entry_not_loaded",
translation_placeholders={"entry": entry.title},
)
return (device_entry, entry)
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="config_entry_not_found",
translation_placeholders={"device_id": device_id},
)
async def _async_execute_action(call: ServiceCall, attribute: str) -> None:
"""Execute action on the device."""
device, config_entry = async_get_entry_id_for_service_call(call)
assert device.serial_number
value: str = call.data[attribute]
coordinator = config_entry.runtime_data
if attribute == ATTR_SOUND:
if value not in SOUNDS_LIST:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="invalid_sound_value",
translation_placeholders={"sound": value},
)
await coordinator.api.call_alexa_sound(
coordinator.data[device.serial_number], value
)
elif attribute == ATTR_TEXT_COMMAND:
await coordinator.api.call_alexa_text_command(
coordinator.data[device.serial_number], value
)
async def async_send_sound_notification(call: ServiceCall) -> None:
"""Send a sound notification to a AmazonDevice."""
await _async_execute_action(call, ATTR_SOUND)
async def async_send_text_command(call: ServiceCall) -> None:
"""Send a custom command to a AmazonDevice."""
await _async_execute_action(call, ATTR_TEXT_COMMAND)
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Set up the services for the Amazon Devices integration."""
for service_name, method, schema in (
(
SERVICE_SOUND_NOTIFICATION,
async_send_sound_notification,
SCHEMA_SOUND_SERVICE,
),
(
SERVICE_TEXT_COMMAND,
async_send_text_command,
SCHEMA_CUSTOM_COMMAND,
),
):
hass.services.async_register(DOMAIN, service_name, method, schema=schema)

View File

@@ -1,69 +0,0 @@
send_text_command:
fields:
device_id:
required: true
selector:
device:
integration: alexa_devices
text_command:
required: true
example: "Play B.B.C. on TuneIn"
selector:
text:
send_sound:
fields:
device_id:
required: true
selector:
device:
integration: alexa_devices
sound:
required: true
example: amzn_sfx_doorbell_chime
default: amzn_sfx_doorbell_chime
selector:
select:
options:
- air_horn_03
- amzn_sfx_cat_meow_1x_01
- amzn_sfx_church_bell_1x_02
- amzn_sfx_crowd_applause_01
- amzn_sfx_dog_med_bark_1x_02
- amzn_sfx_doorbell_01
- amzn_sfx_doorbell_chime_01
- amzn_sfx_doorbell_chime_02
- amzn_sfx_large_crowd_cheer_01
- amzn_sfx_lion_roar_02
- amzn_sfx_rooster_crow_01
- amzn_sfx_scifi_alarm_01
- amzn_sfx_scifi_alarm_04
- amzn_sfx_scifi_engines_on_02
- amzn_sfx_scifi_sheilds_up_01
- amzn_sfx_trumpet_bugle_04
- amzn_sfx_wolf_howl_02
- bell_02
- boing_01
- boing_03
- buzzers_pistols_01
- camera_01
- christmas_05
- clock_01
- futuristic_10
- halloween_bats
- halloween_crows
- halloween_footsteps
- halloween_wind
- halloween_wolf
- holiday_halloween_ghost
- horror_10
- med_system_alerts_minimal_dragon_short
- med_system_alerts_minimal_owl_short
- med_system_alerts_minimals_blue_wave_small
- med_system_alerts_minimals_galaxy_short
- med_system_alerts_minimals_panda_short
- med_system_alerts_minimals_tiger_short
- med_ui_success_generic_1-1
- squeaky_12
- zap_01
translation_key: sound
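For reference, on the side of this compare that still registers them, these services would be invoked with the standard service-call API. A hedged usage sketch follows; the device ID is a placeholder, while the service and field names come from the definitions above.

```python
# Usage sketch for the alexa_devices services defined above; device_id is a
# placeholder value.
from homeassistant.core import HomeAssistant


async def ring_doorbell_and_speak(hass: HomeAssistant, device_id: str) -> None:
    """Play a doorbell sound, then send a text command to the same device."""
    await hass.services.async_call(
        "alexa_devices",
        "send_sound",
        {"device_id": device_id, "sound": "amzn_sfx_doorbell_chime_01"},
        blocking=True,
    )
    await hass.services.async_call(
        "alexa_devices",
        "send_text_command",
        {"device_id": device_id, "text_command": "Play B.B.C. on TuneIn"},
        blocking=True,
    )
```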

View File

@@ -1,21 +1,23 @@
{ {
"common": { "common": {
"data_code": "One-time password (OTP code)", "data_code": "One-time password (OTP code)",
"data_description_country": "The country where your Amazon account is registered.",
"data_description_username": "The email address of your Amazon account.", "data_description_username": "The email address of your Amazon account.",
"data_description_password": "The password of your Amazon account.", "data_description_password": "The password of your Amazon account.",
"data_description_code": "The one-time password to log in to your account. Currently, only tokens from OTP applications are supported.", "data_description_code": "The one-time password to log in to your account. Currently, only tokens from OTP applications are supported."
"device_id_description": "The ID of the device to send the command to."
}, },
"config": { "config": {
"flow_title": "{username}", "flow_title": "{username}",
"step": { "step": {
"user": { "user": {
"data": { "data": {
"country": "[%key:common::config_flow::data::country%]",
"username": "[%key:common::config_flow::data::username%]", "username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]", "password": "[%key:common::config_flow::data::password%]",
"code": "[%key:component::alexa_devices::common::data_code%]" "code": "[%key:component::alexa_devices::common::data_code%]"
}, },
"data_description": { "data_description": {
"country": "[%key:component::alexa_devices::common::data_description_country%]",
"username": "[%key:component::alexa_devices::common::data_description_username%]", "username": "[%key:component::alexa_devices::common::data_description_username%]",
"password": "[%key:component::alexa_devices::common::data_description_password%]", "password": "[%key:component::alexa_devices::common::data_description_password%]",
"code": "[%key:component::alexa_devices::common::data_description_code%]" "code": "[%key:component::alexa_devices::common::data_description_code%]"
@@ -30,16 +32,6 @@
"password": "[%key:component::alexa_devices::common::data_description_password%]", "password": "[%key:component::alexa_devices::common::data_description_password%]",
"code": "[%key:component::alexa_devices::common::data_description_code%]" "code": "[%key:component::alexa_devices::common::data_description_code%]"
} }
},
"reconfigure": {
"data": {
"password": "[%key:common::config_flow::data::password%]",
"code": "[%key:component::alexa_devices::common::data_code%]"
},
"data_description": {
"password": "[%key:component::alexa_devices::common::data_description_password%]",
"code": "[%key:component::alexa_devices::common::data_description_code%]"
}
} }
}, },
"abort": { "abort": {
@@ -47,13 +39,12 @@
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"unknown": "[%key:common::config_flow::error::unknown%]" "unknown": "[%key:common::config_flow::error::unknown%]"
}, },
"error": { "error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"cannot_retrieve_data": "Unable to retrieve data from Amazon. Please try again later.",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"wrong_country": "Wrong country selected. Please select the country where your Amazon account is registered.",
"unknown": "[%key:common::config_flow::error::unknown%]" "unknown": "[%key:common::config_flow::error::unknown%]"
} }
}, },
@@ -92,105 +83,12 @@
} }
} }
}, },
"services": {
"send_sound": {
"name": "Send sound",
"description": "Sends a sound to a device",
"fields": {
"device_id": {
"name": "Device",
"description": "[%key:component::alexa_devices::common::device_id_description%]"
},
"sound": {
"name": "Alexa Skill sound file",
"description": "The sound file to play."
},
"sound_variant": {
"name": "Sound variant",
"description": "The variant of the sound to play."
}
}
},
"send_text_command": {
"name": "Send text command",
"description": "Sends a text command to a device",
"fields": {
"text_command": {
"name": "Alexa text command",
"description": "The text command to send."
},
"device_id": {
"name": "Device",
"description": "[%key:component::alexa_devices::common::device_id_description%]"
}
}
}
},
"selector": {
"sound": {
"options": {
"air_horn_03": "Air horn",
"amzn_sfx_cat_meow_1x_01": "Cat meow",
"amzn_sfx_church_bell_1x_02": "Church bell",
"amzn_sfx_crowd_applause_01": "Crowd applause",
"amzn_sfx_dog_med_bark_1x_02": "Dog bark",
"amzn_sfx_doorbell_01": "Doorbell 1",
"amzn_sfx_doorbell_chime_01": "Doorbell 2",
"amzn_sfx_doorbell_chime_02": "Doorbell 3",
"amzn_sfx_large_crowd_cheer_01": "Crowd cheers",
"amzn_sfx_lion_roar_02": "Lion roar",
"amzn_sfx_rooster_crow_01": "Rooster",
"amzn_sfx_scifi_alarm_01": "Sirens",
"amzn_sfx_scifi_alarm_04": "Red alert",
"amzn_sfx_scifi_engines_on_02": "Engines on",
"amzn_sfx_scifi_sheilds_up_01": "Shields up",
"amzn_sfx_trumpet_bugle_04": "Trumpet",
"amzn_sfx_wolf_howl_02": "Wolf howl",
"bell_02": "Bells",
"boing_01": "Boing 1",
"boing_03": "Boing 2",
"buzzers_pistols_01": "Buzzer",
"camera_01": "Camera",
"christmas_05": "Christmas bells",
"clock_01": "Ticking clock",
"futuristic_10": "Aircraft",
"halloween_bats": "Halloween bats",
"halloween_crows": "Halloween crows",
"halloween_footsteps": "Halloween spooky footsteps",
"halloween_wind": "Halloween wind",
"halloween_wolf": "Halloween wolf",
"holiday_halloween_ghost": "Halloween ghost",
"horror_10": "Halloween creepy door",
"med_system_alerts_minimal_dragon_short": "Friendly dragon",
"med_system_alerts_minimal_owl_short": "Happy owl",
"med_system_alerts_minimals_blue_wave_small": "Underwater World Sonata",
"med_system_alerts_minimals_galaxy_short": "Infinite Galaxy",
"med_system_alerts_minimals_panda_short": "Baby panda",
"med_system_alerts_minimals_tiger_short": "Playful tiger",
"med_ui_success_generic_1-1": "Success 1",
"squeaky_12": "Squeaky door",
"zap_01": "Zap"
}
}
},
"exceptions": { "exceptions": {
"cannot_connect_with_error": { "cannot_connect": {
"message": "Error connecting: {error}" "message": "Error connecting: {error}"
}, },
"cannot_retrieve_data_with_error": { "cannot_retrieve_data": {
"message": "Error retrieving data: {error}" "message": "Error retrieving data: {error}"
},
"device_serial_number_missing": {
"message": "Device serial number missing: {device_id}"
},
"invalid_device_id": {
"message": "Invalid device ID specified: {device_id}"
},
"invalid_sound_value": {
"message": "Invalid sound {sound} specified"
},
"entry_not_loaded": {
"message": "Entry not loaded: {entry}"
} }
} }
} }

View File

@@ -26,14 +26,14 @@ def alexa_api_call[_T: AmazonEntity, **_P](
self.coordinator.last_update_success = False self.coordinator.last_update_success = False
raise HomeAssistantError( raise HomeAssistantError(
translation_domain=DOMAIN, translation_domain=DOMAIN,
translation_key="cannot_connect_with_error", translation_key="cannot_connect",
translation_placeholders={"error": repr(err)}, translation_placeholders={"error": repr(err)},
) from err ) from err
except CannotRetrieveData as err: except CannotRetrieveData as err:
self.coordinator.last_update_success = False self.coordinator.last_update_success = False
raise HomeAssistantError( raise HomeAssistantError(
translation_domain=DOMAIN, translation_domain=DOMAIN,
translation_key="cannot_retrieve_data_with_error", translation_key="cannot_retrieve_data",
translation_placeholders={"error": repr(err)}, translation_placeholders={"error": repr(err)},
) from err ) from err
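The `alexa_api_call` decorator above wraps entity command methods so that library errors surface as `HomeAssistantError` with translated messages. A hedged usage sketch follows; the entity class, import path, and coordinator API method are illustrative assumptions, not taken from this diff.

```python
# Illustrative only: the entity class, import path, and API method are
# assumptions used to show where the decorator is applied.
from typing import Any

from homeassistant.components.switch import SwitchEntity

from .entity import AmazonEntity, alexa_api_call  # import path assumed


class ExampleAmazonSwitch(AmazonEntity, SwitchEntity):
    """Hypothetical switch entity guarded by alexa_api_call."""

    @alexa_api_call
    async def async_turn_on(self, **kwargs: Any) -> None:
        """CannotConnect/CannotRetrieveData raised here becomes HomeAssistantError."""
        await self.coordinator.api.set_device_state(self.device, True)  # assumed API
```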

View File

@@ -2,22 +2,11 @@
import amberelectric import amberelectric
from homeassistant.components.sensor import ConfigType
from homeassistant.const import CONF_API_TOKEN from homeassistant.const import CONF_API_TOKEN
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from .const import CONF_SITE_ID, DOMAIN, PLATFORMS from .const import CONF_SITE_ID, PLATFORMS
from .coordinator import AmberConfigEntry, AmberUpdateCoordinator from .coordinator import AmberConfigEntry, AmberUpdateCoordinator
from .services import setup_services
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Amber component."""
setup_services(hass)
return True
async def async_setup_entry(hass: HomeAssistant, entry: AmberConfigEntry) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: AmberConfigEntry) -> bool:

View File

@@ -16,7 +16,7 @@ from homeassistant.helpers.selector import (
SelectSelectorMode, SelectSelectorMode,
) )
from .const import CONF_SITE_ID, CONF_SITE_NAME, DOMAIN, REQUEST_TIMEOUT from .const import CONF_SITE_ID, CONF_SITE_NAME, DOMAIN
API_URL = "https://app.amber.com.au/developers" API_URL = "https://app.amber.com.au/developers"
@@ -64,9 +64,7 @@ class AmberElectricConfigFlow(ConfigFlow, domain=DOMAIN):
api = amberelectric.AmberApi(api_client) api = amberelectric.AmberApi(api_client)
try: try:
sites: list[Site] = filter_sites( sites: list[Site] = filter_sites(api.get_sites())
api.get_sites(_request_timeout=REQUEST_TIMEOUT)
)
except amberelectric.ApiException as api_exception: except amberelectric.ApiException as api_exception:
if api_exception.status == 403: if api_exception.status == 403:
self._errors[CONF_API_TOKEN] = "invalid_api_token" self._errors[CONF_API_TOKEN] = "invalid_api_token"

View File

@@ -1,25 +1,14 @@
"""Amber Electric Constants.""" """Amber Electric Constants."""
import logging import logging
from typing import Final
from homeassistant.const import Platform from homeassistant.const import Platform
DOMAIN: Final = "amberelectric" DOMAIN = "amberelectric"
CONF_SITE_NAME = "site_name" CONF_SITE_NAME = "site_name"
CONF_SITE_ID = "site_id" CONF_SITE_ID = "site_id"
ATTR_CHANNEL_TYPE = "channel_type"
ATTRIBUTION = "Data provided by Amber Electric" ATTRIBUTION = "Data provided by Amber Electric"
LOGGER = logging.getLogger(__package__) LOGGER = logging.getLogger(__package__)
PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR] PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR]
SERVICE_GET_FORECASTS = "get_forecasts"
GENERAL_CHANNEL = "general"
CONTROLLED_LOAD_CHANNEL = "controlled_load"
FEED_IN_CHANNEL = "feed_in"
REQUEST_TIMEOUT = 15

View File

@@ -10,14 +10,14 @@ from amberelectric.models.actual_interval import ActualInterval
from amberelectric.models.channel import ChannelType from amberelectric.models.channel import ChannelType
from amberelectric.models.current_interval import CurrentInterval from amberelectric.models.current_interval import CurrentInterval
from amberelectric.models.forecast_interval import ForecastInterval from amberelectric.models.forecast_interval import ForecastInterval
from amberelectric.models.price_descriptor import PriceDescriptor
from amberelectric.rest import ApiException from amberelectric.rest import ApiException
from homeassistant.config_entries import ConfigEntry from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import LOGGER, REQUEST_TIMEOUT from .const import LOGGER
from .helpers import normalize_descriptor
type AmberConfigEntry = ConfigEntry[AmberUpdateCoordinator] type AmberConfigEntry = ConfigEntry[AmberUpdateCoordinator]
@@ -49,6 +49,27 @@ def is_feed_in(interval: ActualInterval | CurrentInterval | ForecastInterval) ->
return interval.channel_type == ChannelType.FEEDIN return interval.channel_type == ChannelType.FEEDIN
def normalize_descriptor(descriptor: PriceDescriptor | None) -> str | None:
"""Return the snake case versions of descriptor names. Returns None if the name is not recognized."""
if descriptor is None:
return None
if descriptor.value == "spike":
return "spike"
if descriptor.value == "high":
return "high"
if descriptor.value == "neutral":
return "neutral"
if descriptor.value == "low":
return "low"
if descriptor.value == "veryLow":
return "very_low"
if descriptor.value == "extremelyLow":
return "extremely_low"
if descriptor.value == "negative":
return "negative"
return None
class AmberUpdateCoordinator(DataUpdateCoordinator): class AmberUpdateCoordinator(DataUpdateCoordinator):
"""AmberUpdateCoordinator - In charge of downloading the data for a site, which all the sensors read.""" """AmberUpdateCoordinator - In charge of downloading the data for a site, which all the sensors read."""
@@ -82,11 +103,7 @@ class AmberUpdateCoordinator(DataUpdateCoordinator):
"grid": {}, "grid": {},
} }
try: try:
data = self._api.get_current_prices( data = self._api.get_current_prices(self.site_id, next=48)
self.site_id,
next=288,
_request_timeout=REQUEST_TIMEOUT,
)
intervals = [interval.actual_instance for interval in data] intervals = [interval.actual_instance for interval in data]
except ApiException as api_exception: except ApiException as api_exception:
raise UpdateFailed("Missing price data, skipping update") from api_exception raise UpdateFailed("Missing price data, skipping update") from api_exception
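The chain of `if` checks reintroduced in `normalize_descriptor` above is equivalent to a simple lookup table; a compact, behaviour-preserving alternative sketch:

```python
# Equivalent sketch of normalize_descriptor using a mapping instead of chained ifs.
from amberelectric.models.price_descriptor import PriceDescriptor

_DESCRIPTOR_MAP = {
    "spike": "spike",
    "high": "high",
    "neutral": "neutral",
    "low": "low",
    "veryLow": "very_low",
    "extremelyLow": "extremely_low",
    "negative": "negative",
}


def normalize_descriptor(descriptor: PriceDescriptor | None) -> str | None:
    """Return the snake_case descriptor name, or None if unrecognized."""
    if descriptor is None:
        return None
    return _DESCRIPTOR_MAP.get(descriptor.value)
```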

Some files were not shown because too many files have changed in this diff.