Compare commits

1 commit

d184037f5a  Add delete support to media source  (Paulus Schoutsen, 2025-09-23 22:42:15 -04:00)

944 changed files with 17104 additions and 52353 deletions

View File

@@ -58,7 +58,6 @@ base_platforms: &base_platforms
 # Extra components that trigger the full suite
 components: &components
   - homeassistant/components/alexa/**
-  - homeassistant/components/analytics/**
   - homeassistant/components/application_credentials/**
   - homeassistant/components/assist_pipeline/**
   - homeassistant/components/auth/**

View File

@@ -190,7 +190,7 @@ jobs:
          echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE
      - name: Login to GitHub Container Registry
-       uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+       uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
@@ -257,7 +257,7 @@ jobs:
          fi
      - name: Login to GitHub Container Registry
-       uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+       uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
@@ -332,14 +332,14 @@ jobs:
      - name: Login to DockerHub
        if: matrix.registry == 'docker.io/homeassistant'
-       uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+       uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Login to GitHub Container Registry
        if: matrix.registry == 'ghcr.io/home-assistant'
-       uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+       uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
@@ -504,7 +504,7 @@ jobs:
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Login to GitHub Container Registry
-       uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+       uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}

View File

@@ -40,7 +40,7 @@ env:
   CACHE_VERSION: 8
   UV_CACHE_VERSION: 1
   MYPY_CACHE_VERSION: 1
-  HA_SHORT_VERSION: "2025.11"
+  HA_SHORT_VERSION: "2025.10"
   DEFAULT_PYTHON: "3.13"
   ALL_PYTHON_VERSIONS: "['3.13']"
   # 10.3 is the oldest supported version
@@ -263,7 +263,7 @@ jobs:
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+       uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          key: >-
@@ -279,7 +279,7 @@ jobs:
          uv pip install "$(cat requirements_test.txt | grep pre-commit)"
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-       uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+       uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          lookup-only: true
@@ -309,7 +309,7 @@ jobs:
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -318,7 +318,7 @@ jobs:
            needs.info.outputs.pre-commit_cache_key }}
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-       uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          fail-on-cache-miss: true
@@ -349,7 +349,7 @@ jobs:
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -358,7 +358,7 @@ jobs:
            needs.info.outputs.pre-commit_cache_key }}
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-       uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          fail-on-cache-miss: true
@@ -389,7 +389,7 @@ jobs:
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -398,7 +398,7 @@ jobs:
            needs.info.outputs.pre-commit_cache_key }}
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-       uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          fail-on-cache-miss: true
@@ -505,7 +505,7 @@ jobs:
            env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+       uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          key: >-
@@ -513,7 +513,7 @@ jobs:
            needs.info.outputs.python_cache_key }}
      - name: Restore uv wheel cache
        if: steps.cache-venv.outputs.cache-hit != 'true'
-       uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+       uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ env.UV_CACHE_DIR }}
          key: >-
@@ -525,7 +525,7 @@ jobs:
            env.HA_SHORT_VERSION }}-
      - name: Check if apt cache exists
        id: cache-apt-check
-       uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+       uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          lookup-only: ${{ steps.cache-venv.outputs.cache-hit == 'true' }}
          path: |
@@ -570,7 +570,7 @@ jobs:
          fi
      - name: Save apt cache
        if: steps.cache-apt-check.outputs.cache-hit != 'true'
-       uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+       uses: actions/cache/save@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: |
            ${{ env.APT_CACHE_DIR }}
@@ -622,7 +622,7 @@ jobs:
      - base
    steps:
      - name: Restore apt cache
-       uses: actions/cache/restore@v4.3.0
+       uses: actions/cache/restore@v4.2.4
        with:
          path: |
            ${{ env.APT_CACHE_DIR }}
@@ -651,7 +651,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -684,7 +684,7 @@ jobs:
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -711,7 +711,7 @@ jobs:
      - name: Check out code from GitHub
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Dependency review
-       uses: actions/dependency-review-action@56339e523c0409420f6c2c9a2f4292bbb3c07dd3 # v4.8.0
+       uses: actions/dependency-review-action@595b5aeba73380359d98a5e087f648dbb0edce1b # v4.7.3
        with:
          license-check: false # We use our own license audit checks
@@ -741,7 +741,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -784,7 +784,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -831,7 +831,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -883,7 +883,7 @@ jobs:
            env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -891,7 +891,7 @@ jobs:
          ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.python_cache_key }}
      - name: Restore mypy cache
-       uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+       uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: .mypy_cache
          key: >-
@@ -935,7 +935,7 @@ jobs:
    name: Split tests for full run
    steps:
      - name: Restore apt cache
-       uses: actions/cache/restore@v4.3.0
+       uses: actions/cache/restore@v4.2.4
        with:
          path: |
            ${{ env.APT_CACHE_DIR }}
@@ -967,7 +967,7 @@ jobs:
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-       uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1009,7 +1009,7 @@ jobs:
      Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
    steps:
      - name: Restore apt cache
-       uses: actions/cache/restore@v4.3.0
+       uses: actions/cache/restore@v4.2.4
        with:
          path: |
            ${{ env.APT_CACHE_DIR }}
@@ -1042,7 +1042,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1156,7 +1156,7 @@ jobs:
      Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }}
    steps:
      - name: Restore apt cache
-       uses: actions/cache/restore@v4.3.0
+       uses: actions/cache/restore@v4.2.4
        with:
          path: |
            ${{ env.APT_CACHE_DIR }}
@@ -1189,7 +1189,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1310,7 +1310,7 @@ jobs:
      Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }}
    steps:
      - name: Restore apt cache
-       uses: actions/cache/restore@v4.3.0
+       uses: actions/cache/restore@v4.2.4
        with:
          path: |
            ${{ env.APT_CACHE_DIR }}
@@ -1345,7 +1345,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1485,7 +1485,7 @@ jobs:
      Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
    steps:
      - name: Restore apt cache
-       uses: actions/cache/restore@v4.3.0
+       uses: actions/cache/restore@v4.2.4
        with:
          path: |
            ${{ env.APT_CACHE_DIR }}
@@ -1518,7 +1518,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-       uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+       uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true

View File

@@ -24,11 +24,11 @@ jobs:
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Initialize CodeQL
-       uses: github/codeql-action/init@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
+       uses: github/codeql-action/init@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
        with:
          languages: python
      - name: Perform CodeQL Analysis
-       uses: github/codeql-action/analyze@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
+       uses: github/codeql-action/analyze@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
        with:
          category: "/language:python"

View File

@@ -160,7 +160,7 @@ jobs:
      # home-assistant/wheels doesn't support sha pinning
      - name: Build wheels
-       uses: home-assistant/wheels@2025.09.1
+       uses: home-assistant/wheels@2025.07.0
        with:
          abi: ${{ matrix.abi }}
          tag: musllinux_1_2
@@ -221,7 +221,7 @@ jobs:
      # home-assistant/wheels doesn't support sha pinning
      - name: Build wheels
-       uses: home-assistant/wheels@2025.09.1
+       uses: home-assistant/wheels@2025.07.0
        with:
          abi: ${{ matrix.abi }}
          tag: musllinux_1_2

View File

@@ -203,7 +203,6 @@ homeassistant.components.feedreader.*
 homeassistant.components.file_upload.*
 homeassistant.components.filesize.*
 homeassistant.components.filter.*
-homeassistant.components.firefly_iii.*
 homeassistant.components.fitbit.*
 homeassistant.components.flexit_bacnet.*
 homeassistant.components.flux_led.*
@@ -326,7 +325,6 @@ homeassistant.components.london_underground.*
 homeassistant.components.lookin.*
 homeassistant.components.lovelace.*
 homeassistant.components.luftdaten.*
-homeassistant.components.lunatone.*
 homeassistant.components.madvr.*
 homeassistant.components.manual.*
 homeassistant.components.mastodon.*
@@ -445,7 +443,6 @@ homeassistant.components.rituals_perfume_genie.*
 homeassistant.components.roborock.*
 homeassistant.components.roku.*
 homeassistant.components.romy.*
-homeassistant.components.route_b_smart_meter.*
 homeassistant.components.rpi_power.*
 homeassistant.components.rss_feed_template.*
 homeassistant.components.russound_rio.*
@@ -555,7 +552,6 @@ homeassistant.components.vacuum.*
 homeassistant.components.vallox.*
 homeassistant.components.valve.*
 homeassistant.components.velbus.*
-homeassistant.components.vivotek.*
 homeassistant.components.vlc_telnet.*
 homeassistant.components.vodafone_station.*
 homeassistant.components.volvo.*

CODEOWNERS (generated)
View File

@@ -316,8 +316,6 @@ build.json @home-assistant/supervisor
 /tests/components/crownstone/ @Crownstone @RicArch97
 /homeassistant/components/cups/ @fabaff
 /tests/components/cups/ @fabaff
-/homeassistant/components/cync/ @Kinachi249
-/tests/components/cync/ @Kinachi249
 /homeassistant/components/daikin/ @fredrike
 /tests/components/daikin/ @fredrike
 /homeassistant/components/date/ @home-assistant/core
@@ -412,8 +410,6 @@ build.json @home-assistant/supervisor
 /homeassistant/components/egardia/ @jeroenterheerdt
 /homeassistant/components/eheimdigital/ @autinerd
 /tests/components/eheimdigital/ @autinerd
-/homeassistant/components/ekeybionyx/ @richardpolzer
-/tests/components/ekeybionyx/ @richardpolzer
 /homeassistant/components/electrasmart/ @jafar-atili
 /tests/components/electrasmart/ @jafar-atili
 /homeassistant/components/electric_kiwi/ @mikey0000
@@ -492,8 +488,6 @@ build.json @home-assistant/supervisor
 /tests/components/filesize/ @gjohansson-ST
 /homeassistant/components/filter/ @dgomes
 /tests/components/filter/ @dgomes
-/homeassistant/components/firefly_iii/ @erwindouna
-/tests/components/firefly_iii/ @erwindouna
 /homeassistant/components/fireservicerota/ @cyberjunky
 /tests/components/fireservicerota/ @cyberjunky
 /homeassistant/components/firmata/ @DaAwesomeP
@@ -910,8 +904,6 @@ build.json @home-assistant/supervisor
 /homeassistant/components/luci/ @mzdrale
 /homeassistant/components/luftdaten/ @fabaff @frenck
 /tests/components/luftdaten/ @fabaff @frenck
-/homeassistant/components/lunatone/ @MoonDevLT
-/tests/components/lunatone/ @MoonDevLT
 /homeassistant/components/lupusec/ @majuss @suaveolent
 /tests/components/lupusec/ @majuss @suaveolent
 /homeassistant/components/lutron/ @cdheiser @wilburCForce
@@ -957,8 +949,6 @@ build.json @home-assistant/supervisor
 /tests/components/met_eireann/ @DylanGore
 /homeassistant/components/meteo_france/ @hacf-fr @oncleben31 @Quentame
 /tests/components/meteo_france/ @hacf-fr @oncleben31 @Quentame
-/homeassistant/components/meteo_lt/ @xE1H
-/tests/components/meteo_lt/ @xE1H
 /homeassistant/components/meteoalarm/ @rolfberkenbosch
 /homeassistant/components/meteoclimatic/ @adrianmo
 /tests/components/meteoclimatic/ @adrianmo
@@ -982,6 +972,8 @@ build.json @home-assistant/supervisor
 /tests/components/moat/ @bdraco
 /homeassistant/components/mobile_app/ @home-assistant/core
 /tests/components/mobile_app/ @home-assistant/core
+/homeassistant/components/modbus/ @janiversen
+/tests/components/modbus/ @janiversen
 /homeassistant/components/modem_callerid/ @tkdrob
 /tests/components/modem_callerid/ @tkdrob
 /homeassistant/components/modern_forms/ @wonderslug
@@ -1196,6 +1188,8 @@ build.json @home-assistant/supervisor
 /tests/components/plex/ @jjlawren
 /homeassistant/components/plugwise/ @CoMPaTech @bouwew
 /tests/components/plugwise/ @CoMPaTech @bouwew
+/homeassistant/components/plum_lightpad/ @ColinHarrington @prystupa
+/tests/components/plum_lightpad/ @ColinHarrington @prystupa
 /homeassistant/components/point/ @fredrike
 /tests/components/point/ @fredrike
 /homeassistant/components/pooldose/ @lmaertin
@@ -1338,8 +1332,6 @@ build.json @home-assistant/supervisor
 /tests/components/roomba/ @pschmitt @cyr-ius @shenxn @Orhideous
 /homeassistant/components/roon/ @pavoni
 /tests/components/roon/ @pavoni
-/homeassistant/components/route_b_smart_meter/ @SeraphicRav
-/tests/components/route_b_smart_meter/ @SeraphicRav
 /homeassistant/components/rpi_power/ @shenxn @swetoast
 /tests/components/rpi_power/ @shenxn @swetoast
 /homeassistant/components/rss_feed_template/ @home-assistant/core

View File

@@ -1,10 +1,10 @@
 image: ghcr.io/home-assistant/{arch}-homeassistant
 build_from:
-  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.10.0
-  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.10.0
-  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.10.0
-  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.0
-  i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.10.0
+  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.1
+  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.1
+  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.1
+  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.1
+  i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.1
 codenotary:
   signer: notary@home-assistant.io
   base_image: notary@home-assistant.io

View File

@@ -616,25 +616,12 @@ async def async_enable_logging(
        ),
    )

-    logger = logging.getLogger()
-    logger.setLevel(logging.INFO if verbose else logging.WARNING)
-
+    # Log errors to a file if we have write access to file or config dir
    if log_file is None:
-        default_log_path = hass.config.path(ERROR_LOG_FILENAME)
-        if "SUPERVISOR" in os.environ:
-            _LOGGER.info("Running in Supervisor, not logging to file")
-            # Rename the default log file if it exists, since previous versions created
-            # it even on Supervisor
-            if os.path.isfile(default_log_path):
-                with contextlib.suppress(OSError):
-                    os.rename(default_log_path, f"{default_log_path}.old")
-            err_log_path = None
-        else:
-            err_log_path = default_log_path
+        err_log_path = hass.config.path(ERROR_LOG_FILENAME)
    else:
        err_log_path = os.path.abspath(log_file)

-    if err_log_path:
-        err_path_exists = os.path.isfile(err_log_path)
-        err_dir = os.path.dirname(err_log_path)
+    err_path_exists = os.path.isfile(err_log_path)
+    err_dir = os.path.dirname(err_log_path)
@@ -648,7 +635,10 @@ async def async_enable_logging(
        )

        err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))

+        logger = logging.getLogger()
        logger.addHandler(err_handler)
+        logger.setLevel(logging.INFO if verbose else logging.WARNING)

        # Save the log file location for access by other components.
        hass.data[DATA_LOGGING] = err_log_path

View File

@@ -1,5 +0,0 @@
{
"domain": "eltako",
"name": "Eltako",
"iot_standards": ["matter"]
}

View File

@@ -0,0 +1,5 @@
+{
+  "domain": "ibm",
+  "name": "IBM",
+  "integrations": ["watson_iot", "watson_tts"]
+}

View File

@@ -1,5 +0,0 @@
-{
-  "domain": "konnected",
-  "name": "Konnected",
-  "integrations": ["konnected", "konnected_esphome"]
-}

View File

@@ -1,5 +0,0 @@
-{
-  "domain": "level",
-  "name": "Level",
-  "iot_standards": ["matter"]
-}

View File

@@ -4,9 +4,11 @@ from __future__ import annotations

 from datetime import timedelta
 import logging
+from typing import cast

 from aioacaia.acaiascale import AcaiaScale
 from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError
+from bleak import BleakScanner

 from homeassistant.components.bluetooth import async_get_scanner
 from homeassistant.config_entries import ConfigEntry
@@ -43,7 +45,7 @@ class AcaiaCoordinator(DataUpdateCoordinator[None]):
            name=entry.title,
            is_new_style_scale=entry.data[CONF_IS_NEW_STYLE_SCALE],
            notify_callback=self.async_update_listeners,
-           scanner=async_get_scanner(hass),
+           scanner=cast(BleakScanner, async_get_scanner(hass)),
        )

    @property

View File

@@ -7,5 +7,5 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["accuweather"],
-  "requirements": ["accuweather==4.2.2"]
+  "requirements": ["accuweather==4.2.1"]
 }

View File

@@ -4,18 +4,10 @@ from __future__ import annotations

 from airos.airos8 import AirOS8

-from homeassistant.const import (
-    CONF_HOST,
-    CONF_PASSWORD,
-    CONF_SSL,
-    CONF_USERNAME,
-    CONF_VERIFY_SSL,
-    Platform,
-)
+from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.aiohttp_client import async_get_clientsession

-from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, SECTION_ADVANCED_SETTINGS
 from .coordinator import AirOSConfigEntry, AirOSDataUpdateCoordinator

 _PLATFORMS: list[Platform] = [
@@ -29,16 +21,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo
    # By default airOS 8 comes with self-signed SSL certificates,
    # with no option in the web UI to change or upload a custom certificate.
-   session = async_get_clientsession(
-       hass, verify_ssl=entry.data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL]
-   )
+   session = async_get_clientsession(hass, verify_ssl=False)

    airos_device = AirOS8(
        host=entry.data[CONF_HOST],
        username=entry.data[CONF_USERNAME],
        password=entry.data[CONF_PASSWORD],
        session=session,
-       use_ssl=entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL],
    )

    coordinator = AirOSDataUpdateCoordinator(hass, entry, airos_device)
@@ -51,30 +40,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo
    return True


-async def async_migrate_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
-    """Migrate old config entry."""
-    if entry.version > 1:
-        # This means the user has downgraded from a future version
-        return False
-
-    if entry.version == 1 and entry.minor_version == 1:
-        new_data = {**entry.data}
-        advanced_data = {
-            CONF_SSL: DEFAULT_SSL,
-            CONF_VERIFY_SSL: DEFAULT_VERIFY_SSL,
-        }
-        new_data[SECTION_ADVANCED_SETTINGS] = advanced_data
-
-        hass.config_entries.async_update_entry(
-            entry,
-            data=new_data,
-            minor_version=2,
-        )
-
-    return True
-
-
 async def async_unload_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)

View File

@@ -2,7 +2,6 @@

 from __future__ import annotations

-from collections.abc import Mapping
 import logging
 from typing import Any
@@ -15,23 +14,11 @@ from airos.exceptions import (
 )
 import voluptuous as vol

-from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
-from homeassistant.const import (
-    CONF_HOST,
-    CONF_PASSWORD,
-    CONF_SSL,
-    CONF_USERNAME,
-    CONF_VERIFY_SSL,
-)
-from homeassistant.data_entry_flow import section
+from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
+from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
-from homeassistant.helpers.selector import (
-    TextSelector,
-    TextSelectorConfig,
-    TextSelectorType,
-)

-from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, SECTION_ADVANCED_SETTINGS
+from .const import DOMAIN
 from .coordinator import AirOS8

 _LOGGER = logging.getLogger(__name__)
@@ -41,15 +28,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
        vol.Required(CONF_HOST): str,
        vol.Required(CONF_USERNAME, default="ubnt"): str,
        vol.Required(CONF_PASSWORD): str,
-       vol.Required(SECTION_ADVANCED_SETTINGS): section(
-           vol.Schema(
-               {
-                   vol.Required(CONF_SSL, default=DEFAULT_SSL): bool,
-                   vol.Required(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): bool,
-               }
-           ),
-           {"collapsed": True},
-       ),
    }
 )
@@ -58,47 +36,23 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Ubiquiti airOS."""

    VERSION = 1
-   MINOR_VERSION = 2
-
-   def __init__(self) -> None:
-       """Initialize the config flow."""
-       super().__init__()
-       self.airos_device: AirOS8
-       self.errors: dict[str, str] = {}

    async def async_step_user(
-       self, user_input: dict[str, Any] | None = None
+       self,
+       user_input: dict[str, Any] | None = None,
    ) -> ConfigFlowResult:
-       """Handle the manual input of host and credentials."""
-       self.errors = {}
+       """Handle the initial step."""
+       errors: dict[str, str] = {}
        if user_input is not None:
-           validated_info = await self._validate_and_get_device_info(user_input)
-           if validated_info:
-               return self.async_create_entry(
-                   title=validated_info["title"],
-                   data=validated_info["data"],
-               )
-       return self.async_show_form(
-           step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=self.errors
-       )
-
-   async def _validate_and_get_device_info(
-       self, config_data: dict[str, Any]
-   ) -> dict[str, Any] | None:
-       """Validate user input with the device API."""
-       # By default airOS 8 comes with self-signed SSL certificates,
-       # with no option in the web UI to change or upload a custom certificate.
-       session = async_get_clientsession(
-           self.hass,
-           verify_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL],
-       )
-
-       airos_device = AirOS8(
-           host=config_data[CONF_HOST],
-           username=config_data[CONF_USERNAME],
-           password=config_data[CONF_PASSWORD],
-           session=session,
-           use_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_SSL],
-       )
-       try:
-           await airos_device.login()
+           # By default airOS 8 comes with self-signed SSL certificates,
+           # with no option in the web UI to change or upload a custom certificate.
+           session = async_get_clientsession(self.hass, verify_ssl=False)
+
+           airos_device = AirOS8(
+               host=user_input[CONF_HOST],
+               username=user_input[CONF_USERNAME],
+               password=user_input[CONF_PASSWORD],
+               session=session,
+           )
+           try:
+               await airos_device.login()
@@ -108,59 +62,21 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
                AirOSConnectionSetupError,
                AirOSDeviceConnectionError,
            ):
-               self.errors["base"] = "cannot_connect"
+               errors["base"] = "cannot_connect"
            except (AirOSConnectionAuthenticationError, AirOSDataMissingError):
-               self.errors["base"] = "invalid_auth"
+               errors["base"] = "invalid_auth"
            except AirOSKeyDataMissingError:
-               self.errors["base"] = "key_data_missing"
+               errors["base"] = "key_data_missing"
            except Exception:
-               _LOGGER.exception("Unexpected exception during credential validation")
-               self.errors["base"] = "unknown"
+               _LOGGER.exception("Unexpected exception")
+               errors["base"] = "unknown"
            else:
                await self.async_set_unique_id(airos_data.derived.mac)
-               if self.source == SOURCE_REAUTH:
-                   self._abort_if_unique_id_mismatch()
-               else:
-                   self._abort_if_unique_id_configured()
-               return {"title": airos_data.host.hostname, "data": config_data}
-       return None
-
-   async def async_step_reauth(
-       self,
-       user_input: Mapping[str, Any],
-   ) -> ConfigFlowResult:
-       """Perform reauthentication upon an API authentication error."""
-       return await self.async_step_reauth_confirm(user_input)
-
-   async def async_step_reauth_confirm(
-       self,
-       user_input: Mapping[str, Any],
-   ) -> ConfigFlowResult:
-       """Perform reauthentication upon an API authentication error."""
-       self.errors = {}
-
-       if user_input:
-           validate_data = {**self._get_reauth_entry().data, **user_input}
-           if await self._validate_and_get_device_info(config_data=validate_data):
-               return self.async_update_reload_and_abort(
-                   self._get_reauth_entry(),
-                   data_updates=validate_data,
-               )
+               self._abort_if_unique_id_configured()
+               return self.async_create_entry(
+                   title=airos_data.host.hostname, data=user_input
+               )

        return self.async_show_form(
-           step_id="reauth_confirm",
-           data_schema=vol.Schema(
-               {
-                   vol.Required(CONF_PASSWORD): TextSelector(
-                       TextSelectorConfig(
-                           type=TextSelectorType.PASSWORD,
-                           autocomplete="current-password",
-                       )
-                   ),
-               }
-           ),
-           errors=self.errors,
+           step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
        )

View File

@@ -7,8 +7,3 @@ DOMAIN = "airos"
 SCAN_INTERVAL = timedelta(minutes=1)
 MANUFACTURER = "Ubiquiti"
-
-DEFAULT_VERIFY_SSL = False
-DEFAULT_SSL = True
-SECTION_ADVANCED_SETTINGS = "advanced_settings"

View File

@@ -14,7 +14,7 @@ from airos.exceptions import (

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import ConfigEntryAuthFailed
+from homeassistant.exceptions import ConfigEntryError
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

 from .const import DOMAIN, SCAN_INTERVAL
@@ -47,9 +47,9 @@ class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOS8Data]):
        try:
            await self.airos_device.login()
            return await self.airos_device.status()
-       except AirOSConnectionAuthenticationError as err:
+       except (AirOSConnectionAuthenticationError,) as err:
            _LOGGER.exception("Error authenticating with airOS device")
-           raise ConfigEntryAuthFailed(
+           raise ConfigEntryError(
                translation_domain=DOMAIN, translation_key="invalid_auth"
            ) from err
        except (

View File

@@ -2,11 +2,11 @@

 from __future__ import annotations

-from homeassistant.const import CONF_HOST, CONF_SSL
+from homeassistant.const import CONF_HOST
 from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
 from homeassistant.helpers.update_coordinator import CoordinatorEntity

-from .const import DOMAIN, MANUFACTURER, SECTION_ADVANCED_SETTINGS
+from .const import DOMAIN, MANUFACTURER
 from .coordinator import AirOSDataUpdateCoordinator
@@ -20,14 +20,9 @@ class AirOSEntity(CoordinatorEntity[AirOSDataUpdateCoordinator]):
        super().__init__(coordinator)

        airos_data = self.coordinator.data

-       url_schema = (
-           "https"
-           if coordinator.config_entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL]
-           else "http"
-       )
        configuration_url: str | None = (
-           f"{url_schema}://{coordinator.config_entry.data[CONF_HOST]}"
+           f"https://{coordinator.config_entry.data[CONF_HOST]}"
        )

        self._attr_device_info = DeviceInfo(

View File

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/airos",
   "iot_class": "local_polling",
   "quality_scale": "bronze",
-  "requirements": ["airos==0.5.4"]
+  "requirements": ["airos==0.5.1"]
 }

View File

@@ -2,14 +2,6 @@
   "config": {
     "flow_title": "Ubiquiti airOS device",
     "step": {
-      "reauth_confirm": {
-        "data": {
-          "password": "[%key:common::config_flow::data::password%]"
-        },
-        "data_description": {
-          "password": "[%key:component::airos::config::step::user::data_description::password%]"
-        }
-      },
       "user": {
         "data": {
           "host": "[%key:common::config_flow::data::host%]",
@@ -20,18 +12,6 @@
           "host": "IP address or hostname of the airOS device",
           "username": "Administrator username for the airOS device, normally 'ubnt'",
           "password": "Password configured through the UISP app or web interface"
-        },
-        "sections": {
-          "advanced_settings": {
-            "data": {
-              "ssl": "Use HTTPS",
-              "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
-            },
-            "data_description": {
-              "ssl": "Whether the connection should be encrypted (required for most devices)",
-              "verify_ssl": "Whether the certificate should be verified when using HTTPS. This should be off for self-signed certificates"
-            }
-          }
         }
       }
     },
@@ -42,9 +22,7 @@
       "unknown": "[%key:common::config_flow::error::unknown%]"
     },
     "abort": {
-      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
-      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
-      "unique_id_mismatch": "Re-authentication should be used for the same device not a new one"
+      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
     }
   },
   "entity": {

View File

@@ -23,10 +23,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
    }
 )

-URL_API_INTEGRATION = {
-    "url": "https://dashboard.airthings.com/integrations/api-integration"
-}
-

 class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Airthings."""
@@ -41,7 +37,11 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
            return self.async_show_form(
                step_id="user",
                data_schema=STEP_USER_DATA_SCHEMA,
-               description_placeholders=URL_API_INTEGRATION,
+               description_placeholders={
+                   "url": (
+                       "https://dashboard.airthings.com/integrations/api-integration"
+                   ),
+               },
            )

        errors = {}
@@ -65,8 +65,5 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
            return self.async_create_entry(title="Airthings", data=user_input)

        return self.async_show_form(
-           step_id="user",
-           data_schema=STEP_USER_DATA_SCHEMA,
-           errors=errors,
-           description_placeholders=URL_API_INTEGRATION,
+           step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
        )

View File

@@ -4,10 +4,10 @@
       "user": {
         "data": {
           "id": "ID",
-          "secret": "Secret"
-        },
-        "description": "Login at {url} to find your credentials"
+          "secret": "Secret",
+          "description": "Login at {url} to find your credentials"
+        }
       }
     },

View File

@@ -171,7 +171,7 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
            return self.async_abort(reason="no_devices_found")

        titles = {
-           address: get_name(discovery.device)
+           address: discovery.device.name
            for (address, discovery) in self._discovered_devices.items()
        }
        return self.async_show_form(

View File

@@ -24,5 +24,5 @@
   "dependencies": ["bluetooth_adapters"],
   "documentation": "https://www.home-assistant.io/integrations/airthings_ble",
   "iot_class": "local_polling",
-  "requirements": ["airthings-ble==1.1.1"]
+  "requirements": ["airthings-ble==0.9.2"]
 }

View File

@@ -114,8 +114,6 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = {
    ),
 }

-PARALLEL_UPDATES = 0
-

 @callback
 def async_migrate(hass: HomeAssistant, address: str, sensor_name: str) -> None:

View File

@@ -6,9 +6,6 @@
         "description": "[%key:component::bluetooth::config::step::user::description%]",
         "data": {
           "address": "[%key:common::config_flow::data::device%]"
-        },
-        "data_description": {
-          "address": "The Airthings devices discovered via Bluetooth."
         }
       },
       "bluetooth_confirm": {

View File

@@ -2,14 +2,17 @@

 from airtouch4pyapi import AirTouch

+from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_HOST, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady

-from .coordinator import AirTouch4ConfigEntry, AirtouchDataUpdateCoordinator
+from .coordinator import AirtouchDataUpdateCoordinator

 PLATFORMS = [Platform.CLIMATE]

+type AirTouch4ConfigEntry = ConfigEntry[AirtouchDataUpdateCoordinator]
+

 async def async_setup_entry(hass: HomeAssistant, entry: AirTouch4ConfigEntry) -> bool:
    """Set up AirTouch4 from a config entry."""
@@ -19,7 +22,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirTouch4ConfigEntry) ->
    info = airtouch.GetAcs()
    if not info:
        raise ConfigEntryNotReady
-   coordinator = AirtouchDataUpdateCoordinator(hass, entry, airtouch)
+   coordinator = AirtouchDataUpdateCoordinator(hass, airtouch)
    await coordinator.async_config_entry_first_refresh()
    entry.runtime_data = coordinator

View File

@@ -2,34 +2,26 @@

 import logging

-from airtouch4pyapi import AirTouch
 from airtouch4pyapi.airtouch import AirTouchStatus

 from homeassistant.components.climate import SCAN_INTERVAL
-from homeassistant.config_entries import ConfigEntry
-from homeassistant.core import HomeAssistant
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

 from .const import DOMAIN

 _LOGGER = logging.getLogger(__name__)

-type AirTouch4ConfigEntry = ConfigEntry[AirtouchDataUpdateCoordinator]
-

 class AirtouchDataUpdateCoordinator(DataUpdateCoordinator):
    """Class to manage fetching Airtouch data."""

-   def __init__(
-       self, hass: HomeAssistant, entry: AirTouch4ConfigEntry, airtouch: AirTouch
-   ) -> None:
+   def __init__(self, hass, airtouch):
        """Initialize global Airtouch data updater."""
        self.airtouch = airtouch

        super().__init__(
            hass,
            _LOGGER,
-           config_entry=entry,
            name=DOMAIN,
            update_interval=SCAN_INTERVAL,
        )

View File

@@ -6,19 +6,17 @@ from collections.abc import Callable
 from dataclasses import dataclass
 from typing import Any, Final

-from aioairzone.common import GrilleAngle, OperationMode, QAdapt, SleepTimeout
+from aioairzone.common import GrilleAngle, OperationMode, SleepTimeout
 from aioairzone.const import (
    API_COLD_ANGLE,
    API_HEAT_ANGLE,
    API_MODE,
-   API_Q_ADAPT,
    API_SLEEP,
    AZD_COLD_ANGLE,
    AZD_HEAT_ANGLE,
    AZD_MASTER,
    AZD_MODE,
    AZD_MODES,
-   AZD_Q_ADAPT,
    AZD_SLEEP,
    AZD_ZONES,
 )
@@ -67,14 +65,6 @@ SLEEP_DICT: Final[dict[str, int]] = {
    "90m": SleepTimeout.SLEEP_90,
 }

-Q_ADAPT_DICT: Final[dict[str, int]] = {
-    "standard": QAdapt.STANDARD,
-    "power": QAdapt.POWER,
-    "silence": QAdapt.SILENCE,
-    "minimum": QAdapt.MINIMUM,
-    "maximum": QAdapt.MAXIMUM,
-}
-

 def main_zone_options(
    zone_data: dict[str, Any],
@@ -93,14 +83,6 @@ MAIN_ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = (
        options_fn=main_zone_options,
        translation_key="modes",
    ),
-   AirzoneSelectDescription(
-       api_param=API_Q_ADAPT,
-       entity_category=EntityCategory.CONFIG,
-       key=AZD_Q_ADAPT,
-       options=list(Q_ADAPT_DICT),
-       options_dict=Q_ADAPT_DICT,
-       translation_key="q_adapt",
-   ),
 )

View File

@@ -63,16 +63,6 @@
            "stop": "Stop"
          }
        },
-       "q_adapt": {
-         "name": "Q-Adapt",
-         "state": {
-           "standard": "Standard",
-           "power": "Power",
-           "silence": "Silence",
-           "minimum": "Minimum",
-           "maximum": "Maximum"
-         }
-       },
        "sleep_times": {
          "name": "Sleep",
          "state": {

View File

@@ -22,17 +22,6 @@ class OAuth2FlowHandler(
    VERSION = CONFIG_FLOW_VERSION
    MINOR_VERSION = CONFIG_FLOW_MINOR_VERSION

-   async def async_step_user(
-       self, user_input: dict[str, Any] | None = None
-   ) -> ConfigFlowResult:
-       """Check we have the cloud integration set up."""
-       if "cloud" not in self.hass.config.components:
-           return self.async_abort(
-               reason="cloud_not_enabled",
-               description_placeholders={"default_config": "default_config"},
-           )
-       return await super().async_step_user(user_input)
-
    async def async_step_reauth(
        self, user_input: Mapping[str, Any]
    ) -> ConfigFlowResult:

View File

@@ -24,8 +24,7 @@
      "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]",
      "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
-     "wrong_account": "You are authenticated with a different account than the one set up. Please authenticate with the configured account.",
-     "cloud_not_enabled": "Please make sure you run Home Assistant with `{default_config}` enabled in your configuration.yaml."
+     "wrong_account": "You are authenticated with a different account than the one set up. Please authenticate with the configured account."
    },
    "create_entry": {
      "default": "[%key:common::config_flow::create_entry::authenticated%]"

View File

@@ -10,7 +10,6 @@ from aioamazondevices.api import AmazonDevice
 from aioamazondevices.const import SENSOR_STATE_OFF

 from homeassistant.components.binary_sensor import (
-    DOMAIN as BINARY_SENSOR_DOMAIN,
     BinarySensorDeviceClass,
     BinarySensorEntity,
     BinarySensorEntityDescription,
@@ -21,7 +20,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

 from .coordinator import AmazonConfigEntry
 from .entity import AmazonEntity
-from .utils import async_update_unique_id

 # Coordinator is used to centralize the data updates
 PARALLEL_UPDATES = 0
@@ -33,7 +31,6 @@ class AmazonBinarySensorEntityDescription(BinarySensorEntityDescription):

    is_on_fn: Callable[[AmazonDevice, str], bool]
    is_supported: Callable[[AmazonDevice, str], bool] = lambda device, key: True
-   is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: True


 BINARY_SENSORS: Final = (
@@ -44,15 +41,46 @@ BINARY_SENSORS: Final = (
        is_on_fn=lambda device, _: device.online,
    ),
    AmazonBinarySensorEntityDescription(
-       key="detectionState",
-       device_class=BinarySensorDeviceClass.MOTION,
-       is_on_fn=lambda device, key: bool(
-           device.sensors[key].value != SENSOR_STATE_OFF
-       ),
-       is_supported=lambda device, key: device.sensors.get(key) is not None,
-       is_available_fn=lambda device, key: (
-           device.online and device.sensors[key].error is False
-       ),
+       key="bluetooth",
+       entity_category=EntityCategory.DIAGNOSTIC,
+       translation_key="bluetooth",
+       is_on_fn=lambda device, _: device.bluetooth_state,
+   ),
+   AmazonBinarySensorEntityDescription(
+       key="babyCryDetectionState",
+       translation_key="baby_cry_detection",
+       is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
+       is_supported=lambda device, key: device.sensors.get(key) is not None,
+   ),
+   AmazonBinarySensorEntityDescription(
+       key="beepingApplianceDetectionState",
+       translation_key="beeping_appliance_detection",
+       is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
+       is_supported=lambda device, key: device.sensors.get(key) is not None,
+   ),
+   AmazonBinarySensorEntityDescription(
+       key="coughDetectionState",
+       translation_key="cough_detection",
+       is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
+       is_supported=lambda device, key: device.sensors.get(key) is not None,
+   ),
+   AmazonBinarySensorEntityDescription(
+       key="dogBarkDetectionState",
+       translation_key="dog_bark_detection",
+       is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
+       is_supported=lambda device, key: device.sensors.get(key) is not None,
+   ),
+   AmazonBinarySensorEntityDescription(
+       key="humanPresenceDetectionState",
+       device_class=BinarySensorDeviceClass.MOTION,
+       is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
+       is_supported=lambda device, key: device.sensors.get(key) is not None,
+   ),
+   AmazonBinarySensorEntityDescription(
+       key="waterSoundsDetectionState",
+       translation_key="water_sounds_detection",
+       is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
+       is_supported=lambda device, key: device.sensors.get(key) is not None,
    ),
 )
@@ -66,33 +94,12 @@ async def async_setup_entry(
    coordinator = entry.runtime_data

-   # Replace unique id for "detectionState" binary sensor
-   await async_update_unique_id(
-       hass,
-       coordinator,
-       BINARY_SENSOR_DOMAIN,
-       "humanPresenceDetectionState",
-       "detectionState",
-   )
-
-   known_devices: set[str] = set()
-
-   def _check_device() -> None:
-       current_devices = set(coordinator.data)
-       new_devices = current_devices - known_devices
-       if new_devices:
-           known_devices.update(new_devices)
-           async_add_entities(
-               AmazonBinarySensorEntity(coordinator, serial_num, sensor_desc)
-               for sensor_desc in BINARY_SENSORS
-               for serial_num in new_devices
-               if sensor_desc.is_supported(
-                   coordinator.data[serial_num], sensor_desc.key
-               )
-           )
-
-   _check_device()
-   entry.async_on_unload(coordinator.async_add_listener(_check_device))
+   async_add_entities(
+       AmazonBinarySensorEntity(coordinator, serial_num, sensor_desc)
+       for sensor_desc in BINARY_SENSORS
+       for serial_num in coordinator.data
+       if sensor_desc.is_supported(coordinator.data[serial_num], sensor_desc.key)
+   )


 class AmazonBinarySensorEntity(AmazonEntity, BinarySensorEntity):
@@ -106,13 +113,3 @@ class AmazonBinarySensorEntity(AmazonEntity, BinarySensorEntity):
return self.entity_description.is_on_fn( return self.entity_description.is_on_fn(
self.device, self.entity_description.key self.device, self.entity_description.key
) )
@property
def available(self) -> bool:
"""Return if entity is available."""
return (
self.entity_description.is_available_fn(
self.device, self.entity_description.key
)
and super().available
)
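
One column of this binary_sensor.py hunk uses the dynamic-device pattern: track which serial numbers already have entities and add the difference on every coordinator refresh. Reassembled from the interleaved columns, the body of async_setup_entry reads roughly as follows (coordinator, entry and async_add_entities are already in scope there); it is a sketch of the pattern, not the exact file:

    known_devices: set[str] = set()

    def _check_device() -> None:
        # Only create entities for serial numbers we have not seen yet.
        current_devices = set(coordinator.data)
        new_devices = current_devices - known_devices
        if new_devices:
            known_devices.update(new_devices)
            async_add_entities(
                AmazonBinarySensorEntity(coordinator, serial_num, sensor_desc)
                for sensor_desc in BINARY_SENSORS
                for serial_num in new_devices
                if sensor_desc.is_supported(coordinator.data[serial_num], sensor_desc.key)
            )

    _check_device()  # devices already known at setup
    entry.async_on_unload(coordinator.async_add_listener(_check_device))

Because the listener runs on every refresh, devices that appear after setup get entities without reloading the entry; the same shape recurs in the notify, sensor, switch and comelit hunks below.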

View File

@@ -64,7 +64,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
data = await validate_input(self.hass, user_input) data = await validate_input(self.hass, user_input)
except CannotConnect: except CannotConnect:
errors["base"] = "cannot_connect" errors["base"] = "cannot_connect"
except CannotAuthenticate: except (CannotAuthenticate, TypeError):
errors["base"] = "invalid_auth" errors["base"] = "invalid_auth"
except CannotRetrieveData: except CannotRetrieveData:
errors["base"] = "cannot_retrieve_data" errors["base"] = "cannot_retrieve_data"
@@ -112,7 +112,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
) )
except CannotConnect: except CannotConnect:
errors["base"] = "cannot_connect" errors["base"] = "cannot_connect"
except CannotAuthenticate: except (CannotAuthenticate, TypeError):
errors["base"] = "invalid_auth" errors["base"] = "invalid_auth"
except CannotRetrieveData: except CannotRetrieveData:
errors["base"] = "cannot_retrieve_data" errors["base"] = "cannot_retrieve_data"

View File

@@ -68,7 +68,7 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
translation_key="cannot_retrieve_data_with_error", translation_key="cannot_retrieve_data_with_error",
translation_placeholders={"error": repr(err)}, translation_placeholders={"error": repr(err)},
) from err ) from err
except CannotAuthenticate as err: except (CannotAuthenticate, TypeError) as err:
raise ConfigEntryAuthFailed( raise ConfigEntryAuthFailed(
translation_domain=DOMAIN, translation_domain=DOMAIN,
translation_key="invalid_auth", translation_key="invalid_auth",

View File

@@ -60,5 +60,7 @@ def build_device_data(device: AmazonDevice) -> dict[str, Any]:
"online": device.online, "online": device.online,
"serial number": device.serial_number, "serial number": device.serial_number,
"software version": device.software_version, "software version": device.software_version,
"sensors": device.sensors, "do not disturb": device.do_not_disturb,
"response style": device.response_style,
"bluetooth state": device.bluetooth_state,
} }

View File

@@ -1,4 +1,44 @@
{ {
"entity": {
"binary_sensor": {
"bluetooth": {
"default": "mdi:bluetooth-off",
"state": {
"on": "mdi:bluetooth"
}
},
"baby_cry_detection": {
"default": "mdi:account-voice-off",
"state": {
"on": "mdi:account-voice"
}
},
"beeping_appliance_detection": {
"default": "mdi:bell-off",
"state": {
"on": "mdi:bell-ring"
}
},
"cough_detection": {
"default": "mdi:blur-off",
"state": {
"on": "mdi:blur"
}
},
"dog_bark_detection": {
"default": "mdi:dog-side-off",
"state": {
"on": "mdi:dog-side"
}
},
"water_sounds_detection": {
"default": "mdi:water-pump-off",
"state": {
"on": "mdi:water-pump"
}
}
}
},
"services": { "services": {
"send_sound": { "send_sound": {
"service": "mdi:cast-audio" "service": "mdi:cast-audio"

View File

@@ -7,6 +7,6 @@
"integration_type": "hub", "integration_type": "hub",
"iot_class": "cloud_polling", "iot_class": "cloud_polling",
"loggers": ["aioamazondevices"], "loggers": ["aioamazondevices"],
"quality_scale": "platinum", "quality_scale": "silver",
"requirements": ["aioamazondevices==6.2.8"] "requirements": ["aioamazondevices==6.0.0"]
} }

View File

@@ -57,24 +57,14 @@ async def async_setup_entry(
coordinator = entry.runtime_data coordinator = entry.runtime_data
known_devices: set[str] = set()
def _check_device() -> None:
current_devices = set(coordinator.data)
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities( async_add_entities(
AmazonNotifyEntity(coordinator, serial_num, sensor_desc) AmazonNotifyEntity(coordinator, serial_num, sensor_desc)
for sensor_desc in NOTIFY for sensor_desc in NOTIFY
for serial_num in new_devices for serial_num in coordinator.data
if sensor_desc.subkey in coordinator.data[serial_num].capabilities if sensor_desc.subkey in coordinator.data[serial_num].capabilities
and sensor_desc.is_supported(coordinator.data[serial_num]) and sensor_desc.is_supported(coordinator.data[serial_num])
) )
_check_device()
entry.async_on_unload(coordinator.async_add_listener(_check_device))
class AmazonNotifyEntity(AmazonEntity, NotifyEntity): class AmazonNotifyEntity(AmazonEntity, NotifyEntity):
"""Binary sensor notify platform.""" """Binary sensor notify platform."""

View File

@@ -53,7 +53,7 @@ rules:
docs-supported-functions: done docs-supported-functions: done
docs-troubleshooting: done docs-troubleshooting: done
docs-use-cases: done docs-use-cases: done
dynamic-devices: done dynamic-devices: todo
entity-category: done entity-category: done
entity-device-class: done entity-device-class: done
entity-disabled-by-default: done entity-disabled-by-default: done

View File

@@ -31,9 +31,6 @@ class AmazonSensorEntityDescription(SensorEntityDescription):
"""Amazon Devices sensor entity description.""" """Amazon Devices sensor entity description."""
native_unit_of_measurement_fn: Callable[[AmazonDevice, str], str] | None = None native_unit_of_measurement_fn: Callable[[AmazonDevice, str], str] | None = None
is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
device.online and device.sensors[key].error is False
)
SENSORS: Final = ( SENSORS: Final = (
@@ -65,23 +62,13 @@ async def async_setup_entry(
coordinator = entry.runtime_data coordinator = entry.runtime_data
known_devices: set[str] = set()
def _check_device() -> None:
current_devices = set(coordinator.data)
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities( async_add_entities(
AmazonSensorEntity(coordinator, serial_num, sensor_desc) AmazonSensorEntity(coordinator, serial_num, sensor_desc)
for sensor_desc in SENSORS for sensor_desc in SENSORS
for serial_num in new_devices for serial_num in coordinator.data
if coordinator.data[serial_num].sensors.get(sensor_desc.key) is not None if coordinator.data[serial_num].sensors.get(sensor_desc.key) is not None
) )
_check_device()
entry.async_on_unload(coordinator.async_add_listener(_check_device))
class AmazonSensorEntity(AmazonEntity, SensorEntity): class AmazonSensorEntity(AmazonEntity, SensorEntity):
"""Sensor device.""" """Sensor device."""
@@ -102,13 +89,3 @@ class AmazonSensorEntity(AmazonEntity, SensorEntity):
def native_value(self) -> StateType: def native_value(self) -> StateType:
"""Return the state of the sensor.""" """Return the state of the sensor."""
return self.device.sensors[self.entity_description.key].value return self.device.sensors[self.entity_description.key].value
@property
def available(self) -> bool:
"""Return if entity is available."""
return (
self.entity_description.is_available_fn(
self.device, self.entity_description.key
)
and super().available
)
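
One column of sensor.py ties availability to per-sensor health via an is_available_fn on the description and an available property on the entity. Reassembled as a sketch (imports follow the diff):

    from collections.abc import Callable
    from dataclasses import dataclass

    from aioamazondevices.api import AmazonDevice

    from homeassistant.components.sensor import SensorEntity, SensorEntityDescription

    from .entity import AmazonEntity

    @dataclass(frozen=True, kw_only=True)
    class AmazonSensorEntityDescription(SensorEntityDescription):
        """Amazon Devices sensor entity description."""

        # Available only while the device is online and the backing sensor
        # reports no error for this key.
        is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
            device.online and device.sensors[key].error is False
        )

    class AmazonSensorEntity(AmazonEntity, SensorEntity):
        """Sensor device."""

        entity_description: AmazonSensorEntityDescription

        @property
        def available(self) -> bool:
            """Combine per-sensor health with the coordinator's own availability."""
            return (
                self.entity_description.is_available_fn(
                    self.device, self.entity_description.key
                )
                and super().available
            )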

View File

@@ -58,6 +58,26 @@
} }
}, },
"entity": { "entity": {
"binary_sensor": {
"bluetooth": {
"name": "Bluetooth"
},
"baby_cry_detection": {
"name": "Baby crying"
},
"beeping_appliance_detection": {
"name": "Beeping appliance"
},
"cough_detection": {
"name": "Coughing"
},
"dog_bark_detection": {
"name": "Dog barking"
},
"water_sounds_detection": {
"name": "Water sounds"
}
},
"notify": { "notify": {
"speak": { "speak": {
"name": "Speak" "name": "Speak"

View File

@@ -8,17 +8,13 @@ from typing import TYPE_CHECKING, Any, Final
from aioamazondevices.api import AmazonDevice from aioamazondevices.api import AmazonDevice
from homeassistant.components.switch import ( from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
DOMAIN as SWITCH_DOMAIN,
SwitchEntity,
SwitchEntityDescription,
)
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import AmazonConfigEntry from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity from .entity import AmazonEntity
from .utils import alexa_api_call, async_update_unique_id from .utils import alexa_api_call
PARALLEL_UPDATES = 1 PARALLEL_UPDATES = 1
@@ -28,17 +24,16 @@ class AmazonSwitchEntityDescription(SwitchEntityDescription):
"""Alexa Devices switch entity description.""" """Alexa Devices switch entity description."""
is_on_fn: Callable[[AmazonDevice], bool] is_on_fn: Callable[[AmazonDevice], bool]
is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: ( subkey: str
device.online and device.sensors[key].error is False
)
method: str method: str
SWITCHES: Final = ( SWITCHES: Final = (
AmazonSwitchEntityDescription( AmazonSwitchEntityDescription(
key="dnd", key="do_not_disturb",
subkey="AUDIO_PLAYER",
translation_key="do_not_disturb", translation_key="do_not_disturb",
is_on_fn=lambda device: bool(device.sensors["dnd"].value), is_on_fn=lambda _device: _device.do_not_disturb,
method="set_do_not_disturb", method="set_do_not_disturb",
), ),
) )
@@ -53,28 +48,13 @@ async def async_setup_entry(
coordinator = entry.runtime_data coordinator = entry.runtime_data
# Replace unique id for "DND" switch and remove from Speaker Group
await async_update_unique_id(
hass, coordinator, SWITCH_DOMAIN, "do_not_disturb", "dnd"
)
known_devices: set[str] = set()
def _check_device() -> None:
current_devices = set(coordinator.data)
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities( async_add_entities(
AmazonSwitchEntity(coordinator, serial_num, switch_desc) AmazonSwitchEntity(coordinator, serial_num, switch_desc)
for switch_desc in SWITCHES for switch_desc in SWITCHES
for serial_num in new_devices for serial_num in coordinator.data
if switch_desc.key in coordinator.data[serial_num].sensors if switch_desc.subkey in coordinator.data[serial_num].capabilities
) )
_check_device()
entry.async_on_unload(coordinator.async_add_listener(_check_device))
class AmazonSwitchEntity(AmazonEntity, SwitchEntity): class AmazonSwitchEntity(AmazonEntity, SwitchEntity):
"""Switch device.""" """Switch device."""
@@ -104,13 +84,3 @@ class AmazonSwitchEntity(AmazonEntity, SwitchEntity):
def is_on(self) -> bool: def is_on(self) -> bool:
"""Return True if switch is on.""" """Return True if switch is on."""
return self.entity_description.is_on_fn(self.device) return self.entity_description.is_on_fn(self.device)
@property
def available(self) -> bool:
"""Return if entity is available."""
return (
self.entity_description.is_available_fn(
self.device, self.entity_description.key
)
and super().available
)
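
In the same column the switch description carries the API call as a string (method="set_do_not_disturb"). A plausible dispatch sketch follows; the coordinator.api attribute and the refresh call are assumptions for illustration, not taken from this diff:

    from typing import Any

    class AmazonSwitchEntity(AmazonEntity, SwitchEntity):
        """Switch device."""

        entity_description: AmazonSwitchEntityDescription

        @alexa_api_call
        async def _switch_set_state(self, state: bool) -> None:
            # Resolve the API coroutine named by the description (assumed to
            # live on coordinator.api) and toggle the device.
            method = getattr(self.coordinator.api, self.entity_description.method)
            await method(self.device, state)
            await self.coordinator.async_request_refresh()

        async def async_turn_on(self, **kwargs: Any) -> None:
            await self._switch_set_state(True)

        async def async_turn_off(self, **kwargs: Any) -> None:
            await self._switch_set_state(False)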

View File

@@ -6,12 +6,9 @@ from typing import Any, Concatenate
from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.entity_registry as er
from .const import _LOGGER, DOMAIN from .const import DOMAIN
from .coordinator import AmazonDevicesCoordinator
from .entity import AmazonEntity from .entity import AmazonEntity
@@ -41,23 +38,3 @@ def alexa_api_call[_T: AmazonEntity, **_P](
) from err ) from err
return cmd_wrapper return cmd_wrapper
async def async_update_unique_id(
hass: HomeAssistant,
coordinator: AmazonDevicesCoordinator,
domain: str,
old_key: str,
new_key: str,
) -> None:
"""Update unique id for entities created with old format."""
entity_registry = er.async_get(hass)
for serial_num in coordinator.data:
unique_id = f"{serial_num}-{old_key}"
if entity_id := entity_registry.async_get_entity_id(domain, DOMAIN, unique_id):
_LOGGER.debug("Updating unique_id for %s", entity_id)
new_unique_id = unique_id.replace(old_key, new_key)
# Update the registry with the new unique_id
entity_registry.async_update_entity(entity_id, new_unique_id=new_unique_id)
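
The async_update_unique_id helper that only one column of utils.py contains, reassembled as a standalone piece with its imports: it migrates registry unique IDs from the old key suffix to the new one for every device the coordinator knows about.

    from homeassistant.core import HomeAssistant
    import homeassistant.helpers.entity_registry as er

    from .const import _LOGGER, DOMAIN
    from .coordinator import AmazonDevicesCoordinator

    async def async_update_unique_id(
        hass: HomeAssistant,
        coordinator: AmazonDevicesCoordinator,
        domain: str,
        old_key: str,
        new_key: str,
    ) -> None:
        """Update unique id for entities created with the old format."""
        entity_registry = er.async_get(hass)

        for serial_num in coordinator.data:
            unique_id = f"{serial_num}-{old_key}"
            if entity_id := entity_registry.async_get_entity_id(domain, DOMAIN, unique_id):
                _LOGGER.debug("Updating unique_id for %s", entity_id)
                # Rewrite the registry entry in place so user customizations follow.
                entity_registry.async_update_entity(
                    entity_id, new_unique_id=unique_id.replace(old_key, new_key)
                )

Platforms call it before adding entities, e.g. await async_update_unique_id(hass, coordinator, SWITCH_DOMAIN, "do_not_disturb", "dnd") in the switch hunk above.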

View File

@@ -39,7 +39,7 @@ from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.singleton import singleton from homeassistant.helpers.singleton import singleton
from homeassistant.helpers.storage import Store from homeassistant.helpers.storage import Store
from homeassistant.helpers.system_info import async_get_system_info from homeassistant.helpers.system_info import async_get_system_info
from homeassistant.helpers.typing import UNDEFINED from homeassistant.helpers.typing import UNDEFINED, UndefinedType
from homeassistant.loader import ( from homeassistant.loader import (
Integration, Integration,
IntegrationNotFound, IntegrationNotFound,
@@ -142,6 +142,7 @@ class EntityAnalyticsModifications:
""" """
remove: bool = False remove: bool = False
capabilities: dict[str, Any] | None | UndefinedType = UNDEFINED
class AnalyticsPlatformProtocol(Protocol): class AnalyticsPlatformProtocol(Protocol):
@@ -513,8 +514,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
integration_inputs: dict[str, tuple[list[str], list[str]]] = {} integration_inputs: dict[str, tuple[list[str], list[str]]] = {}
integration_configs: dict[str, AnalyticsModifications] = {} integration_configs: dict[str, AnalyticsModifications] = {}
removed_devices: set[str] = set()
# Get device list # Get device list
for device_entry in dev_reg.devices.values(): for device_entry in dev_reg.devices.values():
if not device_entry.primary_config_entry: if not device_entry.primary_config_entry:
@@ -527,10 +526,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
if config_entry is None: if config_entry is None:
continue continue
if device_entry.entry_type is dr.DeviceEntryType.SERVICE:
removed_devices.add(device_entry.id)
continue
integration_domain = config_entry.domain integration_domain = config_entry.domain
integration_input = integration_inputs.setdefault(integration_domain, ([], [])) integration_input = integration_inputs.setdefault(integration_domain, ([], []))
@@ -543,23 +538,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
integration_input = integration_inputs.setdefault(integration_domain, ([], [])) integration_input = integration_inputs.setdefault(integration_domain, ([], []))
integration_input[1].append(entity_entry.entity_id) integration_input[1].append(entity_entry.entity_id)
integrations = {
domain: integration
for domain, integration in (
await async_get_integrations(hass, integration_inputs.keys())
).items()
if isinstance(integration, Integration)
}
# Filter out custom integrations and integrations that are not device or hub type
integration_inputs = {
domain: integration_info
for domain, integration_info in integration_inputs.items()
if (integration := integrations.get(domain)) is not None
and integration.is_built_in
and integration.manifest.get("integration_type") in ("device", "hub")
}
# Call integrations that implement the analytics platform # Call integrations that implement the analytics platform
for integration_domain, integration_input in integration_inputs.items(): for integration_domain, integration_input in integration_inputs.items():
if ( if (
@@ -620,15 +598,15 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
device_config = integration_config.devices.get(device_id, device_config) device_config = integration_config.devices.get(device_id, device_config)
if device_config.remove: if device_config.remove:
removed_devices.add(device_id)
continue continue
device_entry = dev_reg.devices[device_id] device_entry = dev_reg.devices[device_id]
device_id_mapping[device_id] = (integration_domain, len(devices_info)) device_id_mapping[device_entry.id] = (integration_domain, len(devices_info))
devices_info.append( devices_info.append(
{ {
"entities": [],
"entry_type": device_entry.entry_type, "entry_type": device_entry.entry_type,
"has_configuration_url": device_entry.configuration_url is not None, "has_configuration_url": device_entry.configuration_url is not None,
"hw_version": device_entry.hw_version, "hw_version": device_entry.hw_version,
@@ -637,7 +615,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
"model_id": device_entry.model_id, "model_id": device_entry.model_id,
"sw_version": device_entry.sw_version, "sw_version": device_entry.sw_version,
"via_device": device_entry.via_device_id, "via_device": device_entry.via_device_id,
"entities": [],
} }
) )
@@ -676,41 +653,58 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
entity_entry = ent_reg.entities[entity_id] entity_entry = ent_reg.entities[entity_id]
entity_state = hass.states.get(entity_id) entity_state = hass.states.get(entity_entry.entity_id)
entity_info = { entity_info = {
# LIMITATION: `assumed_state` can be overridden by users; # LIMITATION: `assumed_state` can be overridden by users;
# we should replace it with the original value in the future. # we should replace it with the original value in the future.
# It is also not present, if entity is not in the state machine, # It is also not present, if entity is not in the state machine,
# which can happen for disabled entities. # which can happen for disabled entities.
"assumed_state": ( "assumed_state": entity_state.attributes.get(ATTR_ASSUMED_STATE, False)
entity_state.attributes.get(ATTR_ASSUMED_STATE, False)
if entity_state is not None if entity_state is not None
else None else None,
), "capabilities": entity_config.capabilities
if entity_config.capabilities is not UNDEFINED
else entity_entry.capabilities,
"domain": entity_entry.domain, "domain": entity_entry.domain,
"entity_category": entity_entry.entity_category, "entity_category": entity_entry.entity_category,
"has_entity_name": entity_entry.has_entity_name, "has_entity_name": entity_entry.has_entity_name,
"modified_by_integration": ["capabilities"]
if entity_config.capabilities is not UNDEFINED
else None,
"original_device_class": entity_entry.original_device_class, "original_device_class": entity_entry.original_device_class,
# LIMITATION: `unit_of_measurement` can be overridden by users; # LIMITATION: `unit_of_measurement` can be overridden by users;
# we should replace it with the original value in the future. # we should replace it with the original value in the future.
"unit_of_measurement": entity_entry.unit_of_measurement, "unit_of_measurement": entity_entry.unit_of_measurement,
} }
if (device_id_ := entity_entry.device_id) is not None:
if device_id_ in removed_devices:
# The device was removed, so we remove the entity too
continue
if ( if (
new_device_id := device_id_mapping.get(device_id_) ((device_id_ := entity_entry.device_id) is not None)
) is not None and (new_device_id[0] == integration_domain): and ((new_device_id := device_id_mapping.get(device_id_)) is not None)
and (new_device_id[0] == integration_domain)
):
device_info = devices_info[new_device_id[1]] device_info = devices_info[new_device_id[1]]
device_info["entities"].append(entity_info) device_info["entities"].append(entity_info)
continue else:
entities_info.append(entity_info) entities_info.append(entity_info)
integrations = {
domain: integration
for domain, integration in (
await async_get_integrations(hass, integrations_info.keys())
).items()
if isinstance(integration, Integration)
}
for domain, integration_info in integrations_info.items():
if integration := integrations.get(domain):
integration_info["is_custom_integration"] = not integration.is_built_in
# Include version for custom integrations
if not integration.is_built_in and integration.version:
integration_info["custom_integration_version"] = str(
integration.version
)
return { return {
"version": "home-assistant:1", "version": "home-assistant:1",
"home_assistant": HA_VERSION, "home_assistant": HA_VERSION,

View File

@@ -1308,9 +1308,7 @@ class PipelineRun:
# instead of a full response. # instead of a full response.
all_targets_in_satellite_area = ( all_targets_in_satellite_area = (
self._get_all_targets_in_satellite_area( self._get_all_targets_in_satellite_area(
conversation_result.response, conversation_result.response, self._device_id
self._satellite_id,
self._device_id,
) )
) )
@@ -1339,62 +1337,39 @@ class PipelineRun:
return (speech, all_targets_in_satellite_area) return (speech, all_targets_in_satellite_area)
def _get_all_targets_in_satellite_area( def _get_all_targets_in_satellite_area(
self, self, intent_response: intent.IntentResponse, device_id: str | None
intent_response: intent.IntentResponse,
satellite_id: str | None,
device_id: str | None,
) -> bool: ) -> bool:
"""Return true if all targeted entities were in the same area as the device.""" """Return true if all targeted entities were in the same area as the device."""
if ( if (
intent_response.response_type != intent.IntentResponseType.ACTION_DONE (intent_response.response_type != intent.IntentResponseType.ACTION_DONE)
or not intent_response.matched_states or (not intent_response.matched_states)
or (not device_id)
):
return False
device_registry = dr.async_get(self.hass)
if (not (device := device_registry.async_get(device_id))) or (
not device.area_id
): ):
return False return False
entity_registry = er.async_get(self.hass) entity_registry = er.async_get(self.hass)
device_registry = dr.async_get(self.hass)
area_id: str | None = None
if (
satellite_id is not None
and (target_entity_entry := entity_registry.async_get(satellite_id))
is not None
):
area_id = target_entity_entry.area_id
device_id = target_entity_entry.device_id
if area_id is None:
if device_id is None:
return False
device_entry = device_registry.async_get(device_id)
if device_entry is None:
return False
area_id = device_entry.area_id
if area_id is None:
return False
for state in intent_response.matched_states: for state in intent_response.matched_states:
target_entity_entry = entity_registry.async_get(state.entity_id) entity = entity_registry.async_get(state.entity_id)
if target_entity_entry is None: if not entity:
return False return False
target_area_id = target_entity_entry.area_id if (entity_area_id := entity.area_id) is None:
if target_area_id is None: if (entity.device_id is None) or (
if target_entity_entry.device_id is None: (entity_device := device_registry.async_get(entity.device_id))
is None
):
return False return False
target_device_entry = device_registry.async_get( entity_area_id = entity_device.area_id
target_entity_entry.device_id
)
if target_device_entry is None:
return False
target_area_id = target_device_entry.area_id if entity_area_id != device.area_id:
if target_area_id != area_id:
return False return False
return True return True
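
Both columns of _get_all_targets_in_satellite_area reduce to the same lookup: resolve an area for a registry entry, preferring the entity's own area and falling back to the area of its device. Condensed into one helper, the fallback looks like this (a sketch, not the pipeline code):

    from homeassistant.core import HomeAssistant
    from homeassistant.helpers import device_registry as dr, entity_registry as er

    def resolve_area_id(hass: HomeAssistant, entity_id: str) -> str | None:
        """Return the area of an entity, falling back to its device's area."""
        ent_reg = er.async_get(hass)
        dev_reg = dr.async_get(hass)

        if (entry := ent_reg.async_get(entity_id)) is None:
            return None
        if entry.area_id is not None:
            return entry.area_id
        if entry.device_id and (device := dev_reg.async_get(entry.device_id)):
            return device.area_id
        return None

The pipeline then compares the resolved area of every matched target against the satellite's own area and answers True only when they all agree.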

View File

@@ -2,7 +2,9 @@
from __future__ import annotations from __future__ import annotations
from typing import Any from typing import Any, TypeVar
T = TypeVar("T", dict[str, Any], list[Any], None)
TRANSLATION_MAP = { TRANSLATION_MAP = {
"wan_rx": "sensor_rx_bytes", "wan_rx": "sensor_rx_bytes",
@@ -34,7 +36,7 @@ def clean_dict(raw: dict[str, Any]) -> dict[str, Any]:
return {k: v for k, v in raw.items() if v is not None or k.endswith("state")} return {k: v for k, v in raw.items() if v is not None or k.endswith("state")}
def translate_to_legacy[T: (dict[str, Any], list[Any], None)](raw: T) -> T: def translate_to_legacy(raw: T) -> T:
"""Translate raw data to legacy format for dicts and lists.""" """Translate raw data to legacy format for dicts and lists."""
if raw is None: if raw is None:
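
The two columns of translate_to_legacy differ only in how the constrained type parameter is spelled; the forms are equivalent, the inline one simply requires Python 3.12:

    from typing import Any, TypeVar

    T = TypeVar("T", dict[str, Any], list[Any], None)

    def translate_to_legacy(raw: T) -> T:
        """Classic constrained TypeVar spelling."""
        ...

    def translate_to_legacy_pep695[U: (dict[str, Any], list[Any], None)](raw: U) -> U:
        """Equivalent PEP 695 inline spelling (Python 3.12+)."""
        ...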

View File

@@ -0,0 +1,24 @@
"""Analytics platform."""
from homeassistant.components.analytics import (
AnalyticsInput,
AnalyticsModifications,
EntityAnalyticsModifications,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
async def async_modify_analytics(
hass: HomeAssistant, analytics_input: AnalyticsInput
) -> AnalyticsModifications:
"""Modify the analytics."""
ent_reg = er.async_get(hass)
entities: dict[str, EntityAnalyticsModifications] = {}
for entity_id in analytics_input.entity_ids:
entity_entry = ent_reg.entities[entity_id]
if entity_entry.capabilities is not None:
entities[entity_id] = EntityAnalyticsModifications(capabilities=None)
return AnalyticsModifications(entities=entities)

View File

@@ -26,6 +26,9 @@ async def async_setup_entry(
if CONF_HOST in config_entry.data: if CONF_HOST in config_entry.data:
coordinator = AwairLocalDataUpdateCoordinator(hass, config_entry, session) coordinator = AwairLocalDataUpdateCoordinator(hass, config_entry, session)
config_entry.async_on_unload(
config_entry.add_update_listener(_async_update_listener)
)
else: else:
coordinator = AwairCloudDataUpdateCoordinator(hass, config_entry, session) coordinator = AwairCloudDataUpdateCoordinator(hass, config_entry, session)
@@ -33,11 +36,6 @@ async def async_setup_entry(
config_entry.runtime_data = coordinator config_entry.runtime_data = coordinator
if CONF_HOST in config_entry.data:
config_entry.async_on_unload(
config_entry.add_update_listener(_async_update_listener)
)
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
return True return True

View File

@@ -17,7 +17,6 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import frame from homeassistant.helpers import frame
from homeassistant.util import slugify from homeassistant.util import slugify
from homeassistant.util.async_iterator import AsyncIteratorReader, AsyncIteratorWriter
from . import util from . import util
from .agent import BackupAgent from .agent import BackupAgent
@@ -145,7 +144,7 @@ class DownloadBackupView(HomeAssistantView):
return Response(status=HTTPStatus.NOT_FOUND) return Response(status=HTTPStatus.NOT_FOUND)
else: else:
stream = await agent.async_download_backup(backup_id) stream = await agent.async_download_backup(backup_id)
reader = cast(IO[bytes], AsyncIteratorReader(hass.loop, stream)) reader = cast(IO[bytes], util.AsyncIteratorReader(hass, stream))
worker_done_event = asyncio.Event() worker_done_event = asyncio.Event()
@@ -153,7 +152,7 @@ class DownloadBackupView(HomeAssistantView):
"""Call by the worker thread when it's done.""" """Call by the worker thread when it's done."""
hass.loop.call_soon_threadsafe(worker_done_event.set) hass.loop.call_soon_threadsafe(worker_done_event.set)
stream = AsyncIteratorWriter(hass.loop) stream = util.AsyncIteratorWriter(hass)
worker = threading.Thread( worker = threading.Thread(
target=util.decrypt_backup, target=util.decrypt_backup,
args=[backup, reader, stream, password, on_done, 0, []], args=[backup, reader, stream, password, on_done, 0, []],

View File

@@ -38,7 +38,6 @@ from homeassistant.helpers import (
) )
from homeassistant.helpers.json import json_bytes from homeassistant.helpers.json import json_bytes
from homeassistant.util import dt as dt_util, json as json_util from homeassistant.util import dt as dt_util, json as json_util
from homeassistant.util.async_iterator import AsyncIteratorReader
from . import util as backup_util from . import util as backup_util
from .agent import ( from .agent import (
@@ -73,6 +72,7 @@ from .models import (
) )
from .store import BackupStore from .store import BackupStore
from .util import ( from .util import (
AsyncIteratorReader,
DecryptedBackupStreamer, DecryptedBackupStreamer,
EncryptedBackupStreamer, EncryptedBackupStreamer,
make_backup_dir, make_backup_dir,
@@ -1525,7 +1525,7 @@ class BackupManager:
reader = await self.hass.async_add_executor_job(open, path.as_posix(), "rb") reader = await self.hass.async_add_executor_job(open, path.as_posix(), "rb")
else: else:
backup_stream = await agent.async_download_backup(backup_id) backup_stream = await agent.async_download_backup(backup_id)
reader = cast(IO[bytes], AsyncIteratorReader(self.hass.loop, backup_stream)) reader = cast(IO[bytes], AsyncIteratorReader(self.hass, backup_stream))
try: try:
await self.hass.async_add_executor_job( await self.hass.async_add_executor_job(
validate_password_stream, reader, password validate_password_stream, reader, password

View File

@@ -4,6 +4,7 @@ from __future__ import annotations
import asyncio import asyncio
from collections.abc import AsyncIterator, Callable, Coroutine from collections.abc import AsyncIterator, Callable, Coroutine
from concurrent.futures import CancelledError, Future
import copy import copy
from dataclasses import dataclass, replace from dataclasses import dataclass, replace
from io import BytesIO from io import BytesIO
@@ -13,7 +14,7 @@ from pathlib import Path, PurePath
from queue import SimpleQueue from queue import SimpleQueue
import tarfile import tarfile
import threading import threading
from typing import IO, Any, cast from typing import IO, Any, Self, cast
import aiohttp import aiohttp
from securetar import SecureTarError, SecureTarFile, SecureTarReadError from securetar import SecureTarError, SecureTarFile, SecureTarReadError
@@ -22,11 +23,6 @@ from homeassistant.backup_restore import password_to_key
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError from homeassistant.exceptions import HomeAssistantError
from homeassistant.util import dt as dt_util from homeassistant.util import dt as dt_util
from homeassistant.util.async_iterator import (
Abort,
AsyncIteratorReader,
AsyncIteratorWriter,
)
from homeassistant.util.json import JsonObjectType, json_loads_object from homeassistant.util.json import JsonObjectType, json_loads_object
from .const import BUF_SIZE, LOGGER from .const import BUF_SIZE, LOGGER
@@ -63,6 +59,12 @@ class BackupEmpty(DecryptError):
_message = "No tar files found in the backup." _message = "No tar files found in the backup."
class AbortCipher(HomeAssistantError):
"""Abort the cipher operation."""
_message = "Abort cipher operation."
def make_backup_dir(path: Path) -> None: def make_backup_dir(path: Path) -> None:
"""Create a backup directory if it does not exist.""" """Create a backup directory if it does not exist."""
path.mkdir(exist_ok=True) path.mkdir(exist_ok=True)
@@ -164,6 +166,106 @@ def validate_password(path: Path, password: str | None) -> bool:
return False return False
class AsyncIteratorReader:
"""Wrap an AsyncIterator."""
def __init__(self, hass: HomeAssistant, stream: AsyncIterator[bytes]) -> None:
"""Initialize the wrapper."""
self._aborted = False
self._hass = hass
self._stream = stream
self._buffer: bytes | None = None
self._next_future: Future[bytes | None] | None = None
self._pos: int = 0
async def _next(self) -> bytes | None:
"""Get the next chunk from the iterator."""
return await anext(self._stream, None)
def abort(self) -> None:
"""Abort the reader."""
self._aborted = True
if self._next_future is not None:
self._next_future.cancel()
def read(self, n: int = -1, /) -> bytes:
"""Read data from the iterator."""
result = bytearray()
while n < 0 or len(result) < n:
if not self._buffer:
self._next_future = asyncio.run_coroutine_threadsafe(
self._next(), self._hass.loop
)
if self._aborted:
self._next_future.cancel()
raise AbortCipher
try:
self._buffer = self._next_future.result()
except CancelledError as err:
raise AbortCipher from err
self._pos = 0
if not self._buffer:
# The stream is exhausted
break
chunk = self._buffer[self._pos : self._pos + n]
result.extend(chunk)
n -= len(chunk)
self._pos += len(chunk)
if self._pos == len(self._buffer):
self._buffer = None
return bytes(result)
def close(self) -> None:
"""Close the iterator."""
class AsyncIteratorWriter:
"""Wrap an AsyncIterator."""
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize the wrapper."""
self._aborted = False
self._hass = hass
self._pos: int = 0
self._queue: asyncio.Queue[bytes | None] = asyncio.Queue(maxsize=1)
self._write_future: Future[bytes | None] | None = None
def __aiter__(self) -> Self:
"""Return the iterator."""
return self
async def __anext__(self) -> bytes:
"""Get the next chunk from the iterator."""
if data := await self._queue.get():
return data
raise StopAsyncIteration
def abort(self) -> None:
"""Abort the writer."""
self._aborted = True
if self._write_future is not None:
self._write_future.cancel()
def tell(self) -> int:
"""Return the current position in the iterator."""
return self._pos
def write(self, s: bytes, /) -> int:
"""Write data to the iterator."""
self._write_future = asyncio.run_coroutine_threadsafe(
self._queue.put(s), self._hass.loop
)
if self._aborted:
self._write_future.cancel()
raise AbortCipher
try:
self._write_future.result()
except CancelledError as err:
raise AbortCipher from err
self._pos += len(s)
return len(s)
def validate_password_stream( def validate_password_stream(
input_stream: IO[bytes], input_stream: IO[bytes],
password: str | None, password: str | None,
@@ -240,7 +342,7 @@ def decrypt_backup(
finally: finally:
# Write an empty chunk to signal the end of the stream # Write an empty chunk to signal the end of the stream
output_stream.write(b"") output_stream.write(b"")
except Abort: except AbortCipher:
LOGGER.debug("Cipher operation aborted") LOGGER.debug("Cipher operation aborted")
finally: finally:
on_done(error) on_done(error)
@@ -328,7 +430,7 @@ def encrypt_backup(
finally: finally:
# Write an empty chunk to signal the end of the stream # Write an empty chunk to signal the end of the stream
output_stream.write(b"") output_stream.write(b"")
except Abort: except AbortCipher:
LOGGER.debug("Cipher operation aborted") LOGGER.debug("Cipher operation aborted")
finally: finally:
on_done(error) on_done(error)
@@ -455,8 +557,8 @@ class _CipherBackupStreamer:
self._hass.loop.call_soon_threadsafe(worker_status.done.set) self._hass.loop.call_soon_threadsafe(worker_status.done.set)
stream = await self._open_stream() stream = await self._open_stream()
reader = AsyncIteratorReader(self._hass.loop, stream) reader = AsyncIteratorReader(self._hass, stream)
writer = AsyncIteratorWriter(self._hass.loop) writer = AsyncIteratorWriter(self._hass)
worker = threading.Thread( worker = threading.Thread(
target=self._cipher_func, target=self._cipher_func,
args=[ args=[
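
These helpers exist so blocking code on a worker thread can consume and produce async byte streams: AsyncIteratorReader turns awaits into blocking read() calls via run_coroutine_threadsafe, and AsyncIteratorWriter turns blocking write() calls back into an async iterator (the other column keeps the same classes in homeassistant.util.async_iterator, constructed with hass.loop instead of hass). A stripped-down usage sketch; the real workers are decrypt_backup/encrypt_backup with error handling and on_done callbacks:

    import threading
    from typing import IO, cast

    async def stream_through_worker(hass, stream):
        """Pipe an async byte stream through a blocking worker thread (sketch)."""
        reader = cast(IO[bytes], AsyncIteratorReader(hass, stream))
        writer = AsyncIteratorWriter(hass)

        def _worker() -> None:
            # Blocking work (tarfile, crypto, ...) happens here, off the event loop.
            while chunk := reader.read(2**20):
                writer.write(chunk)
            writer.write(b"")  # an empty chunk signals end of stream

        threading.Thread(target=_worker, daemon=True).start()
        async for chunk in writer:
            yield chunk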

View File

@@ -73,12 +73,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: BangOlufsenConfigEntry)
# Add the websocket and API client # Add the websocket and API client
entry.runtime_data = BangOlufsenData(websocket, client) entry.runtime_data = BangOlufsenData(websocket, client)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) # Start WebSocket connection
# Start WebSocket connection once the platforms have been loaded.
# This ensures that the initial WebSocket notifications are dispatched to entities
await client.connect_notifications(remote_control=True, reconnect=True) await client.connect_notifications(remote_control=True, reconnect=True)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True return True

View File

@@ -125,8 +125,7 @@ async def async_setup_entry(
async_add_entities( async_add_entities(
new_entities=[ new_entities=[
BangOlufsenMediaPlayer(config_entry, config_entry.runtime_data.client) BangOlufsenMediaPlayer(config_entry, config_entry.runtime_data.client)
], ]
update_before_add=True,
) )
# Register actions. # Register actions.
@@ -267,8 +266,34 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
self._software_status.software_version, self._software_status.software_version,
) )
# Get overall device state once. This is handled by WebSocket events the rest of the time.
product_state = await self._client.get_product_state()
# Get volume information.
if product_state.volume:
self._volume = product_state.volume
# Get all playback information.
# Ensure that the metadata is not None upon startup
if product_state.playback:
if product_state.playback.metadata:
self._playback_metadata = product_state.playback.metadata
self._remote_leader = product_state.playback.metadata.remote_leader
if product_state.playback.progress:
self._playback_progress = product_state.playback.progress
if product_state.playback.source:
self._source_change = product_state.playback.source
if product_state.playback.state:
self._playback_state = product_state.playback.state
# Set initial state
if self._playback_state.value:
self._state = self._playback_state.value
self._attr_media_position_updated_at = utcnow() self._attr_media_position_updated_at = utcnow()
# Get the highest resolution available of the given images.
self._media_image = get_highest_resolution_artwork(self._playback_metadata)
# If the device has been updated with new sources, then the API will fail here. # If the device has been updated with new sources, then the API will fail here.
await self._async_update_sources() await self._async_update_sources()

View File

@@ -3,12 +3,16 @@ beolink_allstandby:
entity: entity:
integration: bang_olufsen integration: bang_olufsen
domain: media_player domain: media_player
device:
integration: bang_olufsen
beolink_expand: beolink_expand:
target: target:
entity: entity:
integration: bang_olufsen integration: bang_olufsen
domain: media_player domain: media_player
device:
integration: bang_olufsen
fields: fields:
all_discovered: all_discovered:
required: false required: false
@@ -33,6 +37,8 @@ beolink_join:
entity: entity:
integration: bang_olufsen integration: bang_olufsen
domain: media_player domain: media_player
device:
integration: bang_olufsen
fields: fields:
jid_options: jid_options:
collapsed: false collapsed: false
@@ -65,12 +71,16 @@ beolink_leave:
entity: entity:
integration: bang_olufsen integration: bang_olufsen
domain: media_player domain: media_player
device:
integration: bang_olufsen
beolink_unexpand: beolink_unexpand:
target: target:
entity: entity:
integration: bang_olufsen integration: bang_olufsen
domain: media_player domain: media_player
device:
integration: bang_olufsen
fields: fields:
jid_options: jid_options:
collapsed: false collapsed: false

View File

@@ -272,13 +272,6 @@ async def async_setup_entry(
observations: list[ConfigType] = [ observations: list[ConfigType] = [
dict(subentry.data) for subentry in config_entry.subentries.values() dict(subentry.data) for subentry in config_entry.subentries.values()
] ]
for observation in observations:
if observation[CONF_PLATFORM] == CONF_TEMPLATE:
observation[CONF_VALUE_TEMPLATE] = Template(
observation[CONF_VALUE_TEMPLATE], hass
)
prior: float = config[CONF_PRIOR] prior: float = config[CONF_PRIOR]
probability_threshold: float = config[CONF_PROBABILITY_THRESHOLD] probability_threshold: float = config[CONF_PROBABILITY_THRESHOLD]
device_class: BinarySensorDeviceClass | None = config.get(CONF_DEVICE_CLASS) device_class: BinarySensorDeviceClass | None = config.get(CONF_DEVICE_CLASS)

View File

@@ -13,30 +13,20 @@ from bluecurrent_api.exceptions import (
RequestLimitReached, RequestLimitReached,
WebsocketError, WebsocketError,
) )
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_TOKEN, CONF_DEVICE_ID, Platform from homeassistant.const import CONF_API_TOKEN, Platform
from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ( from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
ConfigEntryAuthFailed,
ConfigEntryNotReady,
ServiceValidationError,
)
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.typing import ConfigType
from .const import ( from .const import (
BCU_APP,
CHARGEPOINT_SETTINGS, CHARGEPOINT_SETTINGS,
CHARGEPOINT_STATUS, CHARGEPOINT_STATUS,
CHARGING_CARD_ID,
DOMAIN, DOMAIN,
EVSE_ID, EVSE_ID,
LOGGER, LOGGER,
PLUG_AND_CHARGE, PLUG_AND_CHARGE,
SERVICE_START_CHARGE_SESSION,
VALUE, VALUE,
) )
@@ -44,7 +34,6 @@ type BlueCurrentConfigEntry = ConfigEntry[Connector]
PLATFORMS = [Platform.BUTTON, Platform.SENSOR, Platform.SWITCH] PLATFORMS = [Platform.BUTTON, Platform.SENSOR, Platform.SWITCH]
CHARGE_POINTS = "CHARGE_POINTS" CHARGE_POINTS = "CHARGE_POINTS"
CHARGE_CARDS = "CHARGE_CARDS"
DATA = "data" DATA = "data"
DELAY = 5 DELAY = 5
@@ -52,16 +41,6 @@ GRID = "GRID"
OBJECT = "object" OBJECT = "object"
VALUE_TYPES = [CHARGEPOINT_STATUS, CHARGEPOINT_SETTINGS] VALUE_TYPES = [CHARGEPOINT_STATUS, CHARGEPOINT_SETTINGS]
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
SERVICE_START_CHARGE_SESSION_SCHEMA = vol.Schema(
{
vol.Required(CONF_DEVICE_ID): cv.string,
# When no charging card is provided, use no charging card (BCU_APP = no charging card).
vol.Optional(CHARGING_CARD_ID, default=BCU_APP): cv.string,
}
)
async def async_setup_entry( async def async_setup_entry(
hass: HomeAssistant, config_entry: BlueCurrentConfigEntry hass: HomeAssistant, config_entry: BlueCurrentConfigEntry
@@ -88,66 +67,6 @@ async def async_setup_entry(
return True return True
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up Blue Current."""
async def start_charge_session(service_call: ServiceCall) -> None:
"""Start a charge session with the provided device and charge card ID."""
# When no charge card is provided, use the default charge card set in the config flow.
charging_card_id = service_call.data[CHARGING_CARD_ID]
device_id = service_call.data[CONF_DEVICE_ID]
# Get the device based on the given device ID.
device = dr.async_get(hass).devices.get(device_id)
if device is None:
raise ServiceValidationError(
translation_domain=DOMAIN, translation_key="invalid_device_id"
)
blue_current_config_entry: ConfigEntry | None = None
for config_entry_id in device.config_entries:
config_entry = hass.config_entries.async_get_entry(config_entry_id)
if not config_entry or config_entry.domain != DOMAIN:
# Not the blue_current config entry.
continue
if config_entry.state is not ConfigEntryState.LOADED:
raise ServiceValidationError(
translation_domain=DOMAIN, translation_key="config_entry_not_loaded"
)
blue_current_config_entry = config_entry
break
if not blue_current_config_entry:
# The device is not connected to a valid blue_current config entry.
raise ServiceValidationError(
translation_domain=DOMAIN, translation_key="no_config_entry"
)
connector = blue_current_config_entry.runtime_data
# Get the evse_id from the identifier of the device.
evse_id = next(
identifier[1]
for identifier in device.identifiers
if identifier[0] == DOMAIN
)
await connector.client.start_session(evse_id, charging_card_id)
hass.services.async_register(
DOMAIN,
SERVICE_START_CHARGE_SESSION,
start_charge_session,
SERVICE_START_CHARGE_SESSION_SCHEMA,
)
return True
async def async_unload_entry( async def async_unload_entry(
hass: HomeAssistant, config_entry: BlueCurrentConfigEntry hass: HomeAssistant, config_entry: BlueCurrentConfigEntry
) -> bool: ) -> bool:
@@ -168,7 +87,6 @@ class Connector:
self.client = client self.client = client
self.charge_points: dict[str, dict] = {} self.charge_points: dict[str, dict] = {}
self.grid: dict[str, Any] = {} self.grid: dict[str, Any] = {}
self.charge_cards: dict[str, dict[str, Any]] = {}
async def on_data(self, message: dict) -> None: async def on_data(self, message: dict) -> None:
"""Handle received data.""" """Handle received data."""

View File

@@ -8,12 +8,6 @@ LOGGER = logging.getLogger(__package__)
EVSE_ID = "evse_id" EVSE_ID = "evse_id"
MODEL_TYPE = "model_type" MODEL_TYPE = "model_type"
CARD = "card"
UID = "uid"
BCU_APP = "BCU-APP"
WITHOUT_CHARGING_CARD = "without_charging_card"
CHARGING_CARD_ID = "charging_card_id"
SERVICE_START_CHARGE_SESSION = "start_charge_session"
PLUG_AND_CHARGE = "plug_and_charge" PLUG_AND_CHARGE = "plug_and_charge"
VALUE = "value" VALUE = "value"
PERMISSION = "permission" PERMISSION = "permission"

View File

@@ -42,10 +42,5 @@
"default": "mdi:lock" "default": "mdi:lock"
} }
} }
},
"services": {
"start_charge_session": {
"service": "mdi:play"
}
} }
} }

View File

@@ -1,12 +0,0 @@
start_charge_session:
fields:
device_id:
selector:
device:
integration: blue_current
required: true
charging_card_id:
selector:
text:
required: false

View File

@@ -22,16 +22,6 @@
"wrong_account": "Wrong account: Please authenticate with the API token for {email}." "wrong_account": "Wrong account: Please authenticate with the API token for {email}."
} }
}, },
"options": {
"step": {
"init": {
"data": {
"card": "Card"
},
"description": "Select the default charging card you want to use"
}
}
},
"entity": { "entity": {
"sensor": { "sensor": {
"activity": { "activity": {
@@ -146,39 +136,5 @@
"name": "Block charge point" "name": "Block charge point"
} }
} }
},
"selector": {
"select_charging_card": {
"options": {
"without_charging_card": "Without charging card"
}
}
},
"services": {
"start_charge_session": {
"name": "Start charge session",
"description": "Starts a new charge session on a specified charge point.",
"fields": {
"charging_card_id": {
"name": "Charging card ID",
"description": "Optional charging card ID that will be used to start a charge session. When not provided, no charging card will be used."
},
"device_id": {
"name": "Device ID",
"description": "The ID of the Blue Current charge point."
}
}
}
},
"exceptions": {
"invalid_device_id": {
"message": "Invalid device ID given."
},
"config_entry_not_loaded": {
"message": "Config entry not loaded."
},
"no_config_entry": {
"message": "Device has not a valid blue_current config entry."
}
} }
} }

View File

@@ -10,7 +10,6 @@ from asyncio import Future
from collections.abc import Callable, Iterable from collections.abc import Callable, Iterable
from typing import TYPE_CHECKING, cast from typing import TYPE_CHECKING, cast
from bleak import BleakScanner
from habluetooth import ( from habluetooth import (
BaseHaScanner, BaseHaScanner,
BluetoothScannerDevice, BluetoothScannerDevice,
@@ -39,16 +38,13 @@ def _get_manager(hass: HomeAssistant) -> HomeAssistantBluetoothManager:
@hass_callback @hass_callback
def async_get_scanner(hass: HomeAssistant) -> BleakScanner: def async_get_scanner(hass: HomeAssistant) -> HaBleakScannerWrapper:
"""Return a HaBleakScannerWrapper cast to BleakScanner. """Return a HaBleakScannerWrapper.
This is a wrapper around our BleakScanner singleton that allows This is a wrapper around our BleakScanner singleton that allows
multiple integrations to share the same BleakScanner. multiple integrations to share the same BleakScanner.
The wrapper is cast to BleakScanner for type compatibility with
libraries expecting a BleakScanner instance.
""" """
return cast(BleakScanner, HaBleakScannerWrapper()) return HaBleakScannerWrapper()
@hass_callback @hass_callback
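
Whichever column you read, async_get_scanner hands back the shared HaBleakScannerWrapper; the cast only matters to type checkers of code written against bleak. A hypothetical consumer (the function and flow are illustrative, not from this diff):

    from bleak import BleakClient

    from homeassistant.components import bluetooth
    from homeassistant.core import HomeAssistant

    async def connect_by_address(hass: HomeAssistant, address: str) -> BleakClient | None:
        """Connect to a device already seen by Home Assistant's shared scanner."""
        scanner = bluetooth.async_get_scanner(hass)  # shared scanner, BleakScanner-typed
        device = next(
            (dev for dev in scanner.discovered_devices if dev.address == address), None
        )
        if device is None:
            return None
        client = BleakClient(device)
        await client.connect()
        return client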

View File

@@ -19,8 +19,8 @@
"bleak-retry-connector==4.4.3", "bleak-retry-connector==4.4.3",
"bluetooth-adapters==2.1.0", "bluetooth-adapters==2.1.0",
"bluetooth-auto-recovery==1.5.3", "bluetooth-auto-recovery==1.5.3",
"bluetooth-data-tools==1.28.3", "bluetooth-data-tools==1.28.2",
"dbus-fast==2.44.5", "dbus-fast==2.44.3",
"habluetooth==5.7.0" "habluetooth==5.6.4"
] ]
} }

View File

@@ -315,7 +315,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
hass.http.register_view(CalendarListView(component)) hass.http.register_view(CalendarListView(component))
hass.http.register_view(CalendarEventView(component)) hass.http.register_view(CalendarEventView(component))
frontend.async_register_built_in_panel(hass, "calendar", "calendar", "mdi:calendar") frontend.async_register_built_in_panel(
hass, "calendar", "calendar", "hass:calendar"
)
websocket_api.async_register_command(hass, handle_calendar_event_create) websocket_api.async_register_command(hass, handle_calendar_event_create)
websocket_api.async_register_command(hass, handle_calendar_event_delete) websocket_api.async_register_command(hass, handle_calendar_event_delete)

View File

@@ -51,6 +51,12 @@ from homeassistant.const import (
from homeassistant.core import Event, HomeAssistant, ServiceCall, callback from homeassistant.core import Event, HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, issue_registry as ir from homeassistant.helpers import config_validation as cv, issue_registry as ir
from homeassistant.helpers.deprecation import (
DeprecatedConstantEnum,
all_with_deprecated_constants,
check_if_deprecated_constant,
dir_with_deprecated_constants,
)
from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.event import async_track_time_interval
@@ -112,6 +118,12 @@ ATTR_FILENAME: Final = "filename"
ATTR_MEDIA_PLAYER: Final = "media_player" ATTR_MEDIA_PLAYER: Final = "media_player"
ATTR_FORMAT: Final = "format" ATTR_FORMAT: Final = "format"
# These constants are deprecated as of Home Assistant 2024.10
# Please use the StreamType enum instead.
_DEPRECATED_STATE_RECORDING = DeprecatedConstantEnum(CameraState.RECORDING, "2025.10")
_DEPRECATED_STATE_STREAMING = DeprecatedConstantEnum(CameraState.STREAMING, "2025.10")
_DEPRECATED_STATE_IDLE = DeprecatedConstantEnum(CameraState.IDLE, "2025.10")
class CameraEntityFeature(IntFlag): class CameraEntityFeature(IntFlag):
"""Supported features of the camera entity.""" """Supported features of the camera entity."""
@@ -1105,3 +1117,11 @@ async def async_handle_record_service(
duration=service_call.data[CONF_DURATION], duration=service_call.data[CONF_DURATION],
lookback=service_call.data[CONF_LOOKBACK], lookback=service_call.data[CONF_LOOKBACK],
) )
# These can be removed if no deprecated constant are in this module anymore
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())
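
One column wires up the deprecation shims: the module-level __getattr__/__dir__/__all__ hooks keep the old STATE_* constants importable until 2025.10 while logging a warning and resolving them to CameraState members. Consumer-side, the effect is roughly this (a sketch, assuming the public names follow the usual _DEPRECATED_ prefix convention):

    from homeassistant.components import camera

    # The old spelling still resolves, but logs a deprecation warning
    # pointing at the CameraState enum.
    state = camera.STATE_RECORDING
    assert state == camera.CameraState.RECORDING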

View File

@@ -53,6 +53,7 @@ from .const import (
CONF_ACME_SERVER, CONF_ACME_SERVER,
CONF_ALEXA, CONF_ALEXA,
CONF_ALIASES, CONF_ALIASES,
CONF_CLOUDHOOK_SERVER,
CONF_COGNITO_CLIENT_ID, CONF_COGNITO_CLIENT_ID,
CONF_ENTITY_CONFIG, CONF_ENTITY_CONFIG,
CONF_FILTER, CONF_FILTER,
@@ -129,6 +130,7 @@ CONFIG_SCHEMA = vol.Schema(
vol.Optional(CONF_ACCOUNT_LINK_SERVER): str, vol.Optional(CONF_ACCOUNT_LINK_SERVER): str,
vol.Optional(CONF_ACCOUNTS_SERVER): str, vol.Optional(CONF_ACCOUNTS_SERVER): str,
vol.Optional(CONF_ACME_SERVER): str, vol.Optional(CONF_ACME_SERVER): str,
vol.Optional(CONF_CLOUDHOOK_SERVER): str,
vol.Optional(CONF_RELAYER_SERVER): str, vol.Optional(CONF_RELAYER_SERVER): str,
vol.Optional(CONF_REMOTESTATE_SERVER): str, vol.Optional(CONF_REMOTESTATE_SERVER): str,
vol.Optional(CONF_SERVICEHANDLERS_SERVER): str, vol.Optional(CONF_SERVICEHANDLERS_SERVER): str,

View File

@@ -78,6 +78,7 @@ CONF_USER_POOL_ID = "user_pool_id"
CONF_ACCOUNT_LINK_SERVER = "account_link_server" CONF_ACCOUNT_LINK_SERVER = "account_link_server"
CONF_ACCOUNTS_SERVER = "accounts_server" CONF_ACCOUNTS_SERVER = "accounts_server"
CONF_ACME_SERVER = "acme_server" CONF_ACME_SERVER = "acme_server"
CONF_CLOUDHOOK_SERVER = "cloudhook_server"
CONF_RELAYER_SERVER = "relayer_server" CONF_RELAYER_SERVER = "relayer_server"
CONF_REMOTESTATE_SERVER = "remotestate_server" CONF_REMOTESTATE_SERVER = "remotestate_server"
CONF_SERVICEHANDLERS_SERVER = "servicehandlers_server" CONF_SERVICEHANDLERS_SERVER = "servicehandlers_server"

View File

@@ -13,6 +13,6 @@
"integration_type": "system", "integration_type": "system",
"iot_class": "cloud_push", "iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"], "loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==1.2.0"], "requirements": ["hass-nabucasa==1.1.1"],
"single_config_entry": true "single_config_entry": true
} }

View File

@@ -1,106 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
The integration does not provide any actions.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage:
status: todo
comment: |
Stale docstring and test name: `test_form_home` and reusing result.
Extract `async_setup_entry` into own fixture.
Avoid importing `config_flow` in tests.
Test reauth with errors
config-flow:
status: todo
comment: |
The config flow is missing data descriptions.
Remove URLs from data descriptions; they should be replaced with placeholders.
Make use of Electricity Maps zone keys in country code as dropdown.
Make use of location selector for coordinates.
dependency-transparency: done
docs-actions:
status: exempt
comment: |
The integration does not provide any actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: |
Entities of this integration do not explicitly subscribe to events.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: todo
# Silver
action-exceptions:
status: exempt
comment: |
The integration does not provide any actions.
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: |
The integration does not provide any additional options.
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: todo
reauthentication-flow: done
test-coverage:
status: todo
comment: |
Use `hass.config_entries.async_setup` instead of `assert await async_setup_component(hass, DOMAIN, {})`.
`test_sensor` could use `snapshot_platform`.
# Gold
devices: done
diagnostics: done
discovery-update-info:
status: exempt
comment: |
This integration cannot be discovered; it connects to a cloud service.
discovery:
status: exempt
comment: |
This integration cannot be discovered; it connects to a cloud service.
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices:
status: exempt
comment: |
The integration connects to a single service per configuration entry.
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: |
This integration does not raise any repairable issues.
stale-devices:
status: exempt
comment: |
This integration connects to a single device per configuration entry.
# Platinum
async-dependency: done
inject-websession: done
strict-typing: done

View File

@@ -29,24 +29,11 @@ async def async_setup_entry(
coordinator = cast(ComelitVedoSystem, config_entry.runtime_data) coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
known_devices: set[int] = set()
def _check_device() -> None:
current_devices = set(coordinator.data["alarm_zones"])
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities( async_add_entities(
ComelitVedoBinarySensorEntity( ComelitVedoBinarySensorEntity(coordinator, device, config_entry.entry_id)
coordinator, device, config_entry.entry_id
)
for device in coordinator.data["alarm_zones"].values() for device in coordinator.data["alarm_zones"].values()
if device.index in new_devices
) )
_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
class ComelitVedoBinarySensorEntity( class ComelitVedoBinarySensorEntity(
CoordinatorEntity[ComelitVedoSystem], BinarySensorEntity CoordinatorEntity[ComelitVedoSystem], BinarySensorEntity

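One side of the hunk above carries the dynamic-device pattern: remember which device indexes already have entities and, on every coordinator refresh, add entities only for the new ones. The sketch below is runnable with stand-in classes; `FakeCoordinator`, `Device`, and `add_entities` are placeholders for HA's `DataUpdateCoordinator` and `AddEntitiesCallback`, not the real integration objects.

```python
"""Self-contained sketch of dynamic device addition via a coordinator listener."""
from dataclasses import dataclass


@dataclass
class Device:
    index: int
    name: str


class FakeCoordinator:
    """Stand-in for a DataUpdateCoordinator: holds data and update listeners."""

    def __init__(self) -> None:
        self.data: dict[int, Device] = {}
        self._listeners: list = []

    def async_add_listener(self, listener):
        self._listeners.append(listener)
        return lambda: self._listeners.remove(listener)

    def set_data(self, data: dict[int, Device]) -> None:
        self.data = data
        for listener in self._listeners:
            listener()


def add_entities(entities) -> None:
    for entity in entities:
        print(f"added entity for {entity.name}")


def setup_dynamic_entities(coordinator: FakeCoordinator) -> None:
    known_devices: set[int] = set()

    def _check_device() -> None:
        # Only devices not seen before get new entities.
        new_devices = set(coordinator.data) - known_devices
        if new_devices:
            known_devices.update(new_devices)
            add_entities(
                device
                for device in coordinator.data.values()
                if device.index in new_devices
            )

    _check_device()  # devices present at setup
    coordinator.async_add_listener(_check_device)  # devices appearing later


coordinator = FakeCoordinator()
setup_dynamic_entities(coordinator)
coordinator.set_data({1: Device(1, "zone 1")})                          # adds zone 1
coordinator.set_data({1: Device(1, "zone 1"), 2: Device(2, "zone 2")})  # adds only zone 2
```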
View File

@@ -25,27 +25,23 @@ from .const import _LOGGER, DEFAULT_PORT, DEVICE_TYPE_LIST, DOMAIN
from .utils import async_client_session from .utils import async_client_session
DEFAULT_HOST = "192.168.1.252" DEFAULT_HOST = "192.168.1.252"
DEFAULT_PIN = "111111" DEFAULT_PIN = 111111
pin_regex = r"^[0-9]{4,10}$"
USER_SCHEMA = vol.Schema( USER_SCHEMA = vol.Schema(
{ {
vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string, vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex), vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.positive_int,
vol.Required(CONF_TYPE, default=BRIDGE): vol.In(DEVICE_TYPE_LIST), vol.Required(CONF_TYPE, default=BRIDGE): vol.In(DEVICE_TYPE_LIST),
} }
) )
STEP_REAUTH_DATA_SCHEMA = vol.Schema( STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.positive_int})
{vol.Required(CONF_PIN): cv.matches_regex(pin_regex)}
)
STEP_RECONFIGURE = vol.Schema( STEP_RECONFIGURE = vol.Schema(
{ {
vol.Required(CONF_HOST): cv.string, vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORT): cv.port, vol.Required(CONF_PORT): cv.port,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex), vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.positive_int,
} }
) )

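The schema change above swaps `cv.positive_int` for a regex match so PINs keep their leading zeros. A minimal sketch assuming only `voluptuous`; the `matches_pin` validator below stands in for HA's `cv.matches_regex` and is not the integration's actual code.

```python
"""Why a PIN should be validated as a string, not an int."""
import re

import voluptuous as vol

PIN_REGEX = r"^[0-9]{4,10}$"


def matches_pin(value: str) -> str:
    """Validate a 4-10 digit PIN, keeping it as a string."""
    if not re.fullmatch(PIN_REGEX, str(value)):
        raise vol.Invalid("PIN must be 4 to 10 digits")
    return str(value)


SCHEMA = vol.Schema({vol.Required("pin"): matches_pin})

print(SCHEMA({"pin": "0123"}))  # {'pin': '0123'} - leading zero preserved
print(int("0123"))              # 123 - what an int-based validator would store
```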
View File

@@ -29,22 +29,11 @@ async def async_setup_entry(
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data) coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
known_devices: set[int] = set()
def _check_device() -> None:
current_devices = set(coordinator.data[COVER])
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities( async_add_entities(
ComelitCoverEntity(coordinator, device, config_entry.entry_id) ComelitCoverEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[COVER].values() for device in coordinator.data[COVER].values()
if device.index in new_devices
) )
_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity): class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
"""Cover device.""" """Cover device."""

View File

@@ -27,22 +27,11 @@ async def async_setup_entry(
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data) coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
known_devices: set[int] = set()
def _check_device() -> None:
current_devices = set(coordinator.data[LIGHT])
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities( async_add_entities(
ComelitLightEntity(coordinator, device, config_entry.entry_id) ComelitLightEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[LIGHT].values() for device in coordinator.data[LIGHT].values()
if device.index in new_devices
) )
_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
class ComelitLightEntity(ComelitBridgeBaseEntity, LightEntity): class ComelitLightEntity(ComelitBridgeBaseEntity, LightEntity):
"""Light device.""" """Light device."""

View File

@@ -7,6 +7,6 @@
"integration_type": "hub", "integration_type": "hub",
"iot_class": "local_polling", "iot_class": "local_polling",
"loggers": ["aiocomelit"], "loggers": ["aiocomelit"],
"quality_scale": "platinum", "quality_scale": "silver",
"requirements": ["aiocomelit==0.12.3"] "requirements": ["aiocomelit==0.12.3"]
} }

View File

@@ -57,7 +57,9 @@ rules:
docs-supported-functions: done docs-supported-functions: done
docs-troubleshooting: done docs-troubleshooting: done
docs-use-cases: done docs-use-cases: done
dynamic-devices: done dynamic-devices:
status: todo
comment: missing implementation
entity-category: entity-category:
status: exempt status: exempt
comment: no config or diagnostic entities comment: no config or diagnostic entities

View File

@@ -4,7 +4,7 @@ from __future__ import annotations
from typing import Final, cast from typing import Final, cast
from aiocomelit.api import ComelitSerialBridgeObject, ComelitVedoZoneObject from aiocomelit import ComelitSerialBridgeObject, ComelitVedoZoneObject
from aiocomelit.const import BRIDGE, OTHER, AlarmZoneState from aiocomelit.const import BRIDGE, OTHER, AlarmZoneState
from homeassistant.components.sensor import ( from homeassistant.components.sensor import (
@@ -65,24 +65,15 @@ async def async_setup_bridge_entry(
coordinator = cast(ComelitSerialBridge, config_entry.runtime_data) coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
known_devices: set[int] = set() entities: list[ComelitBridgeSensorEntity] = []
for device in coordinator.data[OTHER].values():
def _check_device() -> None: entities.extend(
current_devices = set(coordinator.data[OTHER])
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
ComelitBridgeSensorEntity( ComelitBridgeSensorEntity(
coordinator, device, config_entry.entry_id, sensor_desc coordinator, device, config_entry.entry_id, sensor_desc
) )
for sensor_desc in SENSOR_BRIDGE_TYPES for sensor_desc in SENSOR_BRIDGE_TYPES
for device in coordinator.data[OTHER].values()
if device.index in new_devices
) )
async_add_entities(entities)
_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
async def async_setup_vedo_entry( async def async_setup_vedo_entry(
@@ -94,24 +85,15 @@ async def async_setup_vedo_entry(
coordinator = cast(ComelitVedoSystem, config_entry.runtime_data) coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
known_devices: set[int] = set() entities: list[ComelitVedoSensorEntity] = []
for device in coordinator.data["alarm_zones"].values():
def _check_device() -> None: entities.extend(
current_devices = set(coordinator.data["alarm_zones"])
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
ComelitVedoSensorEntity( ComelitVedoSensorEntity(
coordinator, device, config_entry.entry_id, sensor_desc coordinator, device, config_entry.entry_id, sensor_desc
) )
for sensor_desc in SENSOR_VEDO_TYPES for sensor_desc in SENSOR_VEDO_TYPES
for device in coordinator.data["alarm_zones"].values()
if device.index in new_devices
) )
async_add_entities(entities)
_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
class ComelitBridgeSensorEntity(ComelitBridgeBaseEntity, SensorEntity): class ComelitBridgeSensorEntity(ComelitBridgeBaseEntity, SensorEntity):

View File

@@ -39,25 +39,6 @@ async def async_setup_entry(
) )
async_add_entities(entities) async_add_entities(entities)
known_devices: dict[str, set[int]] = {
dev_type: set() for dev_type in (IRRIGATION, OTHER)
}
def _check_device() -> None:
for dev_type in (IRRIGATION, OTHER):
current_devices = set(coordinator.data[dev_type])
new_devices = current_devices - known_devices[dev_type]
if new_devices:
known_devices[dev_type].update(new_devices)
async_add_entities(
ComelitSwitchEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[dev_type].values()
if device.index in new_devices
)
_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
class ComelitSwitchEntity(ComelitBridgeBaseEntity, SwitchEntity): class ComelitSwitchEntity(ComelitBridgeBaseEntity, SwitchEntity):
"""Switch device.""" """Switch device."""

View File

@@ -49,7 +49,7 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the config component.""" """Set up the config component."""
frontend.async_register_built_in_panel( frontend.async_register_built_in_panel(
hass, "config", "config", "mdi:cog", require_admin=True hass, "config", "config", "hass:cog", require_admin=True
) )
for panel in SECTIONS: for panel in SECTIONS:

View File

@@ -4,7 +4,6 @@ from __future__ import annotations
from collections.abc import Callable from collections.abc import Callable
from http import HTTPStatus from http import HTTPStatus
import logging
from typing import Any, NoReturn from typing import Any, NoReturn
from aiohttp import web from aiohttp import web
@@ -24,12 +23,7 @@ from homeassistant.helpers.data_entry_flow import (
FlowManagerResourceView, FlowManagerResourceView,
) )
from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.json import ( from homeassistant.helpers.json import json_fragment
JSON_DUMP,
find_paths_unserializable_data,
json_bytes,
json_fragment,
)
from homeassistant.loader import ( from homeassistant.loader import (
Integration, Integration,
IntegrationNotFound, IntegrationNotFound,
@@ -37,9 +31,6 @@ from homeassistant.loader import (
async_get_integrations, async_get_integrations,
async_get_loaded_integration, async_get_loaded_integration,
) )
from homeassistant.util.json import format_unserializable_data
_LOGGER = logging.getLogger(__name__)
@callback @callback
@@ -411,40 +402,18 @@ def config_entries_flow_subscribe(
connection.subscriptions[msg["id"]] = hass.config_entries.flow.async_subscribe_flow( connection.subscriptions[msg["id"]] = hass.config_entries.flow.async_subscribe_flow(
async_on_flow_init_remove async_on_flow_init_remove
) )
try: connection.send_message(
serialized_flows = [ websocket_api.event_message(
json_bytes({"type": None, "flow_id": flw["flow_id"], "flow": flw}) msg["id"],
[
{"type": None, "flow_id": flw["flow_id"], "flow": flw}
for flw in hass.config_entries.flow.async_progress() for flw in hass.config_entries.flow.async_progress()
if flw["context"]["source"] if flw["context"]["source"]
not in ( not in (
config_entries.SOURCE_RECONFIGURE, config_entries.SOURCE_RECONFIGURE,
config_entries.SOURCE_USER, config_entries.SOURCE_USER,
) )
] ],
except (ValueError, TypeError):
# If we can't serialize, we'll filter out unserializable flows
serialized_flows = []
for flw in hass.config_entries.flow.async_progress():
if flw["context"]["source"] in (
config_entries.SOURCE_RECONFIGURE,
config_entries.SOURCE_USER,
):
continue
try:
serialized_flows.append(
json_bytes({"type": None, "flow_id": flw["flow_id"], "flow": flw})
)
except (ValueError, TypeError):
_LOGGER.error(
"Unable to serialize to JSON. Bad data found at %s",
format_unserializable_data(
find_paths_unserializable_data(flw, dump=JSON_DUMP)
),
)
continue
connection.send_message(
websocket_api.messages.construct_event_message(
msg["id"], b"".join((b"[", b",".join(serialized_flows), b"]"))
) )
) )
connection.send_result(msg["id"]) connection.send_result(msg["id"])

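The replacement websocket handler above serializes all flows up front and, if that fails, falls back to serializing flow by flow so a single unserializable flow cannot break the whole event. The sketch below shows the same strategy with the standard `json` module; HA itself uses its `json_bytes` helper and `format_unserializable_data`, which are not reproduced here.

```python
"""Generic per-item JSON serialization fallback (illustrative only)."""
import json
import logging

_LOGGER = logging.getLogger(__name__)


def serialize_flows(flows: list[dict]) -> bytes:
    """Serialize flows to a JSON array, dropping items that cannot be encoded."""
    try:
        # Fast path: assume everything is serializable.
        items = [json.dumps(flow).encode() for flow in flows]
    except (TypeError, ValueError):
        # Slow path: serialize one flow at a time and skip the bad ones.
        items = []
        for flow in flows:
            try:
                items.append(json.dumps(flow).encode())
            except (TypeError, ValueError):
                _LOGGER.error(
                    "Unable to serialize flow %s to JSON", flow.get("flow_id")
                )
    return b"[" + b",".join(items) + b"]"


print(serialize_flows([{"flow_id": "abc", "step": 1}]))  # b'[{"flow_id": "abc", "step": 1}]'
```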
View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation", "documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "entity", "integration_type": "entity",
"quality_scale": "internal", "quality_scale": "internal",
"requirements": ["hassil==3.2.0", "home-assistant-intents==2025.10.1"] "requirements": ["hassil==3.2.0", "home-assistant-intents==2025.9.3"]
} }

View File

@@ -1,58 +0,0 @@
"""The Cync integration."""
from __future__ import annotations
from pycync import Auth, Cync, User
from pycync.exceptions import AuthFailedError, CyncError
from homeassistant.const import CONF_ACCESS_TOKEN, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import (
CONF_AUTHORIZE_STRING,
CONF_EXPIRES_AT,
CONF_REFRESH_TOKEN,
CONF_USER_ID,
)
from .coordinator import CyncConfigEntry, CyncCoordinator
_PLATFORMS: list[Platform] = [Platform.LIGHT]
async def async_setup_entry(hass: HomeAssistant, entry: CyncConfigEntry) -> bool:
"""Set up Cync from a config entry."""
user_info = User(
entry.data[CONF_ACCESS_TOKEN],
entry.data[CONF_REFRESH_TOKEN],
entry.data[CONF_AUTHORIZE_STRING],
entry.data[CONF_USER_ID],
expires_at=entry.data[CONF_EXPIRES_AT],
)
cync_auth = Auth(async_get_clientsession(hass), user=user_info)
try:
cync = await Cync.create(cync_auth)
except AuthFailedError as ex:
raise ConfigEntryAuthFailed("User token invalid") from ex
except CyncError as ex:
raise ConfigEntryNotReady("Unable to connect to Cync") from ex
devices_coordinator = CyncCoordinator(hass, entry, cync)
cync.set_update_callback(devices_coordinator.on_data_update)
await devices_coordinator.async_config_entry_first_refresh()
entry.runtime_data = devices_coordinator
await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: CyncConfigEntry) -> bool:
"""Unload a config entry."""
cync = entry.runtime_data.cync
await cync.shut_down()
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)

View File

@@ -1,118 +0,0 @@
"""Config flow for the Cync integration."""
from __future__ import annotations
import logging
from typing import Any
from pycync import Auth
from pycync.exceptions import AuthFailedError, CyncError, TwoFactorRequiredError
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_EMAIL, CONF_PASSWORD
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import (
CONF_AUTHORIZE_STRING,
CONF_EXPIRES_AT,
CONF_REFRESH_TOKEN,
CONF_TWO_FACTOR_CODE,
CONF_USER_ID,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_EMAIL): str,
vol.Required(CONF_PASSWORD): str,
}
)
STEP_TWO_FACTOR_SCHEMA = vol.Schema({vol.Required(CONF_TWO_FACTOR_CODE): str})
class CyncConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Cync."""
VERSION = 1
cync_auth: Auth
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Attempt login with user credentials."""
errors: dict[str, str] = {}
if user_input is None:
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
self.cync_auth = Auth(
async_get_clientsession(self.hass),
username=user_input[CONF_EMAIL],
password=user_input[CONF_PASSWORD],
)
try:
await self.cync_auth.login()
except AuthFailedError:
errors["base"] = "invalid_auth"
except TwoFactorRequiredError:
return await self.async_step_two_factor()
except CyncError:
errors["base"] = "cannot_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return await self._create_config_entry(self.cync_auth.username)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
async def async_step_two_factor(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Attempt login with the two factor auth code sent to the user."""
errors: dict[str, str] = {}
if user_input is None:
return self.async_show_form(
step_id="two_factor", data_schema=STEP_TWO_FACTOR_SCHEMA, errors=errors
)
try:
await self.cync_auth.login(user_input[CONF_TWO_FACTOR_CODE])
except AuthFailedError:
errors["base"] = "invalid_auth"
except CyncError:
errors["base"] = "cannot_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return await self._create_config_entry(self.cync_auth.username)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
async def _create_config_entry(self, user_email: str) -> ConfigFlowResult:
"""Create the Cync config entry using input user data."""
cync_user = self.cync_auth.user
await self.async_set_unique_id(str(cync_user.user_id))
self._abort_if_unique_id_configured()
config = {
CONF_USER_ID: cync_user.user_id,
CONF_AUTHORIZE_STRING: cync_user.authorize,
CONF_EXPIRES_AT: cync_user.expires_at,
CONF_ACCESS_TOKEN: cync_user.access_token,
CONF_REFRESH_TOKEN: cync_user.refresh_token,
}
return self.async_create_entry(title=user_email, data=config)

View File

@@ -1,9 +0,0 @@
"""Constants for the Cync integration."""
DOMAIN = "cync"
CONF_TWO_FACTOR_CODE = "two_factor_code"
CONF_USER_ID = "user_id"
CONF_AUTHORIZE_STRING = "authorize_string"
CONF_EXPIRES_AT = "expires_at"
CONF_REFRESH_TOKEN = "refresh_token"

View File

@@ -1,87 +0,0 @@
"""Coordinator to handle keeping device states up to date."""
from __future__ import annotations
from datetime import timedelta
import logging
import time
from pycync import Cync, CyncDevice, User
from pycync.exceptions import AuthFailedError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .const import CONF_EXPIRES_AT, CONF_REFRESH_TOKEN
_LOGGER = logging.getLogger(__name__)
type CyncConfigEntry = ConfigEntry[CyncCoordinator]
class CyncCoordinator(DataUpdateCoordinator[dict[int, CyncDevice]]):
"""Coordinator to handle updating Cync device states."""
config_entry: CyncConfigEntry
def __init__(
self, hass: HomeAssistant, config_entry: CyncConfigEntry, cync: Cync
) -> None:
"""Initialize the Cync coordinator."""
super().__init__(
hass,
_LOGGER,
name="Cync Data Coordinator",
config_entry=config_entry,
update_interval=timedelta(seconds=30),
always_update=True,
)
self.cync = cync
async def on_data_update(self, data: dict[int, CyncDevice]) -> None:
"""Update registered devices with new data."""
merged_data = self.data | data if self.data else data
self.async_set_updated_data(merged_data)
async def _async_setup(self) -> None:
"""Set up the coordinator with initial device states."""
logged_in_user = self.cync.get_logged_in_user()
if logged_in_user.access_token != self.config_entry.data[CONF_ACCESS_TOKEN]:
await self._update_config_cync_credentials(logged_in_user)
async def _async_update_data(self) -> dict[int, CyncDevice]:
"""First, refresh the user's auth token if it is set to expire in less than one hour.
Then, fetch all current device states.
"""
logged_in_user = self.cync.get_logged_in_user()
if logged_in_user.expires_at - time.time() < 3600:
await self._async_refresh_cync_credentials()
self.cync.update_device_states()
current_device_states = self.cync.get_devices()
return {device.device_id: device for device in current_device_states}
async def _async_refresh_cync_credentials(self) -> None:
"""Attempt to refresh the Cync user's authentication token."""
try:
refreshed_user = await self.cync.refresh_credentials()
except AuthFailedError as ex:
raise ConfigEntryAuthFailed("Unable to refresh user token") from ex
else:
await self._update_config_cync_credentials(refreshed_user)
async def _update_config_cync_credentials(self, user_info: User) -> None:
"""Update the config entry with current user info."""
new_data = {**self.config_entry.data}
new_data[CONF_ACCESS_TOKEN] = user_info.access_token
new_data[CONF_REFRESH_TOKEN] = user_info.refresh_token
new_data[CONF_EXPIRES_AT] = user_info.expires_at
self.hass.config_entries.async_update_entry(self.config_entry, data=new_data)

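The coordinator above refreshes the Cync token whenever less than an hour of validity remains. Here is a tiny, self-contained illustration of that rule; `TokenSession` and its `refresh` are hypothetical, whereas the real coordinator calls `cync.refresh_credentials()` and persists the new token on the config entry.

```python
"""Sketch of the proactive token-refresh rule (hypothetical session object)."""
import asyncio
import time


class TokenSession:
    """Hypothetical token holder used only to illustrate the refresh rule."""

    def __init__(self, access_token: str, expires_at: float) -> None:
        self.access_token = access_token
        self.expires_at = expires_at

    async def refresh(self) -> None:
        """Pretend to call the cloud API for a new token."""
        self.access_token = "new-token"
        self.expires_at = time.time() + 24 * 3600

    async def ensure_fresh(self) -> None:
        # Mirrors `expires_at - time.time() < 3600` in the coordinator above:
        # renew proactively when less than one hour of validity remains.
        if self.expires_at - time.time() < 3600:
            await self.refresh()


async def main() -> None:
    session = TokenSession("old-token", expires_at=time.time() + 600)
    await session.ensure_fresh()  # only 10 minutes left -> refreshed
    print(session.access_token)   # new-token


asyncio.run(main())
```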
View File

@@ -1,45 +0,0 @@
"""Setup for a generic entity type for the Cync integration."""
from pycync.devices import CyncDevice
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import CyncCoordinator
class CyncBaseEntity(CoordinatorEntity[CyncCoordinator]):
"""Generic base entity for Cync devices."""
_attr_has_entity_name = True
def __init__(
self,
device: CyncDevice,
coordinator: CyncCoordinator,
room_name: str | None = None,
) -> None:
"""Pass coordinator to CoordinatorEntity."""
super().__init__(coordinator)
self._cync_device_id = device.device_id
self._attr_unique_id = device.unique_id
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, device.unique_id)},
manufacturer="GE Lighting",
name=device.name,
suggested_area=room_name,
)
@property
def available(self) -> bool:
"""Determines whether this device is currently available."""
return (
super().available
and self.coordinator.data is not None
and self._cync_device_id in self.coordinator.data
and self.coordinator.data[self._cync_device_id].is_online
)

View File

@@ -1,180 +0,0 @@
"""Support for Cync light entities."""
from typing import Any
from pycync import CyncLight
from pycync.devices.capabilities import CyncCapability
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP_KELVIN,
ATTR_RGB_COLOR,
ColorMode,
LightEntity,
filter_supported_color_modes,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.color import value_to_brightness
from homeassistant.util.scaling import scale_ranged_value_to_int_range
from .coordinator import CyncConfigEntry, CyncCoordinator
from .entity import CyncBaseEntity
async def async_setup_entry(
hass: HomeAssistant,
entry: CyncConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Cync lights from a config entry."""
coordinator = entry.runtime_data
cync = coordinator.cync
entities_to_add = []
for home in cync.get_homes():
for room in home.rooms:
room_lights = [
CyncLightEntity(device, coordinator, room.name)
for device in room.devices
if isinstance(device, CyncLight)
]
entities_to_add.extend(room_lights)
group_lights = [
CyncLightEntity(device, coordinator, room.name)
for group in room.groups
for device in group.devices
if isinstance(device, CyncLight)
]
entities_to_add.extend(group_lights)
async_add_entities(entities_to_add)
class CyncLightEntity(CyncBaseEntity, LightEntity):
"""Representation of a Cync light."""
_attr_color_mode = ColorMode.ONOFF
_attr_min_color_temp_kelvin = 2000
_attr_max_color_temp_kelvin = 7000
_attr_translation_key = "light"
_attr_name = None
BRIGHTNESS_SCALE = (0, 100)
def __init__(
self,
device: CyncLight,
coordinator: CyncCoordinator,
room_name: str | None = None,
) -> None:
"""Set up base attributes."""
super().__init__(device, coordinator, room_name)
supported_color_modes = {ColorMode.ONOFF}
if device.supports_capability(CyncCapability.CCT_COLOR):
supported_color_modes.add(ColorMode.COLOR_TEMP)
if device.supports_capability(CyncCapability.DIMMING):
supported_color_modes.add(ColorMode.BRIGHTNESS)
if device.supports_capability(CyncCapability.RGB_COLOR):
supported_color_modes.add(ColorMode.RGB)
self._attr_supported_color_modes = filter_supported_color_modes(
supported_color_modes
)
@property
def is_on(self) -> bool | None:
"""Return True if the light is on."""
return self._device.is_on
@property
def brightness(self) -> int:
"""Provide the light's current brightness."""
return value_to_brightness(self.BRIGHTNESS_SCALE, self._device.brightness)
@property
def color_temp_kelvin(self) -> int:
"""Return color temperature in kelvin."""
return scale_ranged_value_to_int_range(
(1, 100),
(self.min_color_temp_kelvin, self.max_color_temp_kelvin),
self._device.color_temp,
)
@property
def rgb_color(self) -> tuple[int, int, int]:
"""Provide the light's current color in RGB format."""
return self._device.rgb
@property
def color_mode(self) -> str | None:
"""Return the active color mode."""
if (
self._device.supports_capability(CyncCapability.CCT_COLOR)
and self._device.color_mode > 0
and self._device.color_mode <= 100
):
return ColorMode.COLOR_TEMP
if (
self._device.supports_capability(CyncCapability.RGB_COLOR)
and self._device.color_mode == 254
):
return ColorMode.RGB
if self._device.supports_capability(CyncCapability.DIMMING):
return ColorMode.BRIGHTNESS
return ColorMode.ONOFF
async def async_turn_on(self, **kwargs: Any) -> None:
"""Process an action on the light."""
if not kwargs:
await self._device.turn_on()
elif kwargs.get(ATTR_COLOR_TEMP_KELVIN) is not None:
color_temp = kwargs.get(ATTR_COLOR_TEMP_KELVIN)
converted_color_temp = self._normalize_color_temp(color_temp)
await self._device.set_color_temp(converted_color_temp)
elif kwargs.get(ATTR_RGB_COLOR) is not None:
rgb = kwargs.get(ATTR_RGB_COLOR)
await self._device.set_rgb(rgb)
elif kwargs.get(ATTR_BRIGHTNESS) is not None:
brightness = kwargs.get(ATTR_BRIGHTNESS)
converted_brightness = self._normalize_brightness(brightness)
await self._device.set_brightness(converted_brightness)
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn off the light."""
await self._device.turn_off()
def _normalize_brightness(self, brightness: float | None) -> int | None:
"""Return calculated brightness value scaled between 0-100."""
if brightness is not None:
return int((brightness / 255) * 100)
return None
def _normalize_color_temp(self, color_temp_kelvin: float | None) -> int | None:
"""Return calculated color temp value scaled between 1-100."""
if color_temp_kelvin is not None:
kelvin_range = self.max_color_temp_kelvin - self.min_color_temp_kelvin
scaled_kelvin = int(
((color_temp_kelvin - self.min_color_temp_kelvin) / kelvin_range) * 100
)
if scaled_kelvin == 0:
scaled_kelvin += 1
return scaled_kelvin
return None
@property
def _device(self) -> CyncLight:
"""Fetch the reference to the backing Cync light for this device."""
return self.coordinator.data[self._cync_device_id]

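The `_normalize_*` helpers above convert between Home Assistant's units and the device's: brightness 0-255 is scaled to 0-100, and a Kelvin value inside the entity's 2000-7000 K range is mapped onto a 1-100 scale. A worked sketch of the same arithmetic; the constants are copied from the entity, while the free functions are illustrative rather than the entity's methods.

```python
"""Worked example of the brightness and color-temperature scaling."""
MIN_KELVIN = 2000
MAX_KELVIN = 7000


def normalize_brightness(brightness: int) -> int:
    """Scale HA brightness (0-255) to the device range (0-100)."""
    return int((brightness / 255) * 100)


def normalize_color_temp(kelvin: int) -> int:
    """Scale Kelvin onto the device's 1-100 scale, avoiding 0."""
    kelvin_range = MAX_KELVIN - MIN_KELVIN
    scaled = int(((kelvin - MIN_KELVIN) / kelvin_range) * 100)
    return max(scaled, 1)


print(normalize_brightness(128))   # 50
print(normalize_color_temp(2000))  # 1  (clamped away from 0)
print(normalize_color_temp(4500))  # 50
```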
View File

@@ -1,11 +0,0 @@
{
"domain": "cync",
"name": "Cync",
"codeowners": ["@Kinachi249"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/cync",
"integration_type": "hub",
"iot_class": "cloud_push",
"quality_scale": "bronze",
"requirements": ["pycync==0.4.1"]
}

View File

@@ -1,69 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
This integration does not provide additional actions.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: |
This integration does not provide additional actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup: done
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: |
This integration does not provide additional actions.
config-entry-unloading: done
docs-configuration-parameters: todo
docs-installation-parameters: todo
entity-unavailable: todo
integration-owner: done
log-when-unavailable: todo
parallel-updates: todo
reauthentication-flow: todo
test-coverage: todo
# Gold
devices: done
diagnostics: todo
discovery-update-info: todo
discovery: todo
docs-data-update: todo
docs-examples: todo
docs-known-limitations: done
docs-supported-devices: todo
docs-supported-functions: done
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices: todo
entity-category: todo
entity-device-class: todo
entity-disabled-by-default: todo
entity-translations: todo
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
repair-issues: todo
stale-devices: todo
# Platinum
async-dependency: done
inject-websession: done
strict-typing: todo

View File

@@ -1,32 +0,0 @@
{
"config": {
"step": {
"user": {
"data": {
"email": "[%key:common::config_flow::data::email%]",
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"email": "Your Cync account's email address",
"password": "Your Cync account's password"
}
},
"two_factor": {
"data": {
"two_factor_code": "Two-factor code"
},
"data_description": {
"two_factor_code": "The two-factor code sent to your Cync account's email"
}
}
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
}
}
}

View File

@@ -32,7 +32,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry, entry,
options={**entry.options, CONF_SOURCE: source_entity_id}, options={**entry.options, CONF_SOURCE: source_entity_id},
) )
hass.config_entries.async_schedule_reload(entry.entry_id)
entry.async_on_unload( entry.async_on_unload(
async_handle_source_entity_changes( async_handle_source_entity_changes(
@@ -47,9 +46,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
) )
) )
await hass.config_entries.async_forward_entry_setups(entry, (Platform.SENSOR,)) await hass.config_entries.async_forward_entry_setups(entry, (Platform.SENSOR,))
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
return True return True
async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Update listener, called when the config entry options are changed."""
await hass.config_entries.async_reload(entry.entry_id)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry.""" """Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, (Platform.SENSOR,)) return await hass.config_entries.async_unload_platforms(entry, (Platform.SENSOR,))

View File

@@ -140,7 +140,6 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW options_flow = OPTIONS_FLOW
options_flow_reloads = True
VERSION = 1 VERSION = 1
MINOR_VERSION = 4 MINOR_VERSION = 4

View File

@@ -6,13 +6,12 @@ from typing import TYPE_CHECKING, Any, Protocol
import voluptuous as vol import voluptuous as vol
from homeassistant.const import CONF_DOMAIN, CONF_OPTIONS from homeassistant.const import CONF_DOMAIN
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.condition import ( from homeassistant.helpers.condition import (
Condition, Condition,
ConditionCheckerType, ConditionCheckerType,
ConditionConfig,
trace_condition_function, trace_condition_function,
) )
from homeassistant.helpers.typing import ConfigType from homeassistant.helpers.typing import ConfigType
@@ -56,40 +55,19 @@ class DeviceAutomationConditionProtocol(Protocol):
class DeviceCondition(Condition): class DeviceCondition(Condition):
"""Device condition.""" """Device condition."""
_hass: HomeAssistant def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
_config: ConfigType """Initialize condition."""
self._config = config
@classmethod self._hass = hass
async def async_validate_complete_config(
cls, hass: HomeAssistant, complete_config: ConfigType
) -> ConfigType:
"""Validate complete config."""
complete_config = await async_validate_device_automation_config(
hass,
complete_config,
cv.DEVICE_CONDITION_SCHEMA,
DeviceAutomationType.CONDITION,
)
# Since we don't want to migrate device conditions to a new format
# we just pass the entire config as options.
complete_config[CONF_OPTIONS] = complete_config.copy()
return complete_config
@classmethod @classmethod
async def async_validate_config( async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType: ) -> ConfigType:
"""Validate config. """Validate device condition config."""
return await async_validate_device_automation_config(
This is here just to satisfy the abstract class interface. It is never called. hass, config, cv.DEVICE_CONDITION_SCHEMA, DeviceAutomationType.CONDITION
""" )
raise NotImplementedError
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
"""Initialize condition."""
self._hass = hass
assert config.options is not None
self._config = config.options
async def async_get_checker(self) -> condition.ConditionCheckerType: async def async_get_checker(self) -> condition.ConditionCheckerType:
"""Test a device condition.""" """Test a device condition."""

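The `async_validate_complete_config` override above avoids migrating legacy device-condition configs by copying the whole dict into `CONF_OPTIONS`, so the new-style constructor can read it back from `config.options`. Below is a self-contained sketch of that idea with stand-in classes; these are not HA's actual `Condition`/`ConditionConfig` types.

```python
"""Sketch: wrap a legacy config in an options key instead of migrating it."""
from dataclasses import dataclass
from typing import Any

CONF_OPTIONS = "options"


@dataclass
class ConditionConfig:
    """Simplified stand-in for the helper's condition config."""

    options: dict[str, Any] | None = None


def validate_complete_config(complete_config: dict[str, Any]) -> dict[str, Any]:
    """Copy the legacy dict into CONF_OPTIONS without migrating its keys."""
    complete_config[CONF_OPTIONS] = complete_config.copy()
    return complete_config


legacy = {"condition": "device", "device_id": "abc123", "type": "is_on"}
validated = validate_complete_config(legacy)
config = ConditionConfig(options=validated[CONF_OPTIONS])
print(config.options["device_id"])  # abc123
```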
View File

@@ -126,7 +126,7 @@ class DevoloRemoteControl(DevoloDeviceEntity, BinarySensorEntity):
self._attr_translation_key = "button" self._attr_translation_key = "button"
self._attr_translation_placeholders = {"key": str(key)} self._attr_translation_placeholders = {"key": str(key)}
def sync_callback(self, message: tuple) -> None: def _sync(self, message: tuple) -> None:
"""Update the binary sensor state.""" """Update the binary sensor state."""
if ( if (
message[0] == self._remote_control_property.element_uid message[0] == self._remote_control_property.element_uid

View File

@@ -48,6 +48,7 @@ class DevoloDeviceEntity(Entity):
) )
self.subscriber: Subscriber | None = None self.subscriber: Subscriber | None = None
self.sync_callback = self._sync
self._value: float self._value: float
@@ -68,7 +69,7 @@ class DevoloDeviceEntity(Entity):
self._device_instance.uid, self.subscriber self._device_instance.uid, self.subscriber
) )
def sync_callback(self, message: tuple) -> None: def _sync(self, message: tuple) -> None:
"""Update the state.""" """Update the state."""
if message[0] == self._attr_unique_id: if message[0] == self._attr_unique_id:
self._value = message[1] self._value = message[1]

View File

@@ -185,7 +185,7 @@ class DevoloConsumptionEntity(DevoloMultiLevelDeviceEntity):
""" """
return f"{self._attr_unique_id}_{self._sensor_type}" return f"{self._attr_unique_id}_{self._sensor_type}"
def sync_callback(self, message: tuple) -> None: def _sync(self, message: tuple) -> None:
"""Update the consumption sensor state.""" """Update the consumption sensor state."""
if message[0] == self._attr_unique_id: if message[0] == self._attr_unique_id:
self._value = getattr( self._value = getattr(

View File

@@ -13,3 +13,8 @@ class Subscriber:
"""Initiate the subscriber.""" """Initiate the subscriber."""
self.name = name self.name = name
self.callback = callback self.callback = callback
def update(self, message: str) -> None:
"""Trigger hass to update the device."""
_LOGGER.debug('%s got message "%s"', self.name, message)
self.callback(message)

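The `update` method restored above is the glue between devolo's publisher and the entity: the subscriber logs the message and hands it to the entity's sync callback. A self-contained sketch of that wiring follows; `FakeEntity` is a stand-in for `DevoloDeviceEntity`, not the integration's class.

```python
"""Sketch of the Subscriber -> entity callback dispatch."""
import logging

_LOGGER = logging.getLogger(__name__)


class Subscriber:
    """Forward publisher messages to an entity callback."""

    def __init__(self, name: str, callback) -> None:
        self.name = name
        self.callback = callback

    def update(self, message) -> None:
        """Trigger the entity to update from a publisher message."""
        _LOGGER.debug('%s got message "%s"', self.name, message)
        self.callback(message)


class FakeEntity:
    """Stand-in for a DevoloDeviceEntity with a sync callback."""

    def __init__(self, uid: str) -> None:
        self.uid = uid
        self.value = None

    def sync_callback(self, message: tuple) -> None:
        if message[0] == self.uid:
            self.value = message[1]


entity = FakeEntity("devolo.Meter:hdm:ZWave:1")
subscriber = Subscriber(entity.uid, entity.sync_callback)
subscriber.update((entity.uid, 42.0))
print(entity.value)  # 42.0
```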
View File

@@ -64,7 +64,7 @@ class DevoloSwitch(DevoloDeviceEntity, SwitchEntity):
"""Switch off the device.""" """Switch off the device."""
self._binary_switch_property.set(state=False) self._binary_switch_property.set(state=False)
def sync_callback(self, message: tuple) -> None: def _sync(self, message: tuple) -> None:
"""Update the binary switch state and consumption.""" """Update the binary switch state and consumption."""
if message[0].startswith("devolo.BinarySwitch"): if message[0].startswith("devolo.BinarySwitch"):
self._attr_is_on = self._device_instance.binary_switch_property[ self._attr_is_on = self._device_instance.binary_switch_property[

View File

@@ -17,6 +17,6 @@
"requirements": [ "requirements": [
"aiodhcpwatcher==1.2.1", "aiodhcpwatcher==1.2.1",
"aiodiscover==2.7.1", "aiodiscover==2.7.1",
"cached-ipaddress==1.0.1" "cached-ipaddress==0.10.0"
] ]
} }

Some files were not shown because too many files have changed in this diff.