mirror of
				https://github.com/home-assistant/core.git
				synced 2025-10-31 14:39:27 +00:00 
			
		
		
		
	Compare commits
	
		
			5 Commits
		
	
	
		
			zjs-config
			...
			otbr_retry
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|   | e2b2b05d60 | ||
|   | a298d0b10e | ||
|   | cdb51fc777 | ||
|   | a4a525124d | ||
|   | 693940350c | 
| @@ -58,7 +58,6 @@ base_platforms: &base_platforms | ||||
| # Extra components that trigger the full suite | ||||
| components: &components | ||||
|   - homeassistant/components/alexa/** | ||||
|   - homeassistant/components/analytics/** | ||||
|   - homeassistant/components/application_credentials/** | ||||
|   - homeassistant/components/assist_pipeline/** | ||||
|   - homeassistant/components/auth/** | ||||
|   | ||||
							
								
								
									
										14
									
								
								.github/workflows/builder.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										14
									
								
								.github/workflows/builder.yml
									
									
									
									
										vendored
									
									
								
							| @@ -190,7 +190,7 @@ jobs: | ||||
|           echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE | ||||
|  | ||||
|       - name: Login to GitHub Container Registry | ||||
|         uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 | ||||
|         uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
| @@ -198,7 +198,7 @@ jobs: | ||||
|  | ||||
|       # home-assistant/builder doesn't support sha pinning | ||||
|       - name: Build base image | ||||
|         uses: home-assistant/builder@2025.09.0 | ||||
|         uses: home-assistant/builder@2025.03.0 | ||||
|         with: | ||||
|           args: | | ||||
|             $BUILD_ARGS \ | ||||
| @@ -257,7 +257,7 @@ jobs: | ||||
|           fi | ||||
|  | ||||
|       - name: Login to GitHub Container Registry | ||||
|         uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 | ||||
|         uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
| @@ -265,7 +265,7 @@ jobs: | ||||
|  | ||||
|       # home-assistant/builder doesn't support sha pinning | ||||
|       - name: Build base image | ||||
|         uses: home-assistant/builder@2025.09.0 | ||||
|         uses: home-assistant/builder@2025.03.0 | ||||
|         with: | ||||
|           args: | | ||||
|             $BUILD_ARGS \ | ||||
| @@ -332,14 +332,14 @@ jobs: | ||||
|  | ||||
|       - name: Login to DockerHub | ||||
|         if: matrix.registry == 'docker.io/homeassistant' | ||||
|         uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 | ||||
|         uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 | ||||
|         with: | ||||
|           username: ${{ secrets.DOCKERHUB_USERNAME }} | ||||
|           password: ${{ secrets.DOCKERHUB_TOKEN }} | ||||
|  | ||||
|       - name: Login to GitHub Container Registry | ||||
|         if: matrix.registry == 'ghcr.io/home-assistant' | ||||
|         uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 | ||||
|         uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
| @@ -504,7 +504,7 @@ jobs: | ||||
|         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 | ||||
|  | ||||
|       - name: Login to GitHub Container Registry | ||||
|         uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 | ||||
|         uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
|   | ||||
							
								
								
									
										85
									
								
								.github/workflows/ci.yaml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										85
									
								
								.github/workflows/ci.yaml
									
									
									
									
										vendored
									
									
								
							| @@ -40,7 +40,7 @@ env: | ||||
|   CACHE_VERSION: 8 | ||||
|   UV_CACHE_VERSION: 1 | ||||
|   MYPY_CACHE_VERSION: 1 | ||||
|   HA_SHORT_VERSION: "2025.11" | ||||
|   HA_SHORT_VERSION: "2025.10" | ||||
|   DEFAULT_PYTHON: "3.13" | ||||
|   ALL_PYTHON_VERSIONS: "['3.13']" | ||||
|   # 10.3 is the oldest supported version | ||||
| @@ -263,7 +263,7 @@ jobs: | ||||
|           check-latest: true | ||||
|       - name: Restore base Python virtual environment | ||||
|         id: cache-venv | ||||
|         uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | ||||
|         uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 | ||||
|         with: | ||||
|           path: venv | ||||
|           key: >- | ||||
| @@ -279,7 +279,7 @@ jobs: | ||||
|           uv pip install "$(cat requirements_test.txt | grep pre-commit)" | ||||
|       - name: Restore pre-commit environment from cache | ||||
|         id: cache-precommit | ||||
|         uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | ||||
|         uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 | ||||
|         with: | ||||
|           path: ${{ env.PRE_COMMIT_CACHE }} | ||||
|           lookup-only: true | ||||
| @@ -309,7 +309,7 @@ jobs: | ||||
|           check-latest: true | ||||
|       - name: Restore base Python virtual environment | ||||
|         id: cache-venv | ||||
|         uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | ||||
|         uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 | ||||
|         with: | ||||
|           path: venv | ||||
|           fail-on-cache-miss: true | ||||
| @@ -318,7 +318,7 @@ jobs: | ||||
|               needs.info.outputs.pre-commit_cache_key }} | ||||
|       - name: Restore pre-commit environment from cache | ||||
|         id: cache-precommit | ||||
|         uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | ||||
|         uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 | ||||
|         with: | ||||
|           path: ${{ env.PRE_COMMIT_CACHE }} | ||||
|           fail-on-cache-miss: true | ||||
| @@ -349,7 +349,7 @@ jobs: | ||||
|           check-latest: true | ||||
|       - name: Restore base Python virtual environment | ||||
|         id: cache-venv | ||||
|         uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | ||||
|         uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 | ||||
|         with: | ||||
|           path: venv | ||||
|           fail-on-cache-miss: true | ||||
| @@ -358,7 +358,7 @@ jobs: | ||||
|               needs.info.outputs.pre-commit_cache_key }} | ||||
|       - name: Restore pre-commit environment from cache | ||||
|         id: cache-precommit | ||||
|         uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | ||||
|         uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 | ||||
|         with: | ||||
|           path: ${{ env.PRE_COMMIT_CACHE }} | ||||
|           fail-on-cache-miss: true | ||||
| @@ -389,7 +389,7 @@ jobs: | ||||
|           check-latest: true | ||||
|       - name: Restore base Python virtual environment | ||||
|         id: cache-venv | ||||
|         uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | ||||
|         uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 | ||||
|         with: | ||||
|           path: venv | ||||
|           fail-on-cache-miss: true | ||||
| @@ -398,7 +398,7 @@ jobs: | ||||
|               needs.info.outputs.pre-commit_cache_key }} | ||||
|       - name: Restore pre-commit environment from cache | ||||
|         id: cache-precommit | ||||
|         uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | ||||
|         uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 | ||||
|         with: | ||||
|           path: ${{ env.PRE_COMMIT_CACHE }} | ||||
|           fail-on-cache-miss: true | ||||
| @@ -505,7 +505,7 @@ jobs: | ||||
|             env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT | ||||
|       - name: Restore base Python virtual environment | ||||
|         id: cache-venv | ||||
|         uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | ||||
|         uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 | ||||
|         with: | ||||
|           path: venv | ||||
|           key: >- | ||||
| @@ -513,7 +513,7 @@ jobs: | ||||
|             needs.info.outputs.python_cache_key }} | ||||
|       - name: Restore uv wheel cache | ||||
|         if: steps.cache-venv.outputs.cache-hit != 'true' | ||||
|         uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | ||||
|         uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 | ||||
|         with: | ||||
|           path: ${{ env.UV_CACHE_DIR }} | ||||
|           key: >- | ||||
| @@ -523,24 +523,22 @@ jobs: | ||||
|             ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-uv-${{ | ||||
|             env.UV_CACHE_VERSION }}-${{ steps.generate-uv-key.outputs.version }}-${{ | ||||
|             env.HA_SHORT_VERSION }}- | ||||
|       - name: Check if apt cache exists | ||||
|         id: cache-apt-check | ||||
|         uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | ||||
|       - name: Restore apt cache | ||||
|         if: steps.cache-venv.outputs.cache-hit != 'true' | ||||
|         id: cache-apt | ||||
|         uses: actions/cache@v4.2.4 | ||||
|         with: | ||||
|           lookup-only: ${{ steps.cache-venv.outputs.cache-hit == 'true' }} | ||||
|           path: | | ||||
|             ${{ env.APT_CACHE_DIR }} | ||||
|             ${{ env.APT_LIST_CACHE_DIR }} | ||||
|           key: >- | ||||
|             ${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }} | ||||
|       - name: Install additional OS dependencies | ||||
|         if: | | ||||
|           steps.cache-venv.outputs.cache-hit != 'true' | ||||
|           || steps.cache-apt-check.outputs.cache-hit != 'true' | ||||
|         if: steps.cache-venv.outputs.cache-hit != 'true' | ||||
|         timeout-minutes: 10 | ||||
|         run: | | ||||
|           sudo rm /etc/apt/sources.list.d/microsoft-prod.list | ||||
|           if [[ "${{ steps.cache-apt-check.outputs.cache-hit }}" != 'true' ]]; then | ||||
|           if [[ "${{ steps.cache-apt.outputs.cache-hit }}" != 'true' ]]; then | ||||
|             mkdir -p ${{ env.APT_CACHE_DIR }} | ||||
|             mkdir -p ${{ env.APT_LIST_CACHE_DIR }} | ||||
|           fi | ||||
| @@ -565,18 +563,9 @@ jobs: | ||||
|             libswscale-dev \ | ||||
|             libudev-dev | ||||
|  | ||||
|           if [[ "${{ steps.cache-apt-check.outputs.cache-hit }}" != 'true' ]]; then | ||||
|           if [[ "${{ steps.cache-apt.outputs.cache-hit }}" != 'true' ]]; then | ||||
|             sudo chmod -R 755 ${{ env.APT_CACHE_BASE }} | ||||
|           fi | ||||
|       - name: Save apt cache | ||||
|         if: steps.cache-apt-check.outputs.cache-hit != 'true' | ||||
|         uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | ||||
|         with: | ||||
|           path: | | ||||
|             ${{ env.APT_CACHE_DIR }} | ||||
|             ${{ env.APT_LIST_CACHE_DIR }} | ||||
|           key: >- | ||||
|             ${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }} | ||||
|       - name: Create Python virtual environment | ||||
|         if: steps.cache-venv.outputs.cache-hit != 'true' | ||||
|         run: | | ||||
| @@ -622,7 +611,7 @@ jobs: | ||||
|       - base | ||||
|     steps: | ||||
|       - name: Restore apt cache | ||||
|         uses: actions/cache/restore@v4.3.0 | ||||
|         uses: actions/cache/restore@v4.2.4 | ||||
|         with: | ||||
|           path: | | ||||
|             ${{ env.APT_CACHE_DIR }} | ||||
| @@ -651,7 +640,7 @@ jobs: | ||||
|           check-latest: true | ||||
|       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment | ||||
|         id: cache-venv | ||||
|         uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | ||||
|         uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 | ||||
|         with: | ||||
|           path: venv | ||||
|           fail-on-cache-miss: true | ||||
| @@ -684,7 +673,7 @@ jobs: | ||||
|           check-latest: true | ||||
|       - name: Restore base Python virtual environment | ||||
|         id: cache-venv | ||||
|         uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | ||||
|         uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 | ||||
|         with: | ||||
|           path: venv | ||||
|           fail-on-cache-miss: true | ||||
| @@ -711,7 +700,7 @@ jobs: | ||||
|       - name: Check out code from GitHub | ||||
|         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 | ||||
|       - name: Dependency review | ||||
|         uses: actions/dependency-review-action@56339e523c0409420f6c2c9a2f4292bbb3c07dd3 # v4.8.0 | ||||
|         uses: actions/dependency-review-action@595b5aeba73380359d98a5e087f648dbb0edce1b # v4.7.3 | ||||
|         with: | ||||
|           license-check: false # We use our own license audit checks | ||||
|  | ||||
| @@ -741,7 +730,7 @@ jobs: | ||||
|           check-latest: true | ||||
|       - name: Restore full Python ${{ matrix.python-version }} virtual environment | ||||
|         id: cache-venv | ||||
|         uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | ||||
|         uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 | ||||
|         with: | ||||
|           path: venv | ||||
|           fail-on-cache-miss: true | ||||
| @@ -784,7 +773,7 @@ jobs: | ||||
|           check-latest: true | ||||
|       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment | ||||
|         id: cache-venv | ||||
|         uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | ||||
|         uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 | ||||
|         with: | ||||
|           path: venv | ||||
|           fail-on-cache-miss: true | ||||
| @@ -831,7 +820,7 @@ jobs: | ||||
|           check-latest: true | ||||
|       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment | ||||
|         id: cache-venv | ||||
|         uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | ||||
|         uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 | ||||
|         with: | ||||
|           path: venv | ||||
|           fail-on-cache-miss: true | ||||
| @@ -883,7 +872,7 @@ jobs: | ||||
|             env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT | ||||
|       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment | ||||
|         id: cache-venv | ||||
|         uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | ||||
|         uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 | ||||
|         with: | ||||
|           path: venv | ||||
|           fail-on-cache-miss: true | ||||
| @@ -891,7 +880,7 @@ jobs: | ||||
|             ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{ | ||||
|             needs.info.outputs.python_cache_key }} | ||||
|       - name: Restore mypy cache | ||||
|         uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | ||||
|         uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 | ||||
|         with: | ||||
|           path: .mypy_cache | ||||
|           key: >- | ||||
| @@ -935,7 +924,7 @@ jobs: | ||||
|     name: Split tests for full run | ||||
|     steps: | ||||
|       - name: Restore apt cache | ||||
|         uses: actions/cache/restore@v4.3.0 | ||||
|         uses: actions/cache/restore@v4.2.4 | ||||
|         with: | ||||
|           path: | | ||||
|             ${{ env.APT_CACHE_DIR }} | ||||
| @@ -967,7 +956,7 @@ jobs: | ||||
|           check-latest: true | ||||
|       - name: Restore base Python virtual environment | ||||
|         id: cache-venv | ||||
|         uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | ||||
|         uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 | ||||
|         with: | ||||
|           path: venv | ||||
|           fail-on-cache-miss: true | ||||
| @@ -1009,7 +998,7 @@ jobs: | ||||
|       Run tests Python ${{ matrix.python-version }} (${{ matrix.group }}) | ||||
|     steps: | ||||
|       - name: Restore apt cache | ||||
|         uses: actions/cache/restore@v4.3.0 | ||||
|         uses: actions/cache/restore@v4.2.4 | ||||
|         with: | ||||
|           path: | | ||||
|             ${{ env.APT_CACHE_DIR }} | ||||
| @@ -1042,7 +1031,7 @@ jobs: | ||||
|           check-latest: true | ||||
|       - name: Restore full Python ${{ matrix.python-version }} virtual environment | ||||
|         id: cache-venv | ||||
|         uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | ||||
|         uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 | ||||
|         with: | ||||
|           path: venv | ||||
|           fail-on-cache-miss: true | ||||
| @@ -1156,7 +1145,7 @@ jobs: | ||||
|       Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }} | ||||
|     steps: | ||||
|       - name: Restore apt cache | ||||
|         uses: actions/cache/restore@v4.3.0 | ||||
|         uses: actions/cache/restore@v4.2.4 | ||||
|         with: | ||||
|           path: | | ||||
|             ${{ env.APT_CACHE_DIR }} | ||||
| @@ -1189,7 +1178,7 @@ jobs: | ||||
|           check-latest: true | ||||
|       - name: Restore full Python ${{ matrix.python-version }} virtual environment | ||||
|         id: cache-venv | ||||
|         uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | ||||
|         uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 | ||||
|         with: | ||||
|           path: venv | ||||
|           fail-on-cache-miss: true | ||||
| @@ -1310,7 +1299,7 @@ jobs: | ||||
|       Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }} | ||||
|     steps: | ||||
|       - name: Restore apt cache | ||||
|         uses: actions/cache/restore@v4.3.0 | ||||
|         uses: actions/cache/restore@v4.2.4 | ||||
|         with: | ||||
|           path: | | ||||
|             ${{ env.APT_CACHE_DIR }} | ||||
| @@ -1345,7 +1334,7 @@ jobs: | ||||
|           check-latest: true | ||||
|       - name: Restore full Python ${{ matrix.python-version }} virtual environment | ||||
|         id: cache-venv | ||||
|         uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | ||||
|         uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 | ||||
|         with: | ||||
|           path: venv | ||||
|           fail-on-cache-miss: true | ||||
| @@ -1485,7 +1474,7 @@ jobs: | ||||
|       Run tests Python ${{ matrix.python-version }} (${{ matrix.group }}) | ||||
|     steps: | ||||
|       - name: Restore apt cache | ||||
|         uses: actions/cache/restore@v4.3.0 | ||||
|         uses: actions/cache/restore@v4.2.4 | ||||
|         with: | ||||
|           path: | | ||||
|             ${{ env.APT_CACHE_DIR }} | ||||
| @@ -1518,7 +1507,7 @@ jobs: | ||||
|           check-latest: true | ||||
|       - name: Restore full Python ${{ matrix.python-version }} virtual environment | ||||
|         id: cache-venv | ||||
|         uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | ||||
|         uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 | ||||
|         with: | ||||
|           path: venv | ||||
|           fail-on-cache-miss: true | ||||
|   | ||||
							
								
								
									
										4
									
								
								.github/workflows/codeql.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										4
									
								
								.github/workflows/codeql.yml
									
									
									
									
										vendored
									
									
								
							| @@ -24,11 +24,11 @@ jobs: | ||||
|         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 | ||||
|  | ||||
|       - name: Initialize CodeQL | ||||
|         uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5 | ||||
|         uses: github/codeql-action/init@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3 | ||||
|         with: | ||||
|           languages: python | ||||
|  | ||||
|       - name: Perform CodeQL Analysis | ||||
|         uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5 | ||||
|         uses: github/codeql-action/analyze@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3 | ||||
|         with: | ||||
|           category: "/language:python" | ||||
|   | ||||
							
								
								
									
										4
									
								
								.github/workflows/wheels.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										4
									
								
								.github/workflows/wheels.yml
									
									
									
									
										vendored
									
									
								
							| @@ -160,7 +160,7 @@ jobs: | ||||
|  | ||||
|       # home-assistant/wheels doesn't support sha pinning | ||||
|       - name: Build wheels | ||||
|         uses: home-assistant/wheels@2025.09.1 | ||||
|         uses: home-assistant/wheels@2025.07.0 | ||||
|         with: | ||||
|           abi: ${{ matrix.abi }} | ||||
|           tag: musllinux_1_2 | ||||
| @@ -221,7 +221,7 @@ jobs: | ||||
|  | ||||
|       # home-assistant/wheels doesn't support sha pinning | ||||
|       - name: Build wheels | ||||
|         uses: home-assistant/wheels@2025.09.1 | ||||
|         uses: home-assistant/wheels@2025.07.0 | ||||
|         with: | ||||
|           abi: ${{ matrix.abi }} | ||||
|           tag: musllinux_1_2 | ||||
|   | ||||
| @@ -142,7 +142,6 @@ homeassistant.components.cloud.* | ||||
| homeassistant.components.co2signal.* | ||||
| homeassistant.components.comelit.* | ||||
| homeassistant.components.command_line.* | ||||
| homeassistant.components.compit.* | ||||
| homeassistant.components.config.* | ||||
| homeassistant.components.configurator.* | ||||
| homeassistant.components.cookidoo.* | ||||
| @@ -203,7 +202,6 @@ homeassistant.components.feedreader.* | ||||
| homeassistant.components.file_upload.* | ||||
| homeassistant.components.filesize.* | ||||
| homeassistant.components.filter.* | ||||
| homeassistant.components.firefly_iii.* | ||||
| homeassistant.components.fitbit.* | ||||
| homeassistant.components.flexit_bacnet.* | ||||
| homeassistant.components.flux_led.* | ||||
| @@ -326,7 +324,6 @@ homeassistant.components.london_underground.* | ||||
| homeassistant.components.lookin.* | ||||
| homeassistant.components.lovelace.* | ||||
| homeassistant.components.luftdaten.* | ||||
| homeassistant.components.lunatone.* | ||||
| homeassistant.components.madvr.* | ||||
| homeassistant.components.manual.* | ||||
| homeassistant.components.mastodon.* | ||||
| @@ -445,7 +442,6 @@ homeassistant.components.rituals_perfume_genie.* | ||||
| homeassistant.components.roborock.* | ||||
| homeassistant.components.roku.* | ||||
| homeassistant.components.romy.* | ||||
| homeassistant.components.route_b_smart_meter.* | ||||
| homeassistant.components.rpi_power.* | ||||
| homeassistant.components.rss_feed_template.* | ||||
| homeassistant.components.russound_rio.* | ||||
|   | ||||
							
								
								
									
										36
									
								
								CODEOWNERS
									
									
									
										generated
									
									
									
								
							
							
						
						
									
										36
									
								
								CODEOWNERS
									
									
									
										generated
									
									
									
								
							| @@ -107,8 +107,8 @@ build.json @home-assistant/supervisor | ||||
| /homeassistant/components/ambient_station/ @bachya | ||||
| /tests/components/ambient_station/ @bachya | ||||
| /homeassistant/components/amcrest/ @flacjacket | ||||
| /homeassistant/components/analytics/ @home-assistant/core | ||||
| /tests/components/analytics/ @home-assistant/core | ||||
| /homeassistant/components/analytics/ @home-assistant/core @ludeeus | ||||
| /tests/components/analytics/ @home-assistant/core @ludeeus | ||||
| /homeassistant/components/analytics_insights/ @joostlek | ||||
| /tests/components/analytics_insights/ @joostlek | ||||
| /homeassistant/components/android_ip_webcam/ @engrbm87 | ||||
| @@ -292,8 +292,6 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/command_line/ @gjohansson-ST | ||||
| /homeassistant/components/compensation/ @Petro31 | ||||
| /tests/components/compensation/ @Petro31 | ||||
| /homeassistant/components/compit/ @Przemko92 | ||||
| /tests/components/compit/ @Przemko92 | ||||
| /homeassistant/components/config/ @home-assistant/core | ||||
| /tests/components/config/ @home-assistant/core | ||||
| /homeassistant/components/configurator/ @home-assistant/core | ||||
| @@ -316,8 +314,6 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/crownstone/ @Crownstone @RicArch97 | ||||
| /homeassistant/components/cups/ @fabaff | ||||
| /tests/components/cups/ @fabaff | ||||
| /homeassistant/components/cync/ @Kinachi249 | ||||
| /tests/components/cync/ @Kinachi249 | ||||
| /homeassistant/components/daikin/ @fredrike | ||||
| /tests/components/daikin/ @fredrike | ||||
| /homeassistant/components/date/ @home-assistant/core | ||||
| @@ -412,8 +408,6 @@ build.json @home-assistant/supervisor | ||||
| /homeassistant/components/egardia/ @jeroenterheerdt | ||||
| /homeassistant/components/eheimdigital/ @autinerd | ||||
| /tests/components/eheimdigital/ @autinerd | ||||
| /homeassistant/components/ekeybionyx/ @richardpolzer | ||||
| /tests/components/ekeybionyx/ @richardpolzer | ||||
| /homeassistant/components/electrasmart/ @jafar-atili | ||||
| /tests/components/electrasmart/ @jafar-atili | ||||
| /homeassistant/components/electric_kiwi/ @mikey0000 | ||||
| @@ -492,8 +486,6 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/filesize/ @gjohansson-ST | ||||
| /homeassistant/components/filter/ @dgomes | ||||
| /tests/components/filter/ @dgomes | ||||
| /homeassistant/components/firefly_iii/ @erwindouna | ||||
| /tests/components/firefly_iii/ @erwindouna | ||||
| /homeassistant/components/fireservicerota/ @cyberjunky | ||||
| /tests/components/fireservicerota/ @cyberjunky | ||||
| /homeassistant/components/firmata/ @DaAwesomeP | ||||
| @@ -778,8 +770,6 @@ build.json @home-assistant/supervisor | ||||
| /homeassistant/components/iqvia/ @bachya | ||||
| /tests/components/iqvia/ @bachya | ||||
| /homeassistant/components/irish_rail_transport/ @ttroy50 | ||||
| /homeassistant/components/irm_kmi/ @jdejaegh | ||||
| /tests/components/irm_kmi/ @jdejaegh | ||||
| /homeassistant/components/iron_os/ @tr4nt0r | ||||
| /tests/components/iron_os/ @tr4nt0r | ||||
| /homeassistant/components/isal/ @bdraco | ||||
| @@ -910,8 +900,6 @@ build.json @home-assistant/supervisor | ||||
| /homeassistant/components/luci/ @mzdrale | ||||
| /homeassistant/components/luftdaten/ @fabaff @frenck | ||||
| /tests/components/luftdaten/ @fabaff @frenck | ||||
| /homeassistant/components/lunatone/ @MoonDevLT | ||||
| /tests/components/lunatone/ @MoonDevLT | ||||
| /homeassistant/components/lupusec/ @majuss @suaveolent | ||||
| /tests/components/lupusec/ @majuss @suaveolent | ||||
| /homeassistant/components/lutron/ @cdheiser @wilburCForce | ||||
| @@ -957,8 +945,6 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/met_eireann/ @DylanGore | ||||
| /homeassistant/components/meteo_france/ @hacf-fr @oncleben31 @Quentame | ||||
| /tests/components/meteo_france/ @hacf-fr @oncleben31 @Quentame | ||||
| /homeassistant/components/meteo_lt/ @xE1H | ||||
| /tests/components/meteo_lt/ @xE1H | ||||
| /homeassistant/components/meteoalarm/ @rolfberkenbosch | ||||
| /homeassistant/components/meteoclimatic/ @adrianmo | ||||
| /tests/components/meteoclimatic/ @adrianmo | ||||
| @@ -982,6 +968,8 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/moat/ @bdraco | ||||
| /homeassistant/components/mobile_app/ @home-assistant/core | ||||
| /tests/components/mobile_app/ @home-assistant/core | ||||
| /homeassistant/components/modbus/ @janiversen | ||||
| /tests/components/modbus/ @janiversen | ||||
| /homeassistant/components/modem_callerid/ @tkdrob | ||||
| /tests/components/modem_callerid/ @tkdrob | ||||
| /homeassistant/components/modern_forms/ @wonderslug | ||||
| @@ -1340,8 +1328,6 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/roomba/ @pschmitt @cyr-ius @shenxn @Orhideous | ||||
| /homeassistant/components/roon/ @pavoni | ||||
| /tests/components/roon/ @pavoni | ||||
| /homeassistant/components/route_b_smart_meter/ @SeraphicRav | ||||
| /tests/components/route_b_smart_meter/ @SeraphicRav | ||||
| /homeassistant/components/rpi_power/ @shenxn @swetoast | ||||
| /tests/components/rpi_power/ @shenxn @swetoast | ||||
| /homeassistant/components/rss_feed_template/ @home-assistant/core | ||||
| @@ -1364,8 +1350,6 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/samsungtv/ @chemelli74 @epenet | ||||
| /homeassistant/components/sanix/ @tomaszsluszniak | ||||
| /tests/components/sanix/ @tomaszsluszniak | ||||
| /homeassistant/components/satel_integra/ @Tommatheussen | ||||
| /tests/components/satel_integra/ @Tommatheussen | ||||
| /homeassistant/components/scene/ @home-assistant/core | ||||
| /tests/components/scene/ @home-assistant/core | ||||
| /homeassistant/components/schedule/ @home-assistant/core | ||||
| @@ -1547,8 +1531,8 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/switchbee/ @jafar-atili | ||||
| /homeassistant/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski @zerzhang | ||||
| /tests/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski @zerzhang | ||||
| /homeassistant/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur @XiaoLing-git | ||||
| /tests/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur @XiaoLing-git | ||||
| /homeassistant/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur | ||||
| /tests/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur | ||||
| /homeassistant/components/switcher_kis/ @thecode @YogevBokobza | ||||
| /tests/components/switcher_kis/ @thecode @YogevBokobza | ||||
| /homeassistant/components/switchmate/ @danielhiversen @qiz-li | ||||
| @@ -1693,8 +1677,6 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/uptime_kuma/ @tr4nt0r | ||||
| /homeassistant/components/uptimerobot/ @ludeeus @chemelli74 | ||||
| /tests/components/uptimerobot/ @ludeeus @chemelli74 | ||||
| /homeassistant/components/usage_prediction/ @home-assistant/core | ||||
| /tests/components/usage_prediction/ @home-assistant/core | ||||
| /homeassistant/components/usb/ @bdraco | ||||
| /tests/components/usb/ @bdraco | ||||
| /homeassistant/components/usgs_earthquakes_feed/ @exxamalte | ||||
| @@ -1724,8 +1706,6 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven | ||||
| /homeassistant/components/vicare/ @CFenner | ||||
| /tests/components/vicare/ @CFenner | ||||
| /homeassistant/components/victron_remote_monitoring/ @AndyTempel | ||||
| /tests/components/victron_remote_monitoring/ @AndyTempel | ||||
| /homeassistant/components/vilfo/ @ManneW | ||||
| /tests/components/vilfo/ @ManneW | ||||
| /homeassistant/components/vivotek/ @HarlemSquirrel | ||||
| @@ -1741,8 +1721,8 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/volumio/ @OnFreund | ||||
| /homeassistant/components/volvo/ @thomasddn | ||||
| /tests/components/volvo/ @thomasddn | ||||
| /homeassistant/components/volvooncall/ @molobrakos @svrooij | ||||
| /tests/components/volvooncall/ @molobrakos @svrooij | ||||
| /homeassistant/components/volvooncall/ @molobrakos | ||||
| /tests/components/volvooncall/ @molobrakos | ||||
| /homeassistant/components/wake_on_lan/ @ntilley905 | ||||
| /tests/components/wake_on_lan/ @ntilley905 | ||||
| /homeassistant/components/wake_word/ @home-assistant/core @synesthesiam | ||||
|   | ||||
							
								
								
									
										10
									
								
								build.yaml
									
									
									
									
									
								
							
							
						
						
									
										10
									
								
								build.yaml
									
									
									
									
									
								
							| @@ -1,10 +1,10 @@ | ||||
| image: ghcr.io/home-assistant/{arch}-homeassistant | ||||
| build_from: | ||||
|   aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.10.0 | ||||
|   armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.10.0 | ||||
|   armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.10.0 | ||||
|   amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.0 | ||||
|   i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.10.0 | ||||
|   aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.1 | ||||
|   armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.1 | ||||
|   armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.1 | ||||
|   amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.1 | ||||
|   i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.1 | ||||
| codenotary: | ||||
|   signer: notary@home-assistant.io | ||||
|   base_image: notary@home-assistant.io | ||||
|   | ||||
| @@ -616,25 +616,12 @@ async def async_enable_logging( | ||||
|         ), | ||||
|     ) | ||||
|  | ||||
|     logger = logging.getLogger() | ||||
|     logger.setLevel(logging.INFO if verbose else logging.WARNING) | ||||
|  | ||||
|     # Log errors to a file if we have write access to file or config dir | ||||
|     if log_file is None: | ||||
|         default_log_path = hass.config.path(ERROR_LOG_FILENAME) | ||||
|         if "SUPERVISOR" in os.environ: | ||||
|             _LOGGER.info("Running in Supervisor, not logging to file") | ||||
|             # Rename the default log file if it exists, since previous versions created | ||||
|             # it even on Supervisor | ||||
|             if os.path.isfile(default_log_path): | ||||
|                 with contextlib.suppress(OSError): | ||||
|                     os.rename(default_log_path, f"{default_log_path}.old") | ||||
|             err_log_path = None | ||||
|         else: | ||||
|             err_log_path = default_log_path | ||||
|         err_log_path = hass.config.path(ERROR_LOG_FILENAME) | ||||
|     else: | ||||
|         err_log_path = os.path.abspath(log_file) | ||||
|  | ||||
|     if err_log_path: | ||||
|     err_path_exists = os.path.isfile(err_log_path) | ||||
|     err_dir = os.path.dirname(err_log_path) | ||||
|  | ||||
| @@ -648,7 +635,10 @@ async def async_enable_logging( | ||||
|         ) | ||||
|  | ||||
|         err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME)) | ||||
|  | ||||
|         logger = logging.getLogger() | ||||
|         logger.addHandler(err_handler) | ||||
|         logger.setLevel(logging.INFO if verbose else logging.WARNING) | ||||
|  | ||||
|         # Save the log file location for access by other components. | ||||
|         hass.data[DATA_LOGGING] = err_log_path | ||||
|   | ||||
| @@ -1,5 +0,0 @@ | ||||
| { | ||||
|   "domain": "eltako", | ||||
|   "name": "Eltako", | ||||
|   "iot_standards": ["matter"] | ||||
| } | ||||
| @@ -1,5 +0,0 @@ | ||||
| { | ||||
|   "domain": "konnected", | ||||
|   "name": "Konnected", | ||||
|   "integrations": ["konnected", "konnected_esphome"] | ||||
| } | ||||
| @@ -1,5 +0,0 @@ | ||||
| { | ||||
|   "domain": "level", | ||||
|   "name": "Level", | ||||
|   "iot_standards": ["matter"] | ||||
| } | ||||
| @@ -8,7 +8,6 @@ import logging | ||||
| from aioacaia.acaiascale import AcaiaScale | ||||
| from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError | ||||
|  | ||||
| from homeassistant.components.bluetooth import async_get_scanner | ||||
| from homeassistant.config_entries import ConfigEntry | ||||
| from homeassistant.const import CONF_ADDRESS | ||||
| from homeassistant.core import HomeAssistant | ||||
| @@ -43,7 +42,6 @@ class AcaiaCoordinator(DataUpdateCoordinator[None]): | ||||
|             name=entry.title, | ||||
|             is_new_style_scale=entry.data[CONF_IS_NEW_STYLE_SCALE], | ||||
|             notify_callback=self.async_update_listeners, | ||||
|             scanner=async_get_scanner(hass), | ||||
|         ) | ||||
|  | ||||
|     @property | ||||
|   | ||||
| @@ -26,5 +26,5 @@ | ||||
|   "iot_class": "local_push", | ||||
|   "loggers": ["aioacaia"], | ||||
|   "quality_scale": "platinum", | ||||
|   "requirements": ["aioacaia==0.1.17"] | ||||
|   "requirements": ["aioacaia==0.1.14"] | ||||
| } | ||||
|   | ||||
| @@ -3,7 +3,6 @@ | ||||
| from __future__ import annotations | ||||
|  | ||||
| from asyncio import timeout | ||||
| from collections.abc import Mapping | ||||
| from typing import Any | ||||
|  | ||||
| from accuweather import AccuWeather, ApiError, InvalidApiKeyError, RequestsExceededError | ||||
| @@ -23,8 +22,6 @@ class AccuWeatherFlowHandler(ConfigFlow, domain=DOMAIN): | ||||
|     """Config flow for AccuWeather.""" | ||||
|  | ||||
|     VERSION = 1 | ||||
|     _latitude: float | None = None | ||||
|     _longitude: float | None = None | ||||
|  | ||||
|     async def async_step_user( | ||||
|         self, user_input: dict[str, Any] | None = None | ||||
| @@ -77,46 +74,3 @@ class AccuWeatherFlowHandler(ConfigFlow, domain=DOMAIN): | ||||
|             ), | ||||
|             errors=errors, | ||||
|         ) | ||||
|  | ||||
|     async def async_step_reauth( | ||||
|         self, entry_data: Mapping[str, Any] | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Handle configuration by re-auth.""" | ||||
|         self._latitude = entry_data[CONF_LATITUDE] | ||||
|         self._longitude = entry_data[CONF_LONGITUDE] | ||||
|  | ||||
|         return await self.async_step_reauth_confirm() | ||||
|  | ||||
|     async def async_step_reauth_confirm( | ||||
|         self, user_input: dict[str, Any] | None = None | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Dialog that informs the user that reauth is required.""" | ||||
|         errors: dict[str, str] = {} | ||||
|  | ||||
|         if user_input is not None: | ||||
|             websession = async_get_clientsession(self.hass) | ||||
|             try: | ||||
|                 async with timeout(10): | ||||
|                     accuweather = AccuWeather( | ||||
|                         user_input[CONF_API_KEY], | ||||
|                         websession, | ||||
|                         latitude=self._latitude, | ||||
|                         longitude=self._longitude, | ||||
|                     ) | ||||
|                     await accuweather.async_get_location() | ||||
|             except (ApiError, ClientConnectorError, TimeoutError, ClientError): | ||||
|                 errors["base"] = "cannot_connect" | ||||
|             except InvalidApiKeyError: | ||||
|                 errors["base"] = "invalid_api_key" | ||||
|             except RequestsExceededError: | ||||
|                 errors["base"] = "requests_exceeded" | ||||
|             else: | ||||
|                 return self.async_update_reload_and_abort( | ||||
|                     self._get_reauth_entry(), data_updates=user_input | ||||
|                 ) | ||||
|  | ||||
|         return self.async_show_form( | ||||
|             step_id="reauth_confirm", | ||||
|             data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}), | ||||
|             errors=errors, | ||||
|         ) | ||||
|   | ||||
| @@ -15,7 +15,6 @@ from aiohttp.client_exceptions import ClientConnectorError | ||||
| from homeassistant.config_entries import ConfigEntry | ||||
| from homeassistant.const import CONF_NAME | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.exceptions import ConfigEntryAuthFailed | ||||
| from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo | ||||
| from homeassistant.helpers.update_coordinator import ( | ||||
|     DataUpdateCoordinator, | ||||
| @@ -31,7 +30,7 @@ from .const import ( | ||||
|     UPDATE_INTERVAL_OBSERVATION, | ||||
| ) | ||||
|  | ||||
| EXCEPTIONS = (ApiError, ClientConnectorError, RequestsExceededError) | ||||
| EXCEPTIONS = (ApiError, ClientConnectorError, InvalidApiKeyError, RequestsExceededError) | ||||
|  | ||||
| _LOGGER = logging.getLogger(__name__) | ||||
|  | ||||
| @@ -53,8 +52,6 @@ class AccuWeatherObservationDataUpdateCoordinator( | ||||
| ): | ||||
|     """Class to manage fetching AccuWeather data API.""" | ||||
|  | ||||
|     config_entry: AccuWeatherConfigEntry | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         hass: HomeAssistant, | ||||
| @@ -90,12 +87,6 @@ class AccuWeatherObservationDataUpdateCoordinator( | ||||
|                 translation_key="current_conditions_update_error", | ||||
|                 translation_placeholders={"error": repr(error)}, | ||||
|             ) from error | ||||
|         except InvalidApiKeyError as err: | ||||
|             raise ConfigEntryAuthFailed( | ||||
|                 translation_domain=DOMAIN, | ||||
|                 translation_key="auth_error", | ||||
|                 translation_placeholders={"entry": self.config_entry.title}, | ||||
|             ) from err | ||||
|  | ||||
|         _LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining) | ||||
|  | ||||
| @@ -107,8 +98,6 @@ class AccuWeatherForecastDataUpdateCoordinator( | ||||
| ): | ||||
|     """Base class for AccuWeather forecast.""" | ||||
|  | ||||
|     config_entry: AccuWeatherConfigEntry | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         hass: HomeAssistant, | ||||
| @@ -148,12 +137,6 @@ class AccuWeatherForecastDataUpdateCoordinator( | ||||
|                 translation_key="forecast_update_error", | ||||
|                 translation_placeholders={"error": repr(error)}, | ||||
|             ) from error | ||||
|         except InvalidApiKeyError as err: | ||||
|             raise ConfigEntryAuthFailed( | ||||
|                 translation_domain=DOMAIN, | ||||
|                 translation_key="auth_error", | ||||
|                 translation_placeholders={"entry": self.config_entry.title}, | ||||
|             ) from err | ||||
|  | ||||
|         _LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining) | ||||
|         return result | ||||
|   | ||||
| @@ -7,5 +7,5 @@ | ||||
|   "integration_type": "service", | ||||
|   "iot_class": "cloud_polling", | ||||
|   "loggers": ["accuweather"], | ||||
|   "requirements": ["accuweather==4.2.2"] | ||||
|   "requirements": ["accuweather==4.2.1"] | ||||
| } | ||||
|   | ||||
| @@ -7,17 +7,6 @@ | ||||
|           "api_key": "[%key:common::config_flow::data::api_key%]", | ||||
|           "latitude": "[%key:common::config_flow::data::latitude%]", | ||||
|           "longitude": "[%key:common::config_flow::data::longitude%]" | ||||
|         }, | ||||
|         "data_description": { | ||||
|           "api_key": "API key generated in the AccuWeather APIs portal." | ||||
|         } | ||||
|       }, | ||||
|       "reauth_confirm": { | ||||
|         "data": { | ||||
|           "api_key": "[%key:common::config_flow::data::api_key%]" | ||||
|         }, | ||||
|         "data_description": { | ||||
|           "api_key": "[%key:component::accuweather::config::step::user::data_description::api_key%]" | ||||
|         } | ||||
|       } | ||||
|     }, | ||||
| @@ -30,8 +19,7 @@ | ||||
|       "requests_exceeded": "The allowed number of requests to the AccuWeather API has been exceeded. You have to wait or change the API key." | ||||
|     }, | ||||
|     "abort": { | ||||
|       "already_configured": "[%key:common::config_flow::abort::already_configured_location%]", | ||||
|       "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" | ||||
|       "already_configured": "[%key:common::config_flow::abort::already_configured_location%]" | ||||
|     } | ||||
|   }, | ||||
|   "entity": { | ||||
| @@ -251,9 +239,6 @@ | ||||
|     } | ||||
|   }, | ||||
|   "exceptions": { | ||||
|     "auth_error": { | ||||
|       "message": "Authentication failed for {entry}, please update your API key" | ||||
|     }, | ||||
|     "current_conditions_update_error": { | ||||
|       "message": "An error occurred while retrieving weather current conditions data from the AccuWeather API: {error}" | ||||
|     }, | ||||
|   | ||||
| @@ -2,31 +2,21 @@ | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| from pathlib import Path | ||||
|  | ||||
| from homeassistant.components.media_source import MediaSource, local_source | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.exceptions import HomeAssistantError | ||||
|  | ||||
| from .const import DATA_MEDIA_SOURCE, DOMAIN, IMAGE_DIR | ||||
|  | ||||
|  | ||||
| async def async_get_media_source(hass: HomeAssistant) -> MediaSource: | ||||
|     """Set up local media source.""" | ||||
|     media_dirs = list(hass.config.media_dirs.values()) | ||||
|  | ||||
|     if not media_dirs: | ||||
|         raise HomeAssistantError( | ||||
|             "AI Task media source requires at least one media directory configured" | ||||
|         ) | ||||
|  | ||||
|     media_dir = Path(media_dirs[0]) / DOMAIN / IMAGE_DIR | ||||
|     media_dir = hass.config.path(f"{DOMAIN}/{IMAGE_DIR}") | ||||
|  | ||||
|     hass.data[DATA_MEDIA_SOURCE] = source = local_source.LocalSource( | ||||
|         hass, | ||||
|         DOMAIN, | ||||
|         "AI Generated Images", | ||||
|         {IMAGE_DIR: str(media_dir)}, | ||||
|         {IMAGE_DIR: media_dir}, | ||||
|         f"/{DOMAIN}", | ||||
|     ) | ||||
|     return source | ||||
|   | ||||
| @@ -12,7 +12,7 @@ from typing import Any | ||||
|  | ||||
| import voluptuous as vol | ||||
|  | ||||
| from homeassistant.components import camera, conversation, image, media_source | ||||
| from homeassistant.components import camera, conversation, media_source | ||||
| from homeassistant.components.http.auth import async_sign_path | ||||
| from homeassistant.core import HomeAssistant, ServiceResponse, callback | ||||
| from homeassistant.exceptions import HomeAssistantError | ||||
| @@ -31,14 +31,14 @@ from .const import ( | ||||
| ) | ||||
|  | ||||
|  | ||||
| def _save_camera_snapshot(image_data: camera.Image | image.Image) -> Path: | ||||
| def _save_camera_snapshot(image: camera.Image) -> Path: | ||||
|     """Save camera snapshot to temp file.""" | ||||
|     with tempfile.NamedTemporaryFile( | ||||
|         mode="wb", | ||||
|         suffix=mimetypes.guess_extension(image_data.content_type, False), | ||||
|         suffix=mimetypes.guess_extension(image.content_type, False), | ||||
|         delete=False, | ||||
|     ) as temp_file: | ||||
|         temp_file.write(image_data.content) | ||||
|         temp_file.write(image.content) | ||||
|         return Path(temp_file.name) | ||||
|  | ||||
|  | ||||
| @@ -54,31 +54,26 @@ async def _resolve_attachments( | ||||
|     for attachment in attachments or []: | ||||
|         media_content_id = attachment["media_content_id"] | ||||
|  | ||||
|         # Special case for certain media sources | ||||
|         for integration in camera, image: | ||||
|             media_source_prefix = f"media-source://{integration.DOMAIN}/" | ||||
|             if not media_content_id.startswith(media_source_prefix): | ||||
|                 continue | ||||
|  | ||||
|         # Special case for camera media sources | ||||
|         if media_content_id.startswith("media-source://camera/"): | ||||
|             # Extract entity_id from the media content ID | ||||
|             entity_id = media_content_id.removeprefix(media_source_prefix) | ||||
|             entity_id = media_content_id.removeprefix("media-source://camera/") | ||||
|  | ||||
|             # Get snapshot from entity | ||||
|             image_data = await integration.async_get_image(hass, entity_id) | ||||
|             # Get snapshot from camera | ||||
|             image = await camera.async_get_image(hass, entity_id) | ||||
|  | ||||
|             temp_filename = await hass.async_add_executor_job( | ||||
|                 _save_camera_snapshot, image_data | ||||
|                 _save_camera_snapshot, image | ||||
|             ) | ||||
|             created_files.append(temp_filename) | ||||
|  | ||||
|             resolved_attachments.append( | ||||
|                 conversation.Attachment( | ||||
|                     media_content_id=media_content_id, | ||||
|                     mime_type=image_data.content_type, | ||||
|                     mime_type=image.content_type, | ||||
|                     path=temp_filename, | ||||
|                 ) | ||||
|             ) | ||||
|             break | ||||
|         else: | ||||
|             # Handle regular media sources | ||||
|             media = await media_source.async_resolve_media(hass, media_content_id, None) | ||||
|   | ||||
| @@ -4,18 +4,10 @@ from __future__ import annotations | ||||
|  | ||||
| from airos.airos8 import AirOS8 | ||||
|  | ||||
| from homeassistant.const import ( | ||||
|     CONF_HOST, | ||||
|     CONF_PASSWORD, | ||||
|     CONF_SSL, | ||||
|     CONF_USERNAME, | ||||
|     CONF_VERIFY_SSL, | ||||
|     Platform, | ||||
| ) | ||||
| from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.helpers.aiohttp_client import async_get_clientsession | ||||
|  | ||||
| from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, SECTION_ADVANCED_SETTINGS | ||||
| from .coordinator import AirOSConfigEntry, AirOSDataUpdateCoordinator | ||||
|  | ||||
| _PLATFORMS: list[Platform] = [ | ||||
| @@ -29,16 +21,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo | ||||
|  | ||||
|     # By default airOS 8 comes with self-signed SSL certificates, | ||||
|     # with no option in the web UI to change or upload a custom certificate. | ||||
|     session = async_get_clientsession( | ||||
|         hass, verify_ssl=entry.data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL] | ||||
|     ) | ||||
|     session = async_get_clientsession(hass, verify_ssl=False) | ||||
|  | ||||
|     airos_device = AirOS8( | ||||
|         host=entry.data[CONF_HOST], | ||||
|         username=entry.data[CONF_USERNAME], | ||||
|         password=entry.data[CONF_PASSWORD], | ||||
|         session=session, | ||||
|         use_ssl=entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL], | ||||
|     ) | ||||
|  | ||||
|     coordinator = AirOSDataUpdateCoordinator(hass, entry, airos_device) | ||||
| @@ -51,30 +40,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo | ||||
|     return True | ||||
|  | ||||
|  | ||||
| async def async_migrate_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool: | ||||
|     """Migrate old config entry.""" | ||||
|  | ||||
|     if entry.version > 1: | ||||
|         # This means the user has downgraded from a future version | ||||
|         return False | ||||
|  | ||||
|     if entry.version == 1 and entry.minor_version == 1: | ||||
|         new_data = {**entry.data} | ||||
|         advanced_data = { | ||||
|             CONF_SSL: DEFAULT_SSL, | ||||
|             CONF_VERIFY_SSL: DEFAULT_VERIFY_SSL, | ||||
|         } | ||||
|         new_data[SECTION_ADVANCED_SETTINGS] = advanced_data | ||||
|  | ||||
|         hass.config_entries.async_update_entry( | ||||
|             entry, | ||||
|             data=new_data, | ||||
|             minor_version=2, | ||||
|         ) | ||||
|  | ||||
|     return True | ||||
|  | ||||
|  | ||||
| async def async_unload_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool: | ||||
|     """Unload a config entry.""" | ||||
|     return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS) | ||||
|   | ||||
| @@ -2,7 +2,6 @@ | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| from collections.abc import Mapping | ||||
| import logging | ||||
| from typing import Any | ||||
|  | ||||
| @@ -15,23 +14,11 @@ from airos.exceptions import ( | ||||
| ) | ||||
| import voluptuous as vol | ||||
|  | ||||
| from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult | ||||
| from homeassistant.const import ( | ||||
|     CONF_HOST, | ||||
|     CONF_PASSWORD, | ||||
|     CONF_SSL, | ||||
|     CONF_USERNAME, | ||||
|     CONF_VERIFY_SSL, | ||||
| ) | ||||
| from homeassistant.data_entry_flow import section | ||||
| from homeassistant.config_entries import ConfigFlow, ConfigFlowResult | ||||
| from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME | ||||
| from homeassistant.helpers.aiohttp_client import async_get_clientsession | ||||
| from homeassistant.helpers.selector import ( | ||||
|     TextSelector, | ||||
|     TextSelectorConfig, | ||||
|     TextSelectorType, | ||||
| ) | ||||
|  | ||||
| from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, SECTION_ADVANCED_SETTINGS | ||||
| from .const import DOMAIN | ||||
| from .coordinator import AirOS8 | ||||
|  | ||||
| _LOGGER = logging.getLogger(__name__) | ||||
| @@ -41,15 +28,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema( | ||||
|         vol.Required(CONF_HOST): str, | ||||
|         vol.Required(CONF_USERNAME, default="ubnt"): str, | ||||
|         vol.Required(CONF_PASSWORD): str, | ||||
|         vol.Required(SECTION_ADVANCED_SETTINGS): section( | ||||
|             vol.Schema( | ||||
|                 { | ||||
|                     vol.Required(CONF_SSL, default=DEFAULT_SSL): bool, | ||||
|                     vol.Required(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): bool, | ||||
|                 } | ||||
|             ), | ||||
|             {"collapsed": True}, | ||||
|         ), | ||||
|     } | ||||
| ) | ||||
|  | ||||
| @@ -58,47 +36,23 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|     """Handle a config flow for Ubiquiti airOS.""" | ||||
|  | ||||
|     VERSION = 1 | ||||
|     MINOR_VERSION = 2 | ||||
|  | ||||
|     def __init__(self) -> None: | ||||
|         """Initialize the config flow.""" | ||||
|         super().__init__() | ||||
|         self.airos_device: AirOS8 | ||||
|         self.errors: dict[str, str] = {} | ||||
|  | ||||
|     async def async_step_user( | ||||
|         self, user_input: dict[str, Any] | None = None | ||||
|         self, | ||||
|         user_input: dict[str, Any] | None = None, | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Handle the manual input of host and credentials.""" | ||||
|         self.errors = {} | ||||
|         """Handle the initial step.""" | ||||
|         errors: dict[str, str] = {} | ||||
|         if user_input is not None: | ||||
|             validated_info = await self._validate_and_get_device_info(user_input) | ||||
|             if validated_info: | ||||
|                 return self.async_create_entry( | ||||
|                     title=validated_info["title"], | ||||
|                     data=validated_info["data"], | ||||
|                 ) | ||||
|         return self.async_show_form( | ||||
|             step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=self.errors | ||||
|         ) | ||||
|  | ||||
|     async def _validate_and_get_device_info( | ||||
|         self, config_data: dict[str, Any] | ||||
|     ) -> dict[str, Any] | None: | ||||
|         """Validate user input with the device API.""" | ||||
|             # By default airOS 8 comes with self-signed SSL certificates, | ||||
|             # with no option in the web UI to change or upload a custom certificate. | ||||
|         session = async_get_clientsession( | ||||
|             self.hass, | ||||
|             verify_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL], | ||||
|         ) | ||||
|             session = async_get_clientsession(self.hass, verify_ssl=False) | ||||
|  | ||||
|             airos_device = AirOS8( | ||||
|             host=config_data[CONF_HOST], | ||||
|             username=config_data[CONF_USERNAME], | ||||
|             password=config_data[CONF_PASSWORD], | ||||
|                 host=user_input[CONF_HOST], | ||||
|                 username=user_input[CONF_USERNAME], | ||||
|                 password=user_input[CONF_PASSWORD], | ||||
|                 session=session, | ||||
|             use_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_SSL], | ||||
|             ) | ||||
|             try: | ||||
|                 await airos_device.login() | ||||
| @@ -108,59 +62,21 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|                 AirOSConnectionSetupError, | ||||
|                 AirOSDeviceConnectionError, | ||||
|             ): | ||||
|             self.errors["base"] = "cannot_connect" | ||||
|                 errors["base"] = "cannot_connect" | ||||
|             except (AirOSConnectionAuthenticationError, AirOSDataMissingError): | ||||
|             self.errors["base"] = "invalid_auth" | ||||
|                 errors["base"] = "invalid_auth" | ||||
|             except AirOSKeyDataMissingError: | ||||
|             self.errors["base"] = "key_data_missing" | ||||
|                 errors["base"] = "key_data_missing" | ||||
|             except Exception: | ||||
|             _LOGGER.exception("Unexpected exception during credential validation") | ||||
|             self.errors["base"] = "unknown" | ||||
|                 _LOGGER.exception("Unexpected exception") | ||||
|                 errors["base"] = "unknown" | ||||
|             else: | ||||
|                 await self.async_set_unique_id(airos_data.derived.mac) | ||||
|  | ||||
|             if self.source == SOURCE_REAUTH: | ||||
|                 self._abort_if_unique_id_mismatch() | ||||
|             else: | ||||
|                 self._abort_if_unique_id_configured() | ||||
|  | ||||
|             return {"title": airos_data.host.hostname, "data": config_data} | ||||
|  | ||||
|         return None | ||||
|  | ||||
|     async def async_step_reauth( | ||||
|         self, | ||||
|         user_input: Mapping[str, Any], | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Perform reauthentication upon an API authentication error.""" | ||||
|         return await self.async_step_reauth_confirm(user_input) | ||||
|  | ||||
|     async def async_step_reauth_confirm( | ||||
|         self, | ||||
|         user_input: Mapping[str, Any], | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Perform reauthentication upon an API authentication error.""" | ||||
|         self.errors = {} | ||||
|  | ||||
|         if user_input: | ||||
|             validate_data = {**self._get_reauth_entry().data, **user_input} | ||||
|             if await self._validate_and_get_device_info(config_data=validate_data): | ||||
|                 return self.async_update_reload_and_abort( | ||||
|                     self._get_reauth_entry(), | ||||
|                     data_updates=validate_data, | ||||
|                 return self.async_create_entry( | ||||
|                     title=airos_data.host.hostname, data=user_input | ||||
|                 ) | ||||
|  | ||||
|         return self.async_show_form( | ||||
|             step_id="reauth_confirm", | ||||
|             data_schema=vol.Schema( | ||||
|                 { | ||||
|                     vol.Required(CONF_PASSWORD): TextSelector( | ||||
|                         TextSelectorConfig( | ||||
|                             type=TextSelectorType.PASSWORD, | ||||
|                             autocomplete="current-password", | ||||
|                         ) | ||||
|                     ), | ||||
|                 } | ||||
|             ), | ||||
|             errors=self.errors, | ||||
|             step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors | ||||
|         ) | ||||
|   | ||||
| @@ -7,8 +7,3 @@ DOMAIN = "airos" | ||||
| SCAN_INTERVAL = timedelta(minutes=1) | ||||
|  | ||||
| MANUFACTURER = "Ubiquiti" | ||||
|  | ||||
| DEFAULT_VERIFY_SSL = False | ||||
| DEFAULT_SSL = True | ||||
|  | ||||
| SECTION_ADVANCED_SETTINGS = "advanced_settings" | ||||
|   | ||||
| @@ -14,7 +14,7 @@ from airos.exceptions import ( | ||||
|  | ||||
| from homeassistant.config_entries import ConfigEntry | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.exceptions import ConfigEntryAuthFailed | ||||
| from homeassistant.exceptions import ConfigEntryError | ||||
| from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed | ||||
|  | ||||
| from .const import DOMAIN, SCAN_INTERVAL | ||||
| @@ -47,9 +47,9 @@ class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOS8Data]): | ||||
|         try: | ||||
|             await self.airos_device.login() | ||||
|             return await self.airos_device.status() | ||||
|         except AirOSConnectionAuthenticationError as err: | ||||
|         except (AirOSConnectionAuthenticationError,) as err: | ||||
|             _LOGGER.exception("Error authenticating with airOS device") | ||||
|             raise ConfigEntryAuthFailed( | ||||
|             raise ConfigEntryError( | ||||
|                 translation_domain=DOMAIN, translation_key="invalid_auth" | ||||
|             ) from err | ||||
|         except ( | ||||
|   | ||||
| @@ -2,11 +2,11 @@ | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| from homeassistant.const import CONF_HOST, CONF_SSL | ||||
| from homeassistant.const import CONF_HOST | ||||
| from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo | ||||
| from homeassistant.helpers.update_coordinator import CoordinatorEntity | ||||
|  | ||||
| from .const import DOMAIN, MANUFACTURER, SECTION_ADVANCED_SETTINGS | ||||
| from .const import DOMAIN, MANUFACTURER | ||||
| from .coordinator import AirOSDataUpdateCoordinator | ||||
|  | ||||
|  | ||||
| @@ -20,14 +20,9 @@ class AirOSEntity(CoordinatorEntity[AirOSDataUpdateCoordinator]): | ||||
|         super().__init__(coordinator) | ||||
|  | ||||
|         airos_data = self.coordinator.data | ||||
|         url_schema = ( | ||||
|             "https" | ||||
|             if coordinator.config_entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL] | ||||
|             else "http" | ||||
|         ) | ||||
|  | ||||
|         configuration_url: str | None = ( | ||||
|             f"{url_schema}://{coordinator.config_entry.data[CONF_HOST]}" | ||||
|             f"https://{coordinator.config_entry.data[CONF_HOST]}" | ||||
|         ) | ||||
|  | ||||
|         self._attr_device_info = DeviceInfo( | ||||
|   | ||||
| @@ -6,5 +6,5 @@ | ||||
|   "documentation": "https://www.home-assistant.io/integrations/airos", | ||||
|   "iot_class": "local_polling", | ||||
|   "quality_scale": "bronze", | ||||
|   "requirements": ["airos==0.5.4"] | ||||
|   "requirements": ["airos==0.5.1"] | ||||
| } | ||||
|   | ||||
| @@ -2,14 +2,6 @@ | ||||
|   "config": { | ||||
|     "flow_title": "Ubiquiti airOS device", | ||||
|     "step": { | ||||
|       "reauth_confirm": { | ||||
|         "data": { | ||||
|           "password": "[%key:common::config_flow::data::password%]" | ||||
|         }, | ||||
|         "data_description": { | ||||
|           "password": "[%key:component::airos::config::step::user::data_description::password%]" | ||||
|         } | ||||
|       }, | ||||
|       "user": { | ||||
|         "data": { | ||||
|           "host": "[%key:common::config_flow::data::host%]", | ||||
| @@ -20,18 +12,6 @@ | ||||
|           "host": "IP address or hostname of the airOS device", | ||||
|           "username": "Administrator username for the airOS device, normally 'ubnt'", | ||||
|           "password": "Password configured through the UISP app or web interface" | ||||
|         }, | ||||
|         "sections": { | ||||
|           "advanced_settings": { | ||||
|             "data": { | ||||
|               "ssl": "Use HTTPS", | ||||
|               "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" | ||||
|             }, | ||||
|             "data_description": { | ||||
|               "ssl": "Whether the connection should be encrypted (required for most devices)", | ||||
|               "verify_ssl": "Whether the certificate should be verified when using HTTPS. This should be off for self-signed certificates" | ||||
|             } | ||||
|           } | ||||
|         } | ||||
|       } | ||||
|     }, | ||||
| @@ -42,9 +22,7 @@ | ||||
|       "unknown": "[%key:common::config_flow::error::unknown%]" | ||||
|     }, | ||||
|     "abort": { | ||||
|       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", | ||||
|       "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", | ||||
|       "unique_id_mismatch": "Re-authentication should be used for the same device not a new one" | ||||
|       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" | ||||
|     } | ||||
|   }, | ||||
|   "entity": { | ||||
|   | ||||
| @@ -23,10 +23,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema( | ||||
|     } | ||||
| ) | ||||
|  | ||||
| URL_API_INTEGRATION = { | ||||
|     "url": "https://dashboard.airthings.com/integrations/api-integration" | ||||
| } | ||||
|  | ||||
|  | ||||
| class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|     """Handle a config flow for Airthings.""" | ||||
| @@ -41,7 +37,11 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|             return self.async_show_form( | ||||
|                 step_id="user", | ||||
|                 data_schema=STEP_USER_DATA_SCHEMA, | ||||
|                 description_placeholders=URL_API_INTEGRATION, | ||||
|                 description_placeholders={ | ||||
|                     "url": ( | ||||
|                         "https://dashboard.airthings.com/integrations/api-integration" | ||||
|                     ), | ||||
|                 }, | ||||
|             ) | ||||
|  | ||||
|         errors = {} | ||||
| @@ -65,8 +65,5 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|             return self.async_create_entry(title="Airthings", data=user_input) | ||||
|  | ||||
|         return self.async_show_form( | ||||
|             step_id="user", | ||||
|             data_schema=STEP_USER_DATA_SCHEMA, | ||||
|             errors=errors, | ||||
|             description_placeholders=URL_API_INTEGRATION, | ||||
|             step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors | ||||
|         ) | ||||
|   | ||||
| @@ -4,10 +4,10 @@ | ||||
|       "user": { | ||||
|         "data": { | ||||
|           "id": "ID", | ||||
|           "secret": "Secret" | ||||
|         }, | ||||
|           "secret": "Secret", | ||||
|           "description": "Login at {url} to find your credentials" | ||||
|         } | ||||
|       } | ||||
|     }, | ||||
|     "error": { | ||||
|       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", | ||||
|   | ||||
| @@ -171,7 +171,7 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|             return self.async_abort(reason="no_devices_found") | ||||
|  | ||||
|         titles = { | ||||
|             address: get_name(discovery.device) | ||||
|             address: discovery.device.name | ||||
|             for (address, discovery) in self._discovered_devices.items() | ||||
|         } | ||||
|         return self.async_show_form( | ||||
|   | ||||
| @@ -114,8 +114,6 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = { | ||||
|     ), | ||||
| } | ||||
|  | ||||
| PARALLEL_UPDATES = 0 | ||||
|  | ||||
|  | ||||
| @callback | ||||
| def async_migrate(hass: HomeAssistant, address: str, sensor_name: str) -> None: | ||||
|   | ||||
| @@ -6,9 +6,6 @@ | ||||
|         "description": "[%key:component::bluetooth::config::step::user::description%]", | ||||
|         "data": { | ||||
|           "address": "[%key:common::config_flow::data::device%]" | ||||
|         }, | ||||
|         "data_description": { | ||||
|           "address": "The Airthings devices discovered via Bluetooth." | ||||
|         } | ||||
|       }, | ||||
|       "bluetooth_confirm": { | ||||
|   | ||||
| @@ -2,14 +2,17 @@ | ||||
|  | ||||
| from airtouch4pyapi import AirTouch | ||||
|  | ||||
| from homeassistant.config_entries import ConfigEntry | ||||
| from homeassistant.const import CONF_HOST, Platform | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.exceptions import ConfigEntryNotReady | ||||
|  | ||||
| from .coordinator import AirTouch4ConfigEntry, AirtouchDataUpdateCoordinator | ||||
| from .coordinator import AirtouchDataUpdateCoordinator | ||||
|  | ||||
| PLATFORMS = [Platform.CLIMATE] | ||||
|  | ||||
| type AirTouch4ConfigEntry = ConfigEntry[AirtouchDataUpdateCoordinator] | ||||
|  | ||||
|  | ||||
| async def async_setup_entry(hass: HomeAssistant, entry: AirTouch4ConfigEntry) -> bool: | ||||
|     """Set up AirTouch4 from a config entry.""" | ||||
| @@ -19,7 +22,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirTouch4ConfigEntry) -> | ||||
|     info = airtouch.GetAcs() | ||||
|     if not info: | ||||
|         raise ConfigEntryNotReady | ||||
|     coordinator = AirtouchDataUpdateCoordinator(hass, entry, airtouch) | ||||
|     coordinator = AirtouchDataUpdateCoordinator(hass, airtouch) | ||||
|     await coordinator.async_config_entry_first_refresh() | ||||
|     entry.runtime_data = coordinator | ||||
|  | ||||
|   | ||||
| @@ -2,34 +2,26 @@ | ||||
|  | ||||
| import logging | ||||
|  | ||||
| from airtouch4pyapi import AirTouch | ||||
| from airtouch4pyapi.airtouch import AirTouchStatus | ||||
|  | ||||
| from homeassistant.components.climate import SCAN_INTERVAL | ||||
| from homeassistant.config_entries import ConfigEntry | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed | ||||
|  | ||||
| from .const import DOMAIN | ||||
|  | ||||
| _LOGGER = logging.getLogger(__name__) | ||||
|  | ||||
| type AirTouch4ConfigEntry = ConfigEntry[AirtouchDataUpdateCoordinator] | ||||
|  | ||||
|  | ||||
| class AirtouchDataUpdateCoordinator(DataUpdateCoordinator): | ||||
|     """Class to manage fetching Airtouch data.""" | ||||
|  | ||||
|     def __init__( | ||||
|         self, hass: HomeAssistant, entry: AirTouch4ConfigEntry, airtouch: AirTouch | ||||
|     ) -> None: | ||||
|     def __init__(self, hass, airtouch): | ||||
|         """Initialize global Airtouch data updater.""" | ||||
|         self.airtouch = airtouch | ||||
|  | ||||
|         super().__init__( | ||||
|             hass, | ||||
|             _LOGGER, | ||||
|             config_entry=entry, | ||||
|             name=DOMAIN, | ||||
|             update_interval=SCAN_INTERVAL, | ||||
|         ) | ||||
|   | ||||
| @@ -6,19 +6,17 @@ from collections.abc import Callable | ||||
| from dataclasses import dataclass | ||||
| from typing import Any, Final | ||||
|  | ||||
| from aioairzone.common import GrilleAngle, OperationMode, QAdapt, SleepTimeout | ||||
| from aioairzone.common import GrilleAngle, OperationMode, SleepTimeout | ||||
| from aioairzone.const import ( | ||||
|     API_COLD_ANGLE, | ||||
|     API_HEAT_ANGLE, | ||||
|     API_MODE, | ||||
|     API_Q_ADAPT, | ||||
|     API_SLEEP, | ||||
|     AZD_COLD_ANGLE, | ||||
|     AZD_HEAT_ANGLE, | ||||
|     AZD_MASTER, | ||||
|     AZD_MODE, | ||||
|     AZD_MODES, | ||||
|     AZD_Q_ADAPT, | ||||
|     AZD_SLEEP, | ||||
|     AZD_ZONES, | ||||
| ) | ||||
| @@ -67,14 +65,6 @@ SLEEP_DICT: Final[dict[str, int]] = { | ||||
|     "90m": SleepTimeout.SLEEP_90, | ||||
| } | ||||
|  | ||||
| Q_ADAPT_DICT: Final[dict[str, int]] = { | ||||
|     "standard": QAdapt.STANDARD, | ||||
|     "power": QAdapt.POWER, | ||||
|     "silence": QAdapt.SILENCE, | ||||
|     "minimum": QAdapt.MINIMUM, | ||||
|     "maximum": QAdapt.MAXIMUM, | ||||
| } | ||||
|  | ||||
|  | ||||
| def main_zone_options( | ||||
|     zone_data: dict[str, Any], | ||||
| @@ -93,14 +83,6 @@ MAIN_ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = ( | ||||
|         options_fn=main_zone_options, | ||||
|         translation_key="modes", | ||||
|     ), | ||||
|     AirzoneSelectDescription( | ||||
|         api_param=API_Q_ADAPT, | ||||
|         entity_category=EntityCategory.CONFIG, | ||||
|         key=AZD_Q_ADAPT, | ||||
|         options=list(Q_ADAPT_DICT), | ||||
|         options_dict=Q_ADAPT_DICT, | ||||
|         translation_key="q_adapt", | ||||
|     ), | ||||
| ) | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -63,16 +63,6 @@ | ||||
|           "stop": "Stop" | ||||
|         } | ||||
|       }, | ||||
|       "q_adapt": { | ||||
|         "name": "Q-Adapt", | ||||
|         "state": { | ||||
|           "standard": "Standard", | ||||
|           "power": "Power", | ||||
|           "silence": "Silence", | ||||
|           "minimum": "Minimum", | ||||
|           "maximum": "Maximum" | ||||
|         } | ||||
|       }, | ||||
|       "sleep_times": { | ||||
|         "name": "Sleep", | ||||
|         "state": { | ||||
|   | ||||
| @@ -22,17 +22,6 @@ class OAuth2FlowHandler( | ||||
|     VERSION = CONFIG_FLOW_VERSION | ||||
|     MINOR_VERSION = CONFIG_FLOW_MINOR_VERSION | ||||
|  | ||||
|     async def async_step_user( | ||||
|         self, user_input: dict[str, Any] | None = None | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Check we have the cloud integration set up.""" | ||||
|         if "cloud" not in self.hass.config.components: | ||||
|             return self.async_abort( | ||||
|                 reason="cloud_not_enabled", | ||||
|                 description_placeholders={"default_config": "default_config"}, | ||||
|             ) | ||||
|         return await super().async_step_user(user_input) | ||||
|  | ||||
|     async def async_step_reauth( | ||||
|         self, user_input: Mapping[str, Any] | ||||
|     ) -> ConfigFlowResult: | ||||
|   | ||||
| @@ -24,8 +24,7 @@ | ||||
|       "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]", | ||||
|       "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]", | ||||
|       "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", | ||||
|       "wrong_account": "You are authenticated with a different account than the one set up. Please authenticate with the configured account.", | ||||
|       "cloud_not_enabled": "Please make sure you run Home Assistant with `{default_config}` enabled in your configuration.yaml." | ||||
|       "wrong_account": "You are authenticated with a different account than the one set up. Please authenticate with the configured account." | ||||
|     }, | ||||
|     "create_entry": { | ||||
|       "default": "[%key:common::config_flow::create_entry::authenticated%]" | ||||
|   | ||||
| @@ -10,7 +10,6 @@ from aioamazondevices.api import AmazonDevice | ||||
| from aioamazondevices.const import SENSOR_STATE_OFF | ||||
|  | ||||
| from homeassistant.components.binary_sensor import ( | ||||
|     DOMAIN as BINARY_SENSOR_DOMAIN, | ||||
|     BinarySensorDeviceClass, | ||||
|     BinarySensorEntity, | ||||
|     BinarySensorEntityDescription, | ||||
| @@ -21,7 +20,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback | ||||
|  | ||||
| from .coordinator import AmazonConfigEntry | ||||
| from .entity import AmazonEntity | ||||
| from .utils import async_update_unique_id | ||||
|  | ||||
| # Coordinator is used to centralize the data updates | ||||
| PARALLEL_UPDATES = 0 | ||||
| @@ -33,7 +31,6 @@ class AmazonBinarySensorEntityDescription(BinarySensorEntityDescription): | ||||
|  | ||||
|     is_on_fn: Callable[[AmazonDevice, str], bool] | ||||
|     is_supported: Callable[[AmazonDevice, str], bool] = lambda device, key: True | ||||
|     is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: True | ||||
|  | ||||
|  | ||||
| BINARY_SENSORS: Final = ( | ||||
| @@ -44,15 +41,46 @@ BINARY_SENSORS: Final = ( | ||||
|         is_on_fn=lambda device, _: device.online, | ||||
|     ), | ||||
|     AmazonBinarySensorEntityDescription( | ||||
|         key="detectionState", | ||||
|         device_class=BinarySensorDeviceClass.MOTION, | ||||
|         is_on_fn=lambda device, key: bool( | ||||
|             device.sensors[key].value != SENSOR_STATE_OFF | ||||
|         key="bluetooth", | ||||
|         entity_category=EntityCategory.DIAGNOSTIC, | ||||
|         translation_key="bluetooth", | ||||
|         is_on_fn=lambda device, _: device.bluetooth_state, | ||||
|     ), | ||||
|     AmazonBinarySensorEntityDescription( | ||||
|         key="babyCryDetectionState", | ||||
|         translation_key="baby_cry_detection", | ||||
|         is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF), | ||||
|         is_supported=lambda device, key: device.sensors.get(key) is not None, | ||||
|         is_available_fn=lambda device, key: ( | ||||
|             device.online and device.sensors[key].error is False | ||||
|     ), | ||||
|     AmazonBinarySensorEntityDescription( | ||||
|         key="beepingApplianceDetectionState", | ||||
|         translation_key="beeping_appliance_detection", | ||||
|         is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF), | ||||
|         is_supported=lambda device, key: device.sensors.get(key) is not None, | ||||
|     ), | ||||
|     AmazonBinarySensorEntityDescription( | ||||
|         key="coughDetectionState", | ||||
|         translation_key="cough_detection", | ||||
|         is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF), | ||||
|         is_supported=lambda device, key: device.sensors.get(key) is not None, | ||||
|     ), | ||||
|     AmazonBinarySensorEntityDescription( | ||||
|         key="dogBarkDetectionState", | ||||
|         translation_key="dog_bark_detection", | ||||
|         is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF), | ||||
|         is_supported=lambda device, key: device.sensors.get(key) is not None, | ||||
|     ), | ||||
|     AmazonBinarySensorEntityDescription( | ||||
|         key="humanPresenceDetectionState", | ||||
|         device_class=BinarySensorDeviceClass.MOTION, | ||||
|         is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF), | ||||
|         is_supported=lambda device, key: device.sensors.get(key) is not None, | ||||
|     ), | ||||
|     AmazonBinarySensorEntityDescription( | ||||
|         key="waterSoundsDetectionState", | ||||
|         translation_key="water_sounds_detection", | ||||
|         is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF), | ||||
|         is_supported=lambda device, key: device.sensors.get(key) is not None, | ||||
|     ), | ||||
| ) | ||||
|  | ||||
| @@ -66,33 +94,12 @@ async def async_setup_entry( | ||||
|  | ||||
|     coordinator = entry.runtime_data | ||||
|  | ||||
|     # Replace unique id for "detectionState" binary sensor | ||||
|     await async_update_unique_id( | ||||
|         hass, | ||||
|         coordinator, | ||||
|         BINARY_SENSOR_DOMAIN, | ||||
|         "humanPresenceDetectionState", | ||||
|         "detectionState", | ||||
|     ) | ||||
|  | ||||
|     known_devices: set[str] = set() | ||||
|  | ||||
|     def _check_device() -> None: | ||||
|         current_devices = set(coordinator.data) | ||||
|         new_devices = current_devices - known_devices | ||||
|         if new_devices: | ||||
|             known_devices.update(new_devices) | ||||
|     async_add_entities( | ||||
|         AmazonBinarySensorEntity(coordinator, serial_num, sensor_desc) | ||||
|         for sensor_desc in BINARY_SENSORS | ||||
|                 for serial_num in new_devices | ||||
|                 if sensor_desc.is_supported( | ||||
|                     coordinator.data[serial_num], sensor_desc.key | ||||
|         for serial_num in coordinator.data | ||||
|         if sensor_desc.is_supported(coordinator.data[serial_num], sensor_desc.key) | ||||
|     ) | ||||
|             ) | ||||
|  | ||||
|     _check_device() | ||||
|     entry.async_on_unload(coordinator.async_add_listener(_check_device)) | ||||
|  | ||||
|  | ||||
| class AmazonBinarySensorEntity(AmazonEntity, BinarySensorEntity): | ||||
| @@ -106,13 +113,3 @@ class AmazonBinarySensorEntity(AmazonEntity, BinarySensorEntity): | ||||
|         return self.entity_description.is_on_fn( | ||||
|             self.device, self.entity_description.key | ||||
|         ) | ||||
|  | ||||
|     @property | ||||
|     def available(self) -> bool: | ||||
|         """Return if entity is available.""" | ||||
|         return ( | ||||
|             self.entity_description.is_available_fn( | ||||
|                 self.device, self.entity_description.key | ||||
|             ) | ||||
|             and super().available | ||||
|         ) | ||||
|   | ||||
| @@ -64,7 +64,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|                 data = await validate_input(self.hass, user_input) | ||||
|             except CannotConnect: | ||||
|                 errors["base"] = "cannot_connect" | ||||
|             except CannotAuthenticate: | ||||
|             except (CannotAuthenticate, TypeError): | ||||
|                 errors["base"] = "invalid_auth" | ||||
|             except CannotRetrieveData: | ||||
|                 errors["base"] = "cannot_retrieve_data" | ||||
| @@ -112,7 +112,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|                 ) | ||||
|             except CannotConnect: | ||||
|                 errors["base"] = "cannot_connect" | ||||
|             except CannotAuthenticate: | ||||
|             except (CannotAuthenticate, TypeError): | ||||
|                 errors["base"] = "invalid_auth" | ||||
|             except CannotRetrieveData: | ||||
|                 errors["base"] = "cannot_retrieve_data" | ||||
|   | ||||
| @@ -68,7 +68,7 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]): | ||||
|                 translation_key="cannot_retrieve_data_with_error", | ||||
|                 translation_placeholders={"error": repr(err)}, | ||||
|             ) from err | ||||
|         except CannotAuthenticate as err: | ||||
|         except (CannotAuthenticate, TypeError) as err: | ||||
|             raise ConfigEntryAuthFailed( | ||||
|                 translation_domain=DOMAIN, | ||||
|                 translation_key="invalid_auth", | ||||
|   | ||||
| @@ -60,5 +60,7 @@ def build_device_data(device: AmazonDevice) -> dict[str, Any]: | ||||
|         "online": device.online, | ||||
|         "serial number": device.serial_number, | ||||
|         "software version": device.software_version, | ||||
|         "sensors": device.sensors, | ||||
|         "do not disturb": device.do_not_disturb, | ||||
|         "response style": device.response_style, | ||||
|         "bluetooth state": device.bluetooth_state, | ||||
|     } | ||||
|   | ||||
| @@ -1,4 +1,44 @@ | ||||
| { | ||||
|   "entity": { | ||||
|     "binary_sensor": { | ||||
|       "bluetooth": { | ||||
|         "default": "mdi:bluetooth-off", | ||||
|         "state": { | ||||
|           "on": "mdi:bluetooth" | ||||
|         } | ||||
|       }, | ||||
|       "baby_cry_detection": { | ||||
|         "default": "mdi:account-voice-off", | ||||
|         "state": { | ||||
|           "on": "mdi:account-voice" | ||||
|         } | ||||
|       }, | ||||
|       "beeping_appliance_detection": { | ||||
|         "default": "mdi:bell-off", | ||||
|         "state": { | ||||
|           "on": "mdi:bell-ring" | ||||
|         } | ||||
|       }, | ||||
|       "cough_detection": { | ||||
|         "default": "mdi:blur-off", | ||||
|         "state": { | ||||
|           "on": "mdi:blur" | ||||
|         } | ||||
|       }, | ||||
|       "dog_bark_detection": { | ||||
|         "default": "mdi:dog-side-off", | ||||
|         "state": { | ||||
|           "on": "mdi:dog-side" | ||||
|         } | ||||
|       }, | ||||
|       "water_sounds_detection": { | ||||
|         "default": "mdi:water-pump-off", | ||||
|         "state": { | ||||
|           "on": "mdi:water-pump" | ||||
|         } | ||||
|       } | ||||
|     } | ||||
|   }, | ||||
|   "services": { | ||||
|     "send_sound": { | ||||
|       "service": "mdi:cast-audio" | ||||
|   | ||||
| @@ -7,6 +7,6 @@ | ||||
|   "integration_type": "hub", | ||||
|   "iot_class": "cloud_polling", | ||||
|   "loggers": ["aioamazondevices"], | ||||
|   "quality_scale": "platinum", | ||||
|   "requirements": ["aioamazondevices==6.2.7"] | ||||
|   "quality_scale": "silver", | ||||
|   "requirements": ["aioamazondevices==6.0.0"] | ||||
| } | ||||
|   | ||||
| @@ -57,24 +57,14 @@ async def async_setup_entry( | ||||
|  | ||||
|     coordinator = entry.runtime_data | ||||
|  | ||||
|     known_devices: set[str] = set() | ||||
|  | ||||
|     def _check_device() -> None: | ||||
|         current_devices = set(coordinator.data) | ||||
|         new_devices = current_devices - known_devices | ||||
|         if new_devices: | ||||
|             known_devices.update(new_devices) | ||||
|     async_add_entities( | ||||
|         AmazonNotifyEntity(coordinator, serial_num, sensor_desc) | ||||
|         for sensor_desc in NOTIFY | ||||
|                 for serial_num in new_devices | ||||
|         for serial_num in coordinator.data | ||||
|         if sensor_desc.subkey in coordinator.data[serial_num].capabilities | ||||
|         and sensor_desc.is_supported(coordinator.data[serial_num]) | ||||
|     ) | ||||
|  | ||||
|     _check_device() | ||||
|     entry.async_on_unload(coordinator.async_add_listener(_check_device)) | ||||
|  | ||||
|  | ||||
| class AmazonNotifyEntity(AmazonEntity, NotifyEntity): | ||||
|     """Binary sensor notify platform.""" | ||||
|   | ||||
| @@ -53,7 +53,7 @@ rules: | ||||
|   docs-supported-functions: done | ||||
|   docs-troubleshooting: done | ||||
|   docs-use-cases: done | ||||
|   dynamic-devices: done | ||||
|   dynamic-devices: todo | ||||
|   entity-category: done | ||||
|   entity-device-class: done | ||||
|   entity-disabled-by-default: done | ||||
|   | ||||
| @@ -31,9 +31,6 @@ class AmazonSensorEntityDescription(SensorEntityDescription): | ||||
|     """Amazon Devices sensor entity description.""" | ||||
|  | ||||
|     native_unit_of_measurement_fn: Callable[[AmazonDevice, str], str] | None = None | ||||
|     is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: ( | ||||
|         device.online and device.sensors[key].error is False | ||||
|     ) | ||||
|  | ||||
|  | ||||
| SENSORS: Final = ( | ||||
| @@ -65,23 +62,13 @@ async def async_setup_entry( | ||||
|  | ||||
|     coordinator = entry.runtime_data | ||||
|  | ||||
|     known_devices: set[str] = set() | ||||
|  | ||||
|     def _check_device() -> None: | ||||
|         current_devices = set(coordinator.data) | ||||
|         new_devices = current_devices - known_devices | ||||
|         if new_devices: | ||||
|             known_devices.update(new_devices) | ||||
|     async_add_entities( | ||||
|         AmazonSensorEntity(coordinator, serial_num, sensor_desc) | ||||
|         for sensor_desc in SENSORS | ||||
|                 for serial_num in new_devices | ||||
|         for serial_num in coordinator.data | ||||
|         if coordinator.data[serial_num].sensors.get(sensor_desc.key) is not None | ||||
|     ) | ||||
|  | ||||
|     _check_device() | ||||
|     entry.async_on_unload(coordinator.async_add_listener(_check_device)) | ||||
|  | ||||
|  | ||||
| class AmazonSensorEntity(AmazonEntity, SensorEntity): | ||||
|     """Sensor device.""" | ||||
| @@ -102,13 +89,3 @@ class AmazonSensorEntity(AmazonEntity, SensorEntity): | ||||
|     def native_value(self) -> StateType: | ||||
|         """Return the state of the sensor.""" | ||||
|         return self.device.sensors[self.entity_description.key].value | ||||
|  | ||||
|     @property | ||||
|     def available(self) -> bool: | ||||
|         """Return if entity is available.""" | ||||
|         return ( | ||||
|             self.entity_description.is_available_fn( | ||||
|                 self.device, self.entity_description.key | ||||
|             ) | ||||
|             and super().available | ||||
|         ) | ||||
|   | ||||
| @@ -58,6 +58,26 @@ | ||||
|     } | ||||
|   }, | ||||
|   "entity": { | ||||
|     "binary_sensor": { | ||||
|       "bluetooth": { | ||||
|         "name": "Bluetooth" | ||||
|       }, | ||||
|       "baby_cry_detection": { | ||||
|         "name": "Baby crying" | ||||
|       }, | ||||
|       "beeping_appliance_detection": { | ||||
|         "name": "Beeping appliance" | ||||
|       }, | ||||
|       "cough_detection": { | ||||
|         "name": "Coughing" | ||||
|       }, | ||||
|       "dog_bark_detection": { | ||||
|         "name": "Dog barking" | ||||
|       }, | ||||
|       "water_sounds_detection": { | ||||
|         "name": "Water sounds" | ||||
|       } | ||||
|     }, | ||||
|     "notify": { | ||||
|       "speak": { | ||||
|         "name": "Speak" | ||||
|   | ||||
| @@ -8,17 +8,13 @@ from typing import TYPE_CHECKING, Any, Final | ||||
|  | ||||
| from aioamazondevices.api import AmazonDevice | ||||
|  | ||||
| from homeassistant.components.switch import ( | ||||
|     DOMAIN as SWITCH_DOMAIN, | ||||
|     SwitchEntity, | ||||
|     SwitchEntityDescription, | ||||
| ) | ||||
| from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback | ||||
|  | ||||
| from .coordinator import AmazonConfigEntry | ||||
| from .entity import AmazonEntity | ||||
| from .utils import alexa_api_call, async_update_unique_id | ||||
| from .utils import alexa_api_call | ||||
|  | ||||
| PARALLEL_UPDATES = 1 | ||||
|  | ||||
| @@ -28,17 +24,16 @@ class AmazonSwitchEntityDescription(SwitchEntityDescription): | ||||
|     """Alexa Devices switch entity description.""" | ||||
|  | ||||
|     is_on_fn: Callable[[AmazonDevice], bool] | ||||
|     is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: ( | ||||
|         device.online and device.sensors[key].error is False | ||||
|     ) | ||||
|     subkey: str | ||||
|     method: str | ||||
|  | ||||
|  | ||||
| SWITCHES: Final = ( | ||||
|     AmazonSwitchEntityDescription( | ||||
|         key="dnd", | ||||
|         key="do_not_disturb", | ||||
|         subkey="AUDIO_PLAYER", | ||||
|         translation_key="do_not_disturb", | ||||
|         is_on_fn=lambda device: bool(device.sensors["dnd"].value), | ||||
|         is_on_fn=lambda _device: _device.do_not_disturb, | ||||
|         method="set_do_not_disturb", | ||||
|     ), | ||||
| ) | ||||
| @@ -53,28 +48,13 @@ async def async_setup_entry( | ||||
|  | ||||
|     coordinator = entry.runtime_data | ||||
|  | ||||
|     # Replace unique id for "DND" switch and remove from Speaker Group | ||||
|     await async_update_unique_id( | ||||
|         hass, coordinator, SWITCH_DOMAIN, "do_not_disturb", "dnd" | ||||
|     ) | ||||
|  | ||||
|     known_devices: set[str] = set() | ||||
|  | ||||
|     def _check_device() -> None: | ||||
|         current_devices = set(coordinator.data) | ||||
|         new_devices = current_devices - known_devices | ||||
|         if new_devices: | ||||
|             known_devices.update(new_devices) | ||||
|     async_add_entities( | ||||
|         AmazonSwitchEntity(coordinator, serial_num, switch_desc) | ||||
|         for switch_desc in SWITCHES | ||||
|                 for serial_num in new_devices | ||||
|                 if switch_desc.key in coordinator.data[serial_num].sensors | ||||
|         for serial_num in coordinator.data | ||||
|         if switch_desc.subkey in coordinator.data[serial_num].capabilities | ||||
|     ) | ||||
|  | ||||
|     _check_device() | ||||
|     entry.async_on_unload(coordinator.async_add_listener(_check_device)) | ||||
|  | ||||
|  | ||||
| class AmazonSwitchEntity(AmazonEntity, SwitchEntity): | ||||
|     """Switch device.""" | ||||
| @@ -104,13 +84,3 @@ class AmazonSwitchEntity(AmazonEntity, SwitchEntity): | ||||
|     def is_on(self) -> bool: | ||||
|         """Return True if switch is on.""" | ||||
|         return self.entity_description.is_on_fn(self.device) | ||||
|  | ||||
|     @property | ||||
|     def available(self) -> bool: | ||||
|         """Return if entity is available.""" | ||||
|         return ( | ||||
|             self.entity_description.is_available_fn( | ||||
|                 self.device, self.entity_description.key | ||||
|             ) | ||||
|             and super().available | ||||
|         ) | ||||
|   | ||||
| @@ -6,12 +6,9 @@ from typing import Any, Concatenate | ||||
|  | ||||
| from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData | ||||
|  | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.exceptions import HomeAssistantError | ||||
| import homeassistant.helpers.entity_registry as er | ||||
|  | ||||
| from .const import _LOGGER, DOMAIN | ||||
| from .coordinator import AmazonDevicesCoordinator | ||||
| from .const import DOMAIN | ||||
| from .entity import AmazonEntity | ||||
|  | ||||
|  | ||||
| @@ -41,23 +38,3 @@ def alexa_api_call[_T: AmazonEntity, **_P]( | ||||
|             ) from err | ||||
|  | ||||
|     return cmd_wrapper | ||||
|  | ||||
|  | ||||
| async def async_update_unique_id( | ||||
|     hass: HomeAssistant, | ||||
|     coordinator: AmazonDevicesCoordinator, | ||||
|     domain: str, | ||||
|     old_key: str, | ||||
|     new_key: str, | ||||
| ) -> None: | ||||
|     """Update unique id for entities created with old format.""" | ||||
|     entity_registry = er.async_get(hass) | ||||
|  | ||||
|     for serial_num in coordinator.data: | ||||
|         unique_id = f"{serial_num}-{old_key}" | ||||
|         if entity_id := entity_registry.async_get_entity_id(domain, DOMAIN, unique_id): | ||||
|             _LOGGER.debug("Updating unique_id for %s", entity_id) | ||||
|             new_unique_id = unique_id.replace(old_key, new_key) | ||||
|  | ||||
|             # Update the registry with the new unique_id | ||||
|             entity_registry.async_update_entity(entity_id, new_unique_id=new_unique_id) | ||||
|   | ||||
| @@ -41,7 +41,7 @@ def async_setup_services(hass: HomeAssistant) -> None: | ||||
|         if call.data.get(ATTR_ENTITY_ID) == ENTITY_MATCH_NONE: | ||||
|             return [] | ||||
|  | ||||
|         call_ids = await async_extract_entity_ids(call) | ||||
|         call_ids = await async_extract_entity_ids(hass, call) | ||||
|         entity_ids = [] | ||||
|         for entity_id in hass.data[DATA_AMCREST][CAMERAS]: | ||||
|             if entity_id not in call_ids: | ||||
|   | ||||
| @@ -12,25 +12,10 @@ from homeassistant.helpers.event import async_call_later, async_track_time_inter | ||||
| from homeassistant.helpers.typing import ConfigType | ||||
| from homeassistant.util.hass_dict import HassKey | ||||
|  | ||||
| from .analytics import ( | ||||
|     Analytics, | ||||
|     AnalyticsInput, | ||||
|     AnalyticsModifications, | ||||
|     DeviceAnalyticsModifications, | ||||
|     EntityAnalyticsModifications, | ||||
|     async_devices_payload, | ||||
| ) | ||||
| from .analytics import Analytics | ||||
| from .const import ATTR_ONBOARDED, ATTR_PREFERENCES, DOMAIN, INTERVAL, PREFERENCE_SCHEMA | ||||
| from .http import AnalyticsDevicesView | ||||
|  | ||||
| __all__ = [ | ||||
|     "AnalyticsInput", | ||||
|     "AnalyticsModifications", | ||||
|     "DeviceAnalyticsModifications", | ||||
|     "EntityAnalyticsModifications", | ||||
|     "async_devices_payload", | ||||
| ] | ||||
|  | ||||
| CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) | ||||
|  | ||||
| DATA_COMPONENT: HassKey[Analytics] = HassKey(DOMAIN) | ||||
|   | ||||
| @@ -4,10 +4,9 @@ from __future__ import annotations | ||||
|  | ||||
| import asyncio | ||||
| from asyncio import timeout | ||||
| from collections.abc import Awaitable, Callable, Iterable, Mapping | ||||
| from dataclasses import asdict as dataclass_asdict, dataclass, field | ||||
| from dataclasses import asdict as dataclass_asdict, dataclass | ||||
| from datetime import datetime | ||||
| from typing import Any, Protocol | ||||
| from typing import Any | ||||
| import uuid | ||||
|  | ||||
| import aiohttp | ||||
| @@ -36,14 +35,11 @@ from homeassistant.exceptions import HomeAssistantError | ||||
| from homeassistant.helpers import device_registry as dr, entity_registry as er | ||||
| from homeassistant.helpers.aiohttp_client import async_get_clientsession | ||||
| from homeassistant.helpers.hassio import is_hassio | ||||
| from homeassistant.helpers.singleton import singleton | ||||
| from homeassistant.helpers.storage import Store | ||||
| from homeassistant.helpers.system_info import async_get_system_info | ||||
| from homeassistant.helpers.typing import UNDEFINED | ||||
| from homeassistant.loader import ( | ||||
|     Integration, | ||||
|     IntegrationNotFound, | ||||
|     async_get_integration, | ||||
|     async_get_integrations, | ||||
| ) | ||||
| from homeassistant.setup import async_get_loaded_integrations | ||||
| @@ -79,115 +75,12 @@ from .const import ( | ||||
|     ATTR_USER_COUNT, | ||||
|     ATTR_UUID, | ||||
|     ATTR_VERSION, | ||||
|     DOMAIN, | ||||
|     LOGGER, | ||||
|     PREFERENCE_SCHEMA, | ||||
|     STORAGE_KEY, | ||||
|     STORAGE_VERSION, | ||||
| ) | ||||
|  | ||||
| DATA_ANALYTICS_MODIFIERS = "analytics_modifiers" | ||||
|  | ||||
| type AnalyticsModifier = Callable[ | ||||
|     [HomeAssistant, AnalyticsInput], Awaitable[AnalyticsModifications] | ||||
| ] | ||||
|  | ||||
|  | ||||
| @singleton(DATA_ANALYTICS_MODIFIERS) | ||||
| def _async_get_modifiers( | ||||
|     hass: HomeAssistant, | ||||
| ) -> dict[str, AnalyticsModifier | None]: | ||||
|     """Return the analytics modifiers.""" | ||||
|     return {} | ||||
|  | ||||
|  | ||||
| @dataclass | ||||
| class AnalyticsInput: | ||||
|     """Analytics input for a single integration. | ||||
|  | ||||
|     This is sent to integrations that implement the platform. | ||||
|     """ | ||||
|  | ||||
|     device_ids: Iterable[str] = field(default_factory=list) | ||||
|     entity_ids: Iterable[str] = field(default_factory=list) | ||||
|  | ||||
|  | ||||
| @dataclass | ||||
| class AnalyticsModifications: | ||||
|     """Analytics config for a single integration. | ||||
|  | ||||
|     This is used by integrations that implement the platform. | ||||
|     """ | ||||
|  | ||||
|     remove: bool = False | ||||
|     devices: Mapping[str, DeviceAnalyticsModifications] | None = None | ||||
|     entities: Mapping[str, EntityAnalyticsModifications] | None = None | ||||
|  | ||||
|  | ||||
| @dataclass | ||||
| class DeviceAnalyticsModifications: | ||||
|     """Analytics config for a single device. | ||||
|  | ||||
|     This is used by integrations that implement the platform. | ||||
|     """ | ||||
|  | ||||
|     remove: bool = False | ||||
|  | ||||
|  | ||||
| @dataclass | ||||
| class EntityAnalyticsModifications: | ||||
|     """Analytics config for a single entity. | ||||
|  | ||||
|     This is used by integrations that implement the platform. | ||||
|     """ | ||||
|  | ||||
|     remove: bool = False | ||||
|  | ||||
|  | ||||
| class AnalyticsPlatformProtocol(Protocol): | ||||
|     """Define the format of analytics platforms.""" | ||||
|  | ||||
|     async def async_modify_analytics( | ||||
|         self, | ||||
|         hass: HomeAssistant, | ||||
|         analytics_input: AnalyticsInput, | ||||
|     ) -> AnalyticsModifications: | ||||
|         """Modify the analytics.""" | ||||
|  | ||||
|  | ||||
| async def _async_get_analytics_platform( | ||||
|     hass: HomeAssistant, domain: str | ||||
| ) -> AnalyticsPlatformProtocol | None: | ||||
|     """Get analytics platform.""" | ||||
|     try: | ||||
|         integration = await async_get_integration(hass, domain) | ||||
|     except IntegrationNotFound: | ||||
|         return None | ||||
|     try: | ||||
|         return await integration.async_get_platform(DOMAIN) | ||||
|     except ImportError: | ||||
|         return None | ||||
|  | ||||
|  | ||||
| async def _async_get_modifier( | ||||
|     hass: HomeAssistant, domain: str | ||||
| ) -> AnalyticsModifier | None: | ||||
|     """Get analytics modifier.""" | ||||
|     modifiers = _async_get_modifiers(hass) | ||||
|     modifier = modifiers.get(domain, UNDEFINED) | ||||
|  | ||||
|     if modifier is not UNDEFINED: | ||||
|         return modifier | ||||
|  | ||||
|     platform = await _async_get_analytics_platform(hass, domain) | ||||
|     if platform is None: | ||||
|         modifiers[domain] = None | ||||
|         return None | ||||
|  | ||||
|     modifier = getattr(platform, "async_modify_analytics", None) | ||||
|     modifiers[domain] = modifier | ||||
|     return modifier | ||||
|  | ||||
|  | ||||
| def gen_uuid() -> str: | ||||
|     """Generate a new UUID.""" | ||||
| @@ -500,22 +393,17 @@ def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]: | ||||
|     return domains | ||||
|  | ||||
|  | ||||
| DEFAULT_ANALYTICS_CONFIG = AnalyticsModifications() | ||||
| DEFAULT_DEVICE_ANALYTICS_CONFIG = DeviceAnalyticsModifications() | ||||
| DEFAULT_ENTITY_ANALYTICS_CONFIG = EntityAnalyticsModifications() | ||||
|  | ||||
|  | ||||
| async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901 | ||||
| async def async_devices_payload(hass: HomeAssistant) -> dict: | ||||
|     """Return detailed information about entities and devices.""" | ||||
|     integrations_info: dict[str, dict[str, Any]] = {} | ||||
|  | ||||
|     dev_reg = dr.async_get(hass) | ||||
|     ent_reg = er.async_get(hass) | ||||
|  | ||||
|     integration_inputs: dict[str, tuple[list[str], list[str]]] = {} | ||||
|     integration_configs: dict[str, AnalyticsModifications] = {} | ||||
|     # We need to refer to other devices, for example in `via_device` field. | ||||
|     # We don't however send the original device ids outside of Home Assistant, | ||||
|     # instead we refer to devices by (integration_domain, index_in_integration_device_list). | ||||
|     device_id_mapping: dict[str, tuple[str, int]] = {} | ||||
|  | ||||
|     removed_devices: set[str] = set() | ||||
|  | ||||
|     # Get device list | ||||
|     for device_entry in dev_reg.devices.values(): | ||||
|         if not device_entry.primary_config_entry: | ||||
|             continue | ||||
| @@ -527,108 +415,18 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901 | ||||
|         if config_entry is None: | ||||
|             continue | ||||
|  | ||||
|         if device_entry.entry_type is dr.DeviceEntryType.SERVICE: | ||||
|             removed_devices.add(device_entry.id) | ||||
|             continue | ||||
|  | ||||
|         integration_domain = config_entry.domain | ||||
|  | ||||
|         integration_input = integration_inputs.setdefault(integration_domain, ([], [])) | ||||
|         integration_input[0].append(device_entry.id) | ||||
|  | ||||
|     # Get entity list | ||||
|     for entity_entry in ent_reg.entities.values(): | ||||
|         integration_domain = entity_entry.platform | ||||
|  | ||||
|         integration_input = integration_inputs.setdefault(integration_domain, ([], [])) | ||||
|         integration_input[1].append(entity_entry.entity_id) | ||||
|  | ||||
|     integrations = { | ||||
|         domain: integration | ||||
|         for domain, integration in ( | ||||
|             await async_get_integrations(hass, integration_inputs.keys()) | ||||
|         ).items() | ||||
|         if isinstance(integration, Integration) | ||||
|     } | ||||
|  | ||||
|     # Filter out custom integrations and integrations that are not device or hub type | ||||
|     integration_inputs = { | ||||
|         domain: integration_info | ||||
|         for domain, integration_info in integration_inputs.items() | ||||
|         if (integration := integrations.get(domain)) is not None | ||||
|         and integration.is_built_in | ||||
|         and integration.manifest.get("integration_type") in ("device", "hub") | ||||
|     } | ||||
|  | ||||
|     # Call integrations that implement the analytics platform | ||||
|     for integration_domain, integration_input in integration_inputs.items(): | ||||
|         if ( | ||||
|             modifier := await _async_get_modifier(hass, integration_domain) | ||||
|         ) is not None: | ||||
|             try: | ||||
|                 integration_config = await modifier( | ||||
|                     hass, AnalyticsInput(*integration_input) | ||||
|                 ) | ||||
|             except Exception as err:  # noqa: BLE001 | ||||
|                 LOGGER.exception( | ||||
|                     "Calling async_modify_analytics for integration '%s' failed: %s", | ||||
|                     integration_domain, | ||||
|                     err, | ||||
|                 ) | ||||
|                 integration_configs[integration_domain] = AnalyticsModifications( | ||||
|                     remove=True | ||||
|                 ) | ||||
|                 continue | ||||
|  | ||||
|             if not isinstance(integration_config, AnalyticsModifications): | ||||
|                 LOGGER.error(  # type: ignore[unreachable] | ||||
|                     "Calling async_modify_analytics for integration '%s' did not return an AnalyticsConfig", | ||||
|                     integration_domain, | ||||
|                 ) | ||||
|                 integration_configs[integration_domain] = AnalyticsModifications( | ||||
|                     remove=True | ||||
|                 ) | ||||
|                 continue | ||||
|  | ||||
|             integration_configs[integration_domain] = integration_config | ||||
|  | ||||
|     integrations_info: dict[str, dict[str, Any]] = {} | ||||
|  | ||||
|     # We need to refer to other devices, for example in `via_device` field. | ||||
|     # We don't however send the original device ids outside of Home Assistant, | ||||
|     # instead we refer to devices by (integration_domain, index_in_integration_device_list). | ||||
|     device_id_mapping: dict[str, tuple[str, int]] = {} | ||||
|  | ||||
|     # Fill out information about devices | ||||
|     for integration_domain, integration_input in integration_inputs.items(): | ||||
|         integration_config = integration_configs.get( | ||||
|             integration_domain, DEFAULT_ANALYTICS_CONFIG | ||||
|         ) | ||||
|  | ||||
|         if integration_config.remove: | ||||
|             continue | ||||
|  | ||||
|         integration_info = integrations_info.setdefault( | ||||
|             integration_domain, {"devices": [], "entities": []} | ||||
|         ) | ||||
|  | ||||
|         devices_info = integration_info["devices"] | ||||
|  | ||||
|         for device_id in integration_input[0]: | ||||
|             device_config = DEFAULT_DEVICE_ANALYTICS_CONFIG | ||||
|             if integration_config.devices is not None: | ||||
|                 device_config = integration_config.devices.get(device_id, device_config) | ||||
|  | ||||
|             if device_config.remove: | ||||
|                 removed_devices.add(device_id) | ||||
|                 continue | ||||
|  | ||||
|             device_entry = dev_reg.devices[device_id] | ||||
|  | ||||
|             device_id_mapping[device_id] = (integration_domain, len(devices_info)) | ||||
|         device_id_mapping[device_entry.id] = (integration_domain, len(devices_info)) | ||||
|  | ||||
|         devices_info.append( | ||||
|             { | ||||
|                 "entities": [], | ||||
|                 "entry_type": device_entry.entry_type, | ||||
|                 "has_configuration_url": device_entry.configuration_url is not None, | ||||
|                 "hw_version": device_entry.hw_version, | ||||
| @@ -637,7 +435,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901 | ||||
|                 "model_id": device_entry.model_id, | ||||
|                 "sw_version": device_entry.sw_version, | ||||
|                 "via_device": device_entry.via_device_id, | ||||
|                     "entities": [], | ||||
|             } | ||||
|         ) | ||||
|  | ||||
| @@ -648,15 +445,10 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901 | ||||
|                 continue | ||||
|             device_info["via_device"] = device_id_mapping.get(device_info["via_device"]) | ||||
|  | ||||
|     # Fill out information about entities | ||||
|     for integration_domain, integration_input in integration_inputs.items(): | ||||
|         integration_config = integration_configs.get( | ||||
|             integration_domain, DEFAULT_ANALYTICS_CONFIG | ||||
|         ) | ||||
|  | ||||
|         if integration_config.remove: | ||||
|             continue | ||||
|     ent_reg = er.async_get(hass) | ||||
|  | ||||
|     for entity_entry in ent_reg.entities.values(): | ||||
|         integration_domain = entity_entry.platform | ||||
|         integration_info = integrations_info.setdefault( | ||||
|             integration_domain, {"devices": [], "entities": []} | ||||
|         ) | ||||
| @@ -664,30 +456,17 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901 | ||||
|         devices_info = integration_info["devices"] | ||||
|         entities_info = integration_info["entities"] | ||||
|  | ||||
|         for entity_id in integration_input[1]: | ||||
|             entity_config = DEFAULT_ENTITY_ANALYTICS_CONFIG | ||||
|             if integration_config.entities is not None: | ||||
|                 entity_config = integration_config.entities.get( | ||||
|                     entity_id, entity_config | ||||
|                 ) | ||||
|  | ||||
|             if entity_config.remove: | ||||
|                 continue | ||||
|  | ||||
|             entity_entry = ent_reg.entities[entity_id] | ||||
|  | ||||
|             entity_state = hass.states.get(entity_id) | ||||
|         entity_state = hass.states.get(entity_entry.entity_id) | ||||
|  | ||||
|         entity_info = { | ||||
|             # LIMITATION: `assumed_state` can be overridden by users; | ||||
|             # we should replace it with the original value in the future. | ||||
|             # It is also not present, if entity is not in the state machine, | ||||
|             # which can happen for disabled entities. | ||||
|                 "assumed_state": ( | ||||
|                     entity_state.attributes.get(ATTR_ASSUMED_STATE, False) | ||||
|             "assumed_state": entity_state.attributes.get(ATTR_ASSUMED_STATE, False) | ||||
|             if entity_state is not None | ||||
|                     else None | ||||
|                 ), | ||||
|             else None, | ||||
|             "capabilities": entity_entry.capabilities, | ||||
|             "domain": entity_entry.domain, | ||||
|             "entity_category": entity_entry.entity_category, | ||||
|             "has_entity_name": entity_entry.has_entity_name, | ||||
| @@ -697,20 +476,33 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901 | ||||
|             "unit_of_measurement": entity_entry.unit_of_measurement, | ||||
|         } | ||||
|  | ||||
|             if (device_id_ := entity_entry.device_id) is not None: | ||||
|                 if device_id_ in removed_devices: | ||||
|                     # The device was removed, so we remove the entity too | ||||
|                     continue | ||||
|  | ||||
|         if ( | ||||
|                     new_device_id := device_id_mapping.get(device_id_) | ||||
|                 ) is not None and (new_device_id[0] == integration_domain): | ||||
|             ((device_id := entity_entry.device_id) is not None) | ||||
|             and ((new_device_id := device_id_mapping.get(device_id)) is not None) | ||||
|             and (new_device_id[0] == integration_domain) | ||||
|         ): | ||||
|             device_info = devices_info[new_device_id[1]] | ||||
|             device_info["entities"].append(entity_info) | ||||
|                     continue | ||||
|  | ||||
|         else: | ||||
|             entities_info.append(entity_info) | ||||
|  | ||||
|     integrations = { | ||||
|         domain: integration | ||||
|         for domain, integration in ( | ||||
|             await async_get_integrations(hass, integrations_info.keys()) | ||||
|         ).items() | ||||
|         if isinstance(integration, Integration) | ||||
|     } | ||||
|  | ||||
|     for domain, integration_info in integrations_info.items(): | ||||
|         if integration := integrations.get(domain): | ||||
|             integration_info["is_custom_integration"] = not integration.is_built_in | ||||
|             # Include version for custom integrations | ||||
|             if not integration.is_built_in and integration.version: | ||||
|                 integration_info["custom_integration_version"] = str( | ||||
|                     integration.version | ||||
|                 ) | ||||
|  | ||||
|     return { | ||||
|         "version": "home-assistant:1", | ||||
|         "home_assistant": HA_VERSION, | ||||
|   | ||||
| @@ -2,7 +2,7 @@ | ||||
|   "domain": "analytics", | ||||
|   "name": "Analytics", | ||||
|   "after_dependencies": ["energy", "hassio", "recorder"], | ||||
|   "codeowners": ["@home-assistant/core"], | ||||
|   "codeowners": ["@home-assistant/core", "@ludeeus"], | ||||
|   "dependencies": ["api", "websocket_api", "http"], | ||||
|   "documentation": "https://www.home-assistant.io/integrations/analytics", | ||||
|   "integration_type": "system", | ||||
|   | ||||
| @@ -467,10 +467,7 @@ async def async_setup_entry( | ||||
|     # periodical (or manual) self test since last daemon restart. It might not be available | ||||
|     # when we set up the integration, and we do not know if it would ever be available. Here we | ||||
|     # add it anyway and mark it as unknown initially. | ||||
|     # | ||||
|     # We also sort the resources to ensure the order of entities created is deterministic since | ||||
|     # "APCMODEL" and "MODEL" resources map to the same "Model" name. | ||||
|     for resource in sorted(available_resources | {LAST_S_TEST}): | ||||
|     for resource in available_resources | {LAST_S_TEST}: | ||||
|         if resource not in SENSORS: | ||||
|             _LOGGER.warning("Invalid resource from APCUPSd: %s", resource.upper()) | ||||
|             continue | ||||
|   | ||||
| @@ -1308,9 +1308,7 @@ class PipelineRun: | ||||
|                     # instead of a full response. | ||||
|                     all_targets_in_satellite_area = ( | ||||
|                         self._get_all_targets_in_satellite_area( | ||||
|                             conversation_result.response, | ||||
|                             self._satellite_id, | ||||
|                             self._device_id, | ||||
|                             conversation_result.response, self._device_id | ||||
|                         ) | ||||
|                     ) | ||||
|  | ||||
| @@ -1339,62 +1337,39 @@ class PipelineRun: | ||||
|         return (speech, all_targets_in_satellite_area) | ||||
|  | ||||
|     def _get_all_targets_in_satellite_area( | ||||
|         self, | ||||
|         intent_response: intent.IntentResponse, | ||||
|         satellite_id: str | None, | ||||
|         device_id: str | None, | ||||
|         self, intent_response: intent.IntentResponse, device_id: str | None | ||||
|     ) -> bool: | ||||
|         """Return true if all targeted entities were in the same area as the device.""" | ||||
|         if ( | ||||
|             intent_response.response_type != intent.IntentResponseType.ACTION_DONE | ||||
|             or not intent_response.matched_states | ||||
|             (intent_response.response_type != intent.IntentResponseType.ACTION_DONE) | ||||
|             or (not intent_response.matched_states) | ||||
|             or (not device_id) | ||||
|         ): | ||||
|             return False | ||||
|  | ||||
|         device_registry = dr.async_get(self.hass) | ||||
|  | ||||
|         if (not (device := device_registry.async_get(device_id))) or ( | ||||
|             not device.area_id | ||||
|         ): | ||||
|             return False | ||||
|  | ||||
|         entity_registry = er.async_get(self.hass) | ||||
|         device_registry = dr.async_get(self.hass) | ||||
|  | ||||
|         area_id: str | None = None | ||||
|  | ||||
|         if ( | ||||
|             satellite_id is not None | ||||
|             and (target_entity_entry := entity_registry.async_get(satellite_id)) | ||||
|             is not None | ||||
|         ): | ||||
|             area_id = target_entity_entry.area_id | ||||
|             device_id = target_entity_entry.device_id | ||||
|  | ||||
|         if area_id is None: | ||||
|             if device_id is None: | ||||
|                 return False | ||||
|  | ||||
|             device_entry = device_registry.async_get(device_id) | ||||
|             if device_entry is None: | ||||
|                 return False | ||||
|  | ||||
|             area_id = device_entry.area_id | ||||
|             if area_id is None: | ||||
|                 return False | ||||
|  | ||||
|         for state in intent_response.matched_states: | ||||
|             target_entity_entry = entity_registry.async_get(state.entity_id) | ||||
|             if target_entity_entry is None: | ||||
|             entity = entity_registry.async_get(state.entity_id) | ||||
|             if not entity: | ||||
|                 return False | ||||
|  | ||||
|             target_area_id = target_entity_entry.area_id | ||||
|             if target_area_id is None: | ||||
|                 if target_entity_entry.device_id is None: | ||||
|             if (entity_area_id := entity.area_id) is None: | ||||
|                 if (entity.device_id is None) or ( | ||||
|                     (entity_device := device_registry.async_get(entity.device_id)) | ||||
|                     is None | ||||
|                 ): | ||||
|                     return False | ||||
|  | ||||
|                 target_device_entry = device_registry.async_get( | ||||
|                     target_entity_entry.device_id | ||||
|                 ) | ||||
|                 if target_device_entry is None: | ||||
|                     return False | ||||
|                 entity_area_id = entity_device.area_id | ||||
|  | ||||
|                 target_area_id = target_device_entry.area_id | ||||
|  | ||||
|             if target_area_id != area_id: | ||||
|             if entity_area_id != device.area_id: | ||||
|                 return False | ||||
|  | ||||
|         return True | ||||
|   | ||||
| @@ -109,7 +109,7 @@ class AssistPipelineSelect(SelectEntity, restore_state.RestoreEntity): | ||||
|         ) | ||||
|  | ||||
|         state = await self.async_get_last_state() | ||||
|         if (state is not None) and (state.state in self.options): | ||||
|         if state is not None and state.state in self.options: | ||||
|             self._attr_current_option = state.state | ||||
|  | ||||
|         if self.registry_entry and (device_id := self.registry_entry.device_id): | ||||
| @@ -119,7 +119,7 @@ class AssistPipelineSelect(SelectEntity, restore_state.RestoreEntity): | ||||
|  | ||||
|             def cleanup() -> None: | ||||
|                 """Clean up registered device.""" | ||||
|                 pipeline_data.pipeline_devices.pop(device_id, None) | ||||
|                 pipeline_data.pipeline_devices.pop(device_id) | ||||
|  | ||||
|             self.async_on_remove(cleanup) | ||||
|  | ||||
|   | ||||
| @@ -120,7 +120,6 @@ class AsusWrtBridge(ABC): | ||||
|  | ||||
|     def __init__(self, host: str) -> None: | ||||
|         """Initialize Bridge.""" | ||||
|         self._configuration_url = f"http://{host}" | ||||
|         self._host = host | ||||
|         self._firmware: str | None = None | ||||
|         self._label_mac: str | None = None | ||||
| @@ -128,11 +127,6 @@ class AsusWrtBridge(ABC): | ||||
|         self._model_id: str | None = None | ||||
|         self._serial_number: str | None = None | ||||
|  | ||||
|     @property | ||||
|     def configuration_url(self) -> str: | ||||
|         """Return configuration URL.""" | ||||
|         return self._configuration_url | ||||
|  | ||||
|     @property | ||||
|     def host(self) -> str: | ||||
|         """Return hostname.""" | ||||
| @@ -377,7 +371,6 @@ class AsusWrtHttpBridge(AsusWrtBridge): | ||||
|         # get main router properties | ||||
|         if mac := _identity.mac: | ||||
|             self._label_mac = format_mac(mac) | ||||
|         self._configuration_url = self._api.webpanel | ||||
|         self._firmware = str(_identity.firmware) | ||||
|         self._model = _identity.model | ||||
|         self._model_id = _identity.product_id | ||||
|   | ||||
| @@ -2,7 +2,9 @@ | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| from typing import Any | ||||
| from typing import Any, TypeVar | ||||
|  | ||||
| T = TypeVar("T", dict[str, Any], list[Any], None) | ||||
|  | ||||
| TRANSLATION_MAP = { | ||||
|     "wan_rx": "sensor_rx_bytes", | ||||
| @@ -34,7 +36,7 @@ def clean_dict(raw: dict[str, Any]) -> dict[str, Any]: | ||||
|     return {k: v for k, v in raw.items() if v is not None or k.endswith("state")} | ||||
|  | ||||
|  | ||||
| def translate_to_legacy[T: (dict[str, Any], list[Any], None)](raw: T) -> T: | ||||
| def translate_to_legacy(raw: T) -> T: | ||||
|     """Translate raw data to legacy format for dicts and lists.""" | ||||
|  | ||||
|     if raw is None: | ||||
|   | ||||
| @@ -388,13 +388,13 @@ class AsusWrtRouter: | ||||
|     def device_info(self) -> DeviceInfo: | ||||
|         """Return the device information.""" | ||||
|         info = DeviceInfo( | ||||
|             configuration_url=self._api.configuration_url, | ||||
|             identifiers={(DOMAIN, self._entry.unique_id or "AsusWRT")}, | ||||
|             name=self.host, | ||||
|             model=self._api.model or "Asus Router", | ||||
|             model_id=self._api.model_id, | ||||
|             serial_number=self._api.serial_number, | ||||
|             manufacturer="Asus", | ||||
|             configuration_url=f"http://{self.host}", | ||||
|         ) | ||||
|         if self._api.firmware: | ||||
|             info["sw_version"] = self._api.firmware | ||||
|   | ||||
| @@ -2,12 +2,13 @@ | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| from collections.abc import Callable, Coroutine | ||||
| import logging | ||||
| from typing import Any | ||||
|  | ||||
| from aiohttp import ClientResponseError | ||||
| from yalexs.activity import ActivityType | ||||
| from yalexs.lock import Lock, LockOperation, LockStatus | ||||
| from yalexs.activity import ActivityType, ActivityTypes | ||||
| from yalexs.lock import Lock, LockStatus | ||||
| from yalexs.util import get_latest_activity, update_lock_detail_from_activity | ||||
|  | ||||
| from homeassistant.components.lock import ATTR_CHANGED_BY, LockEntity, LockEntityFeature | ||||
| @@ -49,25 +50,30 @@ class AugustLock(AugustEntity, RestoreEntity, LockEntity): | ||||
|  | ||||
|     async def async_lock(self, **kwargs: Any) -> None: | ||||
|         """Lock the device.""" | ||||
|         await self._perform_lock_operation(LockOperation.LOCK) | ||||
|         if self._data.push_updates_connected: | ||||
|             await self._data.async_lock_async(self._device_id, self._hyper_bridge) | ||||
|             return | ||||
|         await self._call_lock_operation(self._data.async_lock) | ||||
|  | ||||
|     async def async_open(self, **kwargs: Any) -> None: | ||||
|         """Open/unlatch the device.""" | ||||
|         await self._perform_lock_operation(LockOperation.OPEN) | ||||
|         if self._data.push_updates_connected: | ||||
|             await self._data.async_unlatch_async(self._device_id, self._hyper_bridge) | ||||
|             return | ||||
|         await self._call_lock_operation(self._data.async_unlatch) | ||||
|  | ||||
|     async def async_unlock(self, **kwargs: Any) -> None: | ||||
|         """Unlock the device.""" | ||||
|         await self._perform_lock_operation(LockOperation.UNLOCK) | ||||
|         if self._data.push_updates_connected: | ||||
|             await self._data.async_unlock_async(self._device_id, self._hyper_bridge) | ||||
|             return | ||||
|         await self._call_lock_operation(self._data.async_unlock) | ||||
|  | ||||
|     async def _perform_lock_operation(self, operation: LockOperation) -> None: | ||||
|         """Perform a lock operation.""" | ||||
|     async def _call_lock_operation( | ||||
|         self, lock_operation: Callable[[str], Coroutine[Any, Any, list[ActivityTypes]]] | ||||
|     ) -> None: | ||||
|         try: | ||||
|             activities = await self._data.async_operate_lock( | ||||
|                 self._device_id, | ||||
|                 operation, | ||||
|                 self._data.push_updates_connected, | ||||
|                 self._hyper_bridge, | ||||
|             ) | ||||
|             activities = await lock_operation(self._device_id) | ||||
|         except ClientResponseError as err: | ||||
|             if err.status == LOCK_JAMMED_ERR: | ||||
|                 self._detail.lock_status = LockStatus.JAMMED | ||||
|   | ||||
| @@ -29,5 +29,5 @@ | ||||
|   "documentation": "https://www.home-assistant.io/integrations/august", | ||||
|   "iot_class": "cloud_push", | ||||
|   "loggers": ["pubnub", "yalexs"], | ||||
|   "requirements": ["yalexs==9.2.0", "yalexs-ble==3.1.2"] | ||||
|   "requirements": ["yalexs==9.0.1", "yalexs-ble==3.1.2"] | ||||
| } | ||||
|   | ||||
| @@ -26,6 +26,9 @@ async def async_setup_entry( | ||||
|  | ||||
|     if CONF_HOST in config_entry.data: | ||||
|         coordinator = AwairLocalDataUpdateCoordinator(hass, config_entry, session) | ||||
|         config_entry.async_on_unload( | ||||
|             config_entry.add_update_listener(_async_update_listener) | ||||
|         ) | ||||
|     else: | ||||
|         coordinator = AwairCloudDataUpdateCoordinator(hass, config_entry, session) | ||||
|  | ||||
| @@ -33,11 +36,6 @@ async def async_setup_entry( | ||||
|  | ||||
|     config_entry.runtime_data = coordinator | ||||
|  | ||||
|     if CONF_HOST in config_entry.data: | ||||
|         config_entry.async_on_unload( | ||||
|             config_entry.add_update_listener(_async_update_listener) | ||||
|         ) | ||||
|  | ||||
|     await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) | ||||
|  | ||||
|     return True | ||||
|   | ||||
| @@ -26,6 +26,7 @@ EXCLUDE_FROM_BACKUP = [ | ||||
|     "tmp_backups/*.tar", | ||||
|     "OZW_Log.txt", | ||||
|     "tts/*", | ||||
|     "ai_task/*", | ||||
| ] | ||||
|  | ||||
| EXCLUDE_DATABASE_FROM_BACKUP = [ | ||||
|   | ||||
| @@ -8,7 +8,7 @@ import threading | ||||
| from typing import IO, cast | ||||
|  | ||||
| from aiohttp import BodyPartReader | ||||
| from aiohttp.hdrs import CONTENT_DISPOSITION, CONTENT_TYPE | ||||
| from aiohttp.hdrs import CONTENT_DISPOSITION | ||||
| from aiohttp.web import FileResponse, Request, Response, StreamResponse | ||||
| from multidict import istr | ||||
|  | ||||
| @@ -17,7 +17,6 @@ from homeassistant.core import HomeAssistant, callback | ||||
| from homeassistant.exceptions import HomeAssistantError | ||||
| from homeassistant.helpers import frame | ||||
| from homeassistant.util import slugify | ||||
| from homeassistant.util.async_iterator import AsyncIteratorReader, AsyncIteratorWriter | ||||
|  | ||||
| from . import util | ||||
| from .agent import BackupAgent | ||||
| @@ -77,8 +76,7 @@ class DownloadBackupView(HomeAssistantView): | ||||
|             return Response(status=HTTPStatus.NOT_FOUND) | ||||
|  | ||||
|         headers = { | ||||
|             CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar", | ||||
|             CONTENT_TYPE: "application/x-tar", | ||||
|             CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar" | ||||
|         } | ||||
|  | ||||
|         try: | ||||
| @@ -145,7 +143,7 @@ class DownloadBackupView(HomeAssistantView): | ||||
|                 return Response(status=HTTPStatus.NOT_FOUND) | ||||
|         else: | ||||
|             stream = await agent.async_download_backup(backup_id) | ||||
|             reader = cast(IO[bytes], AsyncIteratorReader(hass.loop, stream)) | ||||
|             reader = cast(IO[bytes], util.AsyncIteratorReader(hass, stream)) | ||||
|  | ||||
|         worker_done_event = asyncio.Event() | ||||
|  | ||||
| @@ -153,7 +151,7 @@ class DownloadBackupView(HomeAssistantView): | ||||
|             """Call by the worker thread when it's done.""" | ||||
|             hass.loop.call_soon_threadsafe(worker_done_event.set) | ||||
|  | ||||
|         stream = AsyncIteratorWriter(hass.loop) | ||||
|         stream = util.AsyncIteratorWriter(hass) | ||||
|         worker = threading.Thread( | ||||
|             target=util.decrypt_backup, | ||||
|             args=[backup, reader, stream, password, on_done, 0, []], | ||||
|   | ||||
| @@ -38,7 +38,6 @@ from homeassistant.helpers import ( | ||||
| ) | ||||
| from homeassistant.helpers.json import json_bytes | ||||
| from homeassistant.util import dt as dt_util, json as json_util | ||||
| from homeassistant.util.async_iterator import AsyncIteratorReader | ||||
|  | ||||
| from . import util as backup_util | ||||
| from .agent import ( | ||||
| @@ -73,6 +72,7 @@ from .models import ( | ||||
| ) | ||||
| from .store import BackupStore | ||||
| from .util import ( | ||||
|     AsyncIteratorReader, | ||||
|     DecryptedBackupStreamer, | ||||
|     EncryptedBackupStreamer, | ||||
|     make_backup_dir, | ||||
| @@ -1525,7 +1525,7 @@ class BackupManager: | ||||
|             reader = await self.hass.async_add_executor_job(open, path.as_posix(), "rb") | ||||
|         else: | ||||
|             backup_stream = await agent.async_download_backup(backup_id) | ||||
|             reader = cast(IO[bytes], AsyncIteratorReader(self.hass.loop, backup_stream)) | ||||
|             reader = cast(IO[bytes], AsyncIteratorReader(self.hass, backup_stream)) | ||||
|         try: | ||||
|             await self.hass.async_add_executor_job( | ||||
|                 validate_password_stream, reader, password | ||||
|   | ||||
| @@ -4,6 +4,7 @@ from __future__ import annotations | ||||
|  | ||||
| import asyncio | ||||
| from collections.abc import AsyncIterator, Callable, Coroutine | ||||
| from concurrent.futures import CancelledError, Future | ||||
| import copy | ||||
| from dataclasses import dataclass, replace | ||||
| from io import BytesIO | ||||
| @@ -13,7 +14,7 @@ from pathlib import Path, PurePath | ||||
| from queue import SimpleQueue | ||||
| import tarfile | ||||
| import threading | ||||
| from typing import IO, Any, cast | ||||
| from typing import IO, Any, Self, cast | ||||
|  | ||||
| import aiohttp | ||||
| from securetar import SecureTarError, SecureTarFile, SecureTarReadError | ||||
| @@ -22,11 +23,6 @@ from homeassistant.backup_restore import password_to_key | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.exceptions import HomeAssistantError | ||||
| from homeassistant.util import dt as dt_util | ||||
| from homeassistant.util.async_iterator import ( | ||||
|     Abort, | ||||
|     AsyncIteratorReader, | ||||
|     AsyncIteratorWriter, | ||||
| ) | ||||
| from homeassistant.util.json import JsonObjectType, json_loads_object | ||||
|  | ||||
| from .const import BUF_SIZE, LOGGER | ||||
| @@ -63,6 +59,12 @@ class BackupEmpty(DecryptError): | ||||
|     _message = "No tar files found in the backup." | ||||
|  | ||||
|  | ||||
| class AbortCipher(HomeAssistantError): | ||||
|     """Abort the cipher operation.""" | ||||
|  | ||||
|     _message = "Abort cipher operation." | ||||
|  | ||||
|  | ||||
| def make_backup_dir(path: Path) -> None: | ||||
|     """Create a backup directory if it does not exist.""" | ||||
|     path.mkdir(exist_ok=True) | ||||
| @@ -164,6 +166,106 @@ def validate_password(path: Path, password: str | None) -> bool: | ||||
|     return False | ||||
|  | ||||
|  | ||||
| class AsyncIteratorReader: | ||||
|     """Wrap an AsyncIterator.""" | ||||
|  | ||||
|     def __init__(self, hass: HomeAssistant, stream: AsyncIterator[bytes]) -> None: | ||||
|         """Initialize the wrapper.""" | ||||
|         self._aborted = False | ||||
|         self._hass = hass | ||||
|         self._stream = stream | ||||
|         self._buffer: bytes | None = None | ||||
|         self._next_future: Future[bytes | None] | None = None | ||||
|         self._pos: int = 0 | ||||
|  | ||||
|     async def _next(self) -> bytes | None: | ||||
|         """Get the next chunk from the iterator.""" | ||||
|         return await anext(self._stream, None) | ||||
|  | ||||
|     def abort(self) -> None: | ||||
|         """Abort the reader.""" | ||||
|         self._aborted = True | ||||
|         if self._next_future is not None: | ||||
|             self._next_future.cancel() | ||||
|  | ||||
|     def read(self, n: int = -1, /) -> bytes: | ||||
|         """Read data from the iterator.""" | ||||
|         result = bytearray() | ||||
|         while n < 0 or len(result) < n: | ||||
|             if not self._buffer: | ||||
|                 self._next_future = asyncio.run_coroutine_threadsafe( | ||||
|                     self._next(), self._hass.loop | ||||
|                 ) | ||||
|                 if self._aborted: | ||||
|                     self._next_future.cancel() | ||||
|                     raise AbortCipher | ||||
|                 try: | ||||
|                     self._buffer = self._next_future.result() | ||||
|                 except CancelledError as err: | ||||
|                     raise AbortCipher from err | ||||
|                 self._pos = 0 | ||||
|             if not self._buffer: | ||||
|                 # The stream is exhausted | ||||
|                 break | ||||
|             chunk = self._buffer[self._pos : self._pos + n] | ||||
|             result.extend(chunk) | ||||
|             n -= len(chunk) | ||||
|             self._pos += len(chunk) | ||||
|             if self._pos == len(self._buffer): | ||||
|                 self._buffer = None | ||||
|         return bytes(result) | ||||
|  | ||||
|     def close(self) -> None: | ||||
|         """Close the iterator.""" | ||||
|  | ||||
|  | ||||
| class AsyncIteratorWriter: | ||||
|     """Wrap an AsyncIterator.""" | ||||
|  | ||||
|     def __init__(self, hass: HomeAssistant) -> None: | ||||
|         """Initialize the wrapper.""" | ||||
|         self._aborted = False | ||||
|         self._hass = hass | ||||
|         self._pos: int = 0 | ||||
|         self._queue: asyncio.Queue[bytes | None] = asyncio.Queue(maxsize=1) | ||||
|         self._write_future: Future[bytes | None] | None = None | ||||
|  | ||||
|     def __aiter__(self) -> Self: | ||||
|         """Return the iterator.""" | ||||
|         return self | ||||
|  | ||||
|     async def __anext__(self) -> bytes: | ||||
|         """Get the next chunk from the iterator.""" | ||||
|         if data := await self._queue.get(): | ||||
|             return data | ||||
|         raise StopAsyncIteration | ||||
|  | ||||
|     def abort(self) -> None: | ||||
|         """Abort the writer.""" | ||||
|         self._aborted = True | ||||
|         if self._write_future is not None: | ||||
|             self._write_future.cancel() | ||||
|  | ||||
|     def tell(self) -> int: | ||||
|         """Return the current position in the iterator.""" | ||||
|         return self._pos | ||||
|  | ||||
|     def write(self, s: bytes, /) -> int: | ||||
|         """Write data to the iterator.""" | ||||
|         self._write_future = asyncio.run_coroutine_threadsafe( | ||||
|             self._queue.put(s), self._hass.loop | ||||
|         ) | ||||
|         if self._aborted: | ||||
|             self._write_future.cancel() | ||||
|             raise AbortCipher | ||||
|         try: | ||||
|             self._write_future.result() | ||||
|         except CancelledError as err: | ||||
|             raise AbortCipher from err | ||||
|         self._pos += len(s) | ||||
|         return len(s) | ||||
|  | ||||
|  | ||||
| def validate_password_stream( | ||||
|     input_stream: IO[bytes], | ||||
|     password: str | None, | ||||
| @@ -240,7 +342,7 @@ def decrypt_backup( | ||||
|         finally: | ||||
|             # Write an empty chunk to signal the end of the stream | ||||
|             output_stream.write(b"") | ||||
|     except Abort: | ||||
|     except AbortCipher: | ||||
|         LOGGER.debug("Cipher operation aborted") | ||||
|     finally: | ||||
|         on_done(error) | ||||
| @@ -328,7 +430,7 @@ def encrypt_backup( | ||||
|         finally: | ||||
|             # Write an empty chunk to signal the end of the stream | ||||
|             output_stream.write(b"") | ||||
|     except Abort: | ||||
|     except AbortCipher: | ||||
|         LOGGER.debug("Cipher operation aborted") | ||||
|     finally: | ||||
|         on_done(error) | ||||
| @@ -455,8 +557,8 @@ class _CipherBackupStreamer: | ||||
|             self._hass.loop.call_soon_threadsafe(worker_status.done.set) | ||||
|  | ||||
|         stream = await self._open_stream() | ||||
|         reader = AsyncIteratorReader(self._hass.loop, stream) | ||||
|         writer = AsyncIteratorWriter(self._hass.loop) | ||||
|         reader = AsyncIteratorReader(self._hass, stream) | ||||
|         writer = AsyncIteratorWriter(self._hass) | ||||
|         worker = threading.Thread( | ||||
|             target=self._cipher_func, | ||||
|             args=[ | ||||
|   | ||||
| @@ -73,12 +73,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: BangOlufsenConfigEntry) | ||||
|     # Add the websocket and API client | ||||
|     entry.runtime_data = BangOlufsenData(websocket, client) | ||||
|  | ||||
|     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) | ||||
|  | ||||
|     # Start WebSocket connection once the platforms have been loaded. | ||||
|     # This ensures that the initial WebSocket notifications are dispatched to entities | ||||
|     # Start WebSocket connection | ||||
|     await client.connect_notifications(remote_control=True, reconnect=True) | ||||
|  | ||||
|     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) | ||||
|  | ||||
|     return True | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -125,8 +125,7 @@ async def async_setup_entry( | ||||
|     async_add_entities( | ||||
|         new_entities=[ | ||||
|             BangOlufsenMediaPlayer(config_entry, config_entry.runtime_data.client) | ||||
|         ], | ||||
|         update_before_add=True, | ||||
|         ] | ||||
|     ) | ||||
|  | ||||
|     # Register actions. | ||||
| @@ -267,8 +266,34 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): | ||||
|             self._software_status.software_version, | ||||
|         ) | ||||
|  | ||||
|         # Get overall device state once. This is handled by WebSocket events the rest of the time. | ||||
|         product_state = await self._client.get_product_state() | ||||
|  | ||||
|         # Get volume information. | ||||
|         if product_state.volume: | ||||
|             self._volume = product_state.volume | ||||
|  | ||||
|         # Get all playback information. | ||||
|         # Ensure that the metadata is not None upon startup | ||||
|         if product_state.playback: | ||||
|             if product_state.playback.metadata: | ||||
|                 self._playback_metadata = product_state.playback.metadata | ||||
|                 self._remote_leader = product_state.playback.metadata.remote_leader | ||||
|             if product_state.playback.progress: | ||||
|                 self._playback_progress = product_state.playback.progress | ||||
|             if product_state.playback.source: | ||||
|                 self._source_change = product_state.playback.source | ||||
|             if product_state.playback.state: | ||||
|                 self._playback_state = product_state.playback.state | ||||
|                 # Set initial state | ||||
|                 if self._playback_state.value: | ||||
|                     self._state = self._playback_state.value | ||||
|  | ||||
|         self._attr_media_position_updated_at = utcnow() | ||||
|  | ||||
|         # Get the highest resolution available of the given images. | ||||
|         self._media_image = get_highest_resolution_artwork(self._playback_metadata) | ||||
|  | ||||
|         # If the device has been updated with new sources, then the API will fail here. | ||||
|         await self._async_update_sources() | ||||
|  | ||||
|   | ||||
| @@ -3,12 +3,16 @@ beolink_allstandby: | ||||
|     entity: | ||||
|       integration: bang_olufsen | ||||
|       domain: media_player | ||||
|     device: | ||||
|       integration: bang_olufsen | ||||
|  | ||||
| beolink_expand: | ||||
|   target: | ||||
|     entity: | ||||
|       integration: bang_olufsen | ||||
|       domain: media_player | ||||
|     device: | ||||
|       integration: bang_olufsen | ||||
|   fields: | ||||
|     all_discovered: | ||||
|       required: false | ||||
| @@ -33,6 +37,8 @@ beolink_join: | ||||
|     entity: | ||||
|       integration: bang_olufsen | ||||
|       domain: media_player | ||||
|     device: | ||||
|       integration: bang_olufsen | ||||
|   fields: | ||||
|     jid_options: | ||||
|       collapsed: false | ||||
| @@ -65,12 +71,16 @@ beolink_leave: | ||||
|     entity: | ||||
|       integration: bang_olufsen | ||||
|       domain: media_player | ||||
|     device: | ||||
|       integration: bang_olufsen | ||||
|  | ||||
| beolink_unexpand: | ||||
|   target: | ||||
|     entity: | ||||
|       integration: bang_olufsen | ||||
|       domain: media_player | ||||
|     device: | ||||
|       integration: bang_olufsen | ||||
|   fields: | ||||
|     jid_options: | ||||
|       collapsed: false | ||||
|   | ||||
| @@ -272,13 +272,6 @@ async def async_setup_entry( | ||||
|     observations: list[ConfigType] = [ | ||||
|         dict(subentry.data) for subentry in config_entry.subentries.values() | ||||
|     ] | ||||
|  | ||||
|     for observation in observations: | ||||
|         if observation[CONF_PLATFORM] == CONF_TEMPLATE: | ||||
|             observation[CONF_VALUE_TEMPLATE] = Template( | ||||
|                 observation[CONF_VALUE_TEMPLATE], hass | ||||
|             ) | ||||
|  | ||||
|     prior: float = config[CONF_PRIOR] | ||||
|     probability_threshold: float = config[CONF_PROBABILITY_THRESHOLD] | ||||
|     device_class: BinarySensorDeviceClass | None = config.get(CONF_DEVICE_CLASS) | ||||
|   | ||||
| @@ -13,30 +13,20 @@ from bluecurrent_api.exceptions import ( | ||||
|     RequestLimitReached, | ||||
|     WebsocketError, | ||||
| ) | ||||
| import voluptuous as vol | ||||
|  | ||||
| from homeassistant.config_entries import ConfigEntry, ConfigEntryState | ||||
| from homeassistant.const import CONF_API_TOKEN, CONF_DEVICE_ID, Platform | ||||
| from homeassistant.core import HomeAssistant, ServiceCall | ||||
| from homeassistant.exceptions import ( | ||||
|     ConfigEntryAuthFailed, | ||||
|     ConfigEntryNotReady, | ||||
|     ServiceValidationError, | ||||
| ) | ||||
| from homeassistant.helpers import config_validation as cv, device_registry as dr | ||||
| from homeassistant.config_entries import ConfigEntry | ||||
| from homeassistant.const import CONF_API_TOKEN, Platform | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady | ||||
| from homeassistant.helpers.dispatcher import async_dispatcher_send | ||||
| from homeassistant.helpers.typing import ConfigType | ||||
|  | ||||
| from .const import ( | ||||
|     BCU_APP, | ||||
|     CHARGEPOINT_SETTINGS, | ||||
|     CHARGEPOINT_STATUS, | ||||
|     CHARGING_CARD_ID, | ||||
|     DOMAIN, | ||||
|     EVSE_ID, | ||||
|     LOGGER, | ||||
|     PLUG_AND_CHARGE, | ||||
|     SERVICE_START_CHARGE_SESSION, | ||||
|     VALUE, | ||||
| ) | ||||
|  | ||||
| @@ -44,7 +34,6 @@ type BlueCurrentConfigEntry = ConfigEntry[Connector] | ||||
|  | ||||
| PLATFORMS = [Platform.BUTTON, Platform.SENSOR, Platform.SWITCH] | ||||
| CHARGE_POINTS = "CHARGE_POINTS" | ||||
| CHARGE_CARDS = "CHARGE_CARDS" | ||||
| DATA = "data" | ||||
| DELAY = 5 | ||||
|  | ||||
| @@ -52,16 +41,6 @@ GRID = "GRID" | ||||
| OBJECT = "object" | ||||
| VALUE_TYPES = [CHARGEPOINT_STATUS, CHARGEPOINT_SETTINGS] | ||||
|  | ||||
| CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) | ||||
|  | ||||
| SERVICE_START_CHARGE_SESSION_SCHEMA = vol.Schema( | ||||
|     { | ||||
|         vol.Required(CONF_DEVICE_ID): cv.string, | ||||
|         # When no charging card is provided, use no charging card (BCU_APP = no charging card). | ||||
|         vol.Optional(CHARGING_CARD_ID, default=BCU_APP): cv.string, | ||||
|     } | ||||
| ) | ||||
|  | ||||
|  | ||||
| async def async_setup_entry( | ||||
|     hass: HomeAssistant, config_entry: BlueCurrentConfigEntry | ||||
| @@ -88,66 +67,6 @@ async def async_setup_entry( | ||||
|     return True | ||||
|  | ||||
|  | ||||
| async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: | ||||
|     """Set up Blue Current.""" | ||||
|  | ||||
|     async def start_charge_session(service_call: ServiceCall) -> None: | ||||
|         """Start a charge session with the provided device and charge card ID.""" | ||||
|         # When no charge card is provided, use the default charge card set in the config flow. | ||||
|         charging_card_id = service_call.data[CHARGING_CARD_ID] | ||||
|         device_id = service_call.data[CONF_DEVICE_ID] | ||||
|  | ||||
|         # Get the device based on the given device ID. | ||||
|         device = dr.async_get(hass).devices.get(device_id) | ||||
|  | ||||
|         if device is None: | ||||
|             raise ServiceValidationError( | ||||
|                 translation_domain=DOMAIN, translation_key="invalid_device_id" | ||||
|             ) | ||||
|  | ||||
|         blue_current_config_entry: ConfigEntry | None = None | ||||
|  | ||||
|         for config_entry_id in device.config_entries: | ||||
|             config_entry = hass.config_entries.async_get_entry(config_entry_id) | ||||
|             if not config_entry or config_entry.domain != DOMAIN: | ||||
|                 # Not the blue_current config entry. | ||||
|                 continue | ||||
|  | ||||
|             if config_entry.state is not ConfigEntryState.LOADED: | ||||
|                 raise ServiceValidationError( | ||||
|                     translation_domain=DOMAIN, translation_key="config_entry_not_loaded" | ||||
|                 ) | ||||
|  | ||||
|             blue_current_config_entry = config_entry | ||||
|             break | ||||
|  | ||||
|         if not blue_current_config_entry: | ||||
|             # The device is not connected to a valid blue_current config entry. | ||||
|             raise ServiceValidationError( | ||||
|                 translation_domain=DOMAIN, translation_key="no_config_entry" | ||||
|             ) | ||||
|  | ||||
|         connector = blue_current_config_entry.runtime_data | ||||
|  | ||||
|         # Get the evse_id from the identifier of the device. | ||||
|         evse_id = next( | ||||
|             identifier[1] | ||||
|             for identifier in device.identifiers | ||||
|             if identifier[0] == DOMAIN | ||||
|         ) | ||||
|  | ||||
|         await connector.client.start_session(evse_id, charging_card_id) | ||||
|  | ||||
|     hass.services.async_register( | ||||
|         DOMAIN, | ||||
|         SERVICE_START_CHARGE_SESSION, | ||||
|         start_charge_session, | ||||
|         SERVICE_START_CHARGE_SESSION_SCHEMA, | ||||
|     ) | ||||
|  | ||||
|     return True | ||||
|  | ||||
|  | ||||
| async def async_unload_entry( | ||||
|     hass: HomeAssistant, config_entry: BlueCurrentConfigEntry | ||||
| ) -> bool: | ||||
| @@ -168,7 +87,6 @@ class Connector: | ||||
|         self.client = client | ||||
|         self.charge_points: dict[str, dict] = {} | ||||
|         self.grid: dict[str, Any] = {} | ||||
|         self.charge_cards: dict[str, dict[str, Any]] = {} | ||||
|  | ||||
|     async def on_data(self, message: dict) -> None: | ||||
|         """Handle received data.""" | ||||
|   | ||||
| @@ -8,12 +8,6 @@ LOGGER = logging.getLogger(__package__) | ||||
|  | ||||
| EVSE_ID = "evse_id" | ||||
| MODEL_TYPE = "model_type" | ||||
| CARD = "card" | ||||
| UID = "uid" | ||||
| BCU_APP = "BCU-APP" | ||||
| WITHOUT_CHARGING_CARD = "without_charging_card" | ||||
| CHARGING_CARD_ID = "charging_card_id" | ||||
| SERVICE_START_CHARGE_SESSION = "start_charge_session" | ||||
| PLUG_AND_CHARGE = "plug_and_charge" | ||||
| VALUE = "value" | ||||
| PERMISSION = "permission" | ||||
|   | ||||
| @@ -42,10 +42,5 @@ | ||||
|         "default": "mdi:lock" | ||||
|       } | ||||
|     } | ||||
|   }, | ||||
|   "services": { | ||||
|     "start_charge_session": { | ||||
|       "service": "mdi:play" | ||||
|     } | ||||
|   } | ||||
| } | ||||
|   | ||||
| @@ -1,12 +0,0 @@ | ||||
| start_charge_session: | ||||
|   fields: | ||||
|     device_id: | ||||
|       selector: | ||||
|         device: | ||||
|           integration: blue_current | ||||
|       required: true | ||||
|  | ||||
|     charging_card_id: | ||||
|       selector: | ||||
|         text: | ||||
|       required: false | ||||
| @@ -22,16 +22,6 @@ | ||||
|       "wrong_account": "Wrong account: Please authenticate with the API token for {email}." | ||||
|     } | ||||
|   }, | ||||
|   "options": { | ||||
|     "step": { | ||||
|       "init": { | ||||
|         "data": { | ||||
|           "card": "Card" | ||||
|         }, | ||||
|         "description": "Select the default charging card you want to use" | ||||
|       } | ||||
|     } | ||||
|   }, | ||||
|   "entity": { | ||||
|     "sensor": { | ||||
|       "activity": { | ||||
| @@ -146,39 +136,5 @@ | ||||
|         "name": "Block charge point" | ||||
|       } | ||||
|     } | ||||
|   }, | ||||
|   "selector": { | ||||
|     "select_charging_card": { | ||||
|       "options": { | ||||
|         "without_charging_card": "Without charging card" | ||||
|       } | ||||
|     } | ||||
|   }, | ||||
|   "services": { | ||||
|     "start_charge_session": { | ||||
|       "name": "Start charge session", | ||||
|       "description": "Starts a new charge session on a specified charge point.", | ||||
|       "fields": { | ||||
|         "charging_card_id": { | ||||
|           "name": "Charging card ID", | ||||
|           "description": "Optional charging card ID that will be used to start a charge session. When not provided, no charging card will be used." | ||||
|         }, | ||||
|         "device_id": { | ||||
|           "name": "Device ID", | ||||
|           "description": "The ID of the Blue Current charge point." | ||||
|         } | ||||
|       } | ||||
|     } | ||||
|   }, | ||||
|   "exceptions": { | ||||
|     "invalid_device_id": { | ||||
|       "message": "Invalid device ID given." | ||||
|     }, | ||||
|     "config_entry_not_loaded": { | ||||
|       "message": "Config entry not loaded." | ||||
|     }, | ||||
|     "no_config_entry": { | ||||
|       "message": "Device has not a valid blue_current config entry." | ||||
|     } | ||||
|   } | ||||
| } | ||||
|   | ||||
| @@ -10,7 +10,6 @@ from asyncio import Future | ||||
| from collections.abc import Callable, Iterable | ||||
| from typing import TYPE_CHECKING, cast | ||||
|  | ||||
| from bleak import BleakScanner | ||||
| from habluetooth import ( | ||||
|     BaseHaScanner, | ||||
|     BluetoothScannerDevice, | ||||
| @@ -39,16 +38,13 @@ def _get_manager(hass: HomeAssistant) -> HomeAssistantBluetoothManager: | ||||
|  | ||||
|  | ||||
| @hass_callback | ||||
| def async_get_scanner(hass: HomeAssistant) -> BleakScanner: | ||||
|     """Return a HaBleakScannerWrapper cast to BleakScanner. | ||||
| def async_get_scanner(hass: HomeAssistant) -> HaBleakScannerWrapper: | ||||
|     """Return a HaBleakScannerWrapper. | ||||
|  | ||||
|     This is a wrapper around our BleakScanner singleton that allows | ||||
|     multiple integrations to share the same BleakScanner. | ||||
|  | ||||
|     The wrapper is cast to BleakScanner for type compatibility with | ||||
|     libraries expecting a BleakScanner instance. | ||||
|     """ | ||||
|     return cast(BleakScanner, HaBleakScannerWrapper()) | ||||
|     return HaBleakScannerWrapper() | ||||
|  | ||||
|  | ||||
| @hass_callback | ||||
|   | ||||
| @@ -205,7 +205,6 @@ class BringActivityCoordinator(BringBaseCoordinator[dict[str, BringActivityData] | ||||
|  | ||||
|     async def _async_update_data(self) -> dict[str, BringActivityData]: | ||||
|         """Fetch activity data from bring.""" | ||||
|         self.lists = self.coordinator.lists | ||||
|  | ||||
|         list_dict: dict[str, BringActivityData] = {} | ||||
|         for lst in self.lists: | ||||
|   | ||||
| @@ -43,7 +43,7 @@ async def async_setup_entry( | ||||
|             ) | ||||
|             lists_added |= new_lists | ||||
|  | ||||
|     coordinator.data.async_add_listener(add_entities) | ||||
|     coordinator.activity.async_add_listener(add_entities) | ||||
|     add_entities() | ||||
|  | ||||
|  | ||||
| @@ -67,8 +67,7 @@ class BringEventEntity(BringBaseEntity, EventEntity): | ||||
|  | ||||
|     def _async_handle_event(self) -> None: | ||||
|         """Handle the activity event.""" | ||||
|         if (bring_list := self.coordinator.data.get(self._list_uuid)) is None: | ||||
|             return | ||||
|         bring_list = self.coordinator.data[self._list_uuid] | ||||
|         last_event_triggered = self.state | ||||
|         if bring_list.activity.timeline and ( | ||||
|             last_event_triggered is None | ||||
|   | ||||
| @@ -315,7 +315,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: | ||||
|     hass.http.register_view(CalendarListView(component)) | ||||
|     hass.http.register_view(CalendarEventView(component)) | ||||
|  | ||||
|     frontend.async_register_built_in_panel(hass, "calendar", "calendar", "mdi:calendar") | ||||
|     frontend.async_register_built_in_panel( | ||||
|         hass, "calendar", "calendar", "hass:calendar" | ||||
|     ) | ||||
|  | ||||
|     websocket_api.async_register_command(hass, handle_calendar_event_create) | ||||
|     websocket_api.async_register_command(hass, handle_calendar_event_delete) | ||||
|   | ||||
| @@ -51,6 +51,12 @@ from homeassistant.const import ( | ||||
| from homeassistant.core import Event, HomeAssistant, ServiceCall, callback | ||||
| from homeassistant.exceptions import HomeAssistantError | ||||
| from homeassistant.helpers import config_validation as cv, issue_registry as ir | ||||
| from homeassistant.helpers.deprecation import ( | ||||
|     DeprecatedConstantEnum, | ||||
|     all_with_deprecated_constants, | ||||
|     check_if_deprecated_constant, | ||||
|     dir_with_deprecated_constants, | ||||
| ) | ||||
| from homeassistant.helpers.entity import Entity, EntityDescription | ||||
| from homeassistant.helpers.entity_component import EntityComponent | ||||
| from homeassistant.helpers.event import async_track_time_interval | ||||
| @@ -112,6 +118,12 @@ ATTR_FILENAME: Final = "filename" | ||||
| ATTR_MEDIA_PLAYER: Final = "media_player" | ||||
| ATTR_FORMAT: Final = "format" | ||||
|  | ||||
| # These constants are deprecated as of Home Assistant 2024.10 | ||||
| # Please use the StreamType enum instead. | ||||
| _DEPRECATED_STATE_RECORDING = DeprecatedConstantEnum(CameraState.RECORDING, "2025.10") | ||||
| _DEPRECATED_STATE_STREAMING = DeprecatedConstantEnum(CameraState.STREAMING, "2025.10") | ||||
| _DEPRECATED_STATE_IDLE = DeprecatedConstantEnum(CameraState.IDLE, "2025.10") | ||||
|  | ||||
|  | ||||
| class CameraEntityFeature(IntFlag): | ||||
|     """Supported features of the camera entity.""" | ||||
| @@ -1105,3 +1117,11 @@ async def async_handle_record_service( | ||||
|         duration=service_call.data[CONF_DURATION], | ||||
|         lookback=service_call.data[CONF_LOOKBACK], | ||||
|     ) | ||||
|  | ||||
|  | ||||
| # These can be removed if no deprecated constant are in this module anymore | ||||
| __getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) | ||||
| __dir__ = partial( | ||||
|     dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] | ||||
| ) | ||||
| __all__ = all_with_deprecated_constants(globals()) | ||||
|   | ||||
| @@ -53,6 +53,7 @@ from .const import ( | ||||
|     CONF_ACME_SERVER, | ||||
|     CONF_ALEXA, | ||||
|     CONF_ALIASES, | ||||
|     CONF_CLOUDHOOK_SERVER, | ||||
|     CONF_COGNITO_CLIENT_ID, | ||||
|     CONF_ENTITY_CONFIG, | ||||
|     CONF_FILTER, | ||||
| @@ -129,6 +130,7 @@ CONFIG_SCHEMA = vol.Schema( | ||||
|                 vol.Optional(CONF_ACCOUNT_LINK_SERVER): str, | ||||
|                 vol.Optional(CONF_ACCOUNTS_SERVER): str, | ||||
|                 vol.Optional(CONF_ACME_SERVER): str, | ||||
|                 vol.Optional(CONF_CLOUDHOOK_SERVER): str, | ||||
|                 vol.Optional(CONF_RELAYER_SERVER): str, | ||||
|                 vol.Optional(CONF_REMOTESTATE_SERVER): str, | ||||
|                 vol.Optional(CONF_SERVICEHANDLERS_SERVER): str, | ||||
|   | ||||
| @@ -78,6 +78,7 @@ CONF_USER_POOL_ID = "user_pool_id" | ||||
| CONF_ACCOUNT_LINK_SERVER = "account_link_server" | ||||
| CONF_ACCOUNTS_SERVER = "accounts_server" | ||||
| CONF_ACME_SERVER = "acme_server" | ||||
| CONF_CLOUDHOOK_SERVER = "cloudhook_server" | ||||
| CONF_RELAYER_SERVER = "relayer_server" | ||||
| CONF_REMOTESTATE_SERVER = "remotestate_server" | ||||
| CONF_SERVICEHANDLERS_SERVER = "servicehandlers_server" | ||||
|   | ||||
| @@ -37,10 +37,6 @@ from homeassistant.exceptions import HomeAssistantError | ||||
| from homeassistant.helpers import config_validation as cv | ||||
| from homeassistant.helpers.aiohttp_client import async_get_clientsession | ||||
| from homeassistant.helpers.dispatcher import async_dispatcher_send | ||||
| from homeassistant.loader import ( | ||||
|     async_get_custom_components, | ||||
|     async_get_loaded_integration, | ||||
| ) | ||||
| from homeassistant.util.location import async_detect_location_info | ||||
|  | ||||
| from .alexa_config import entity_supported as entity_supported_by_alexa | ||||
| @@ -435,79 +431,6 @@ class DownloadSupportPackageView(HomeAssistantView): | ||||
|     url = "/api/cloud/support_package" | ||||
|     name = "api:cloud:support_package" | ||||
|  | ||||
|     async def _get_integration_info(self, hass: HomeAssistant) -> dict[str, Any]: | ||||
|         """Collect information about active and custom integrations.""" | ||||
|         # Get loaded components from hass.config.components | ||||
|         loaded_components = hass.config.components.copy() | ||||
|  | ||||
|         # Get custom integrations | ||||
|         custom_domains = set() | ||||
|         with suppress(Exception): | ||||
|             custom_domains = set(await async_get_custom_components(hass)) | ||||
|  | ||||
|         # Separate built-in and custom integrations | ||||
|         builtin_integrations = [] | ||||
|         custom_integrations = [] | ||||
|  | ||||
|         for domain in sorted(loaded_components): | ||||
|             try: | ||||
|                 integration = async_get_loaded_integration(hass, domain) | ||||
|             except Exception:  # noqa: BLE001 | ||||
|                 # Broad exception catch for robustness in support package | ||||
|                 # generation. If we can't get integration info, | ||||
|                 # just add the domain | ||||
|                 if domain in custom_domains: | ||||
|                     custom_integrations.append( | ||||
|                         { | ||||
|                             "domain": domain, | ||||
|                             "name": "Unknown", | ||||
|                             "version": "Unknown", | ||||
|                             "documentation": "Unknown", | ||||
|                         } | ||||
|                     ) | ||||
|                 else: | ||||
|                     builtin_integrations.append( | ||||
|                         { | ||||
|                             "domain": domain, | ||||
|                             "name": "Unknown", | ||||
|                         } | ||||
|                     ) | ||||
|             else: | ||||
|                 if domain in custom_domains: | ||||
|                     # This is a custom integration | ||||
|                     # include version and documentation link | ||||
|                     version = ( | ||||
|                         str(integration.version) if integration.version else "Unknown" | ||||
|                     ) | ||||
|                     if not (documentation := integration.documentation): | ||||
|                         documentation = "Unknown" | ||||
|  | ||||
|                     custom_integrations.append( | ||||
|                         { | ||||
|                             "domain": domain, | ||||
|                             "name": integration.name, | ||||
|                             "version": version, | ||||
|                             "documentation": documentation, | ||||
|                         } | ||||
|                     ) | ||||
|                 else: | ||||
|                     # This is a built-in integration. | ||||
|                     # No version needed, as it is always the same as the | ||||
|                     # Home Assistant version | ||||
|                     builtin_integrations.append( | ||||
|                         { | ||||
|                             "domain": domain, | ||||
|                             "name": integration.name, | ||||
|                         } | ||||
|                     ) | ||||
|  | ||||
|         return { | ||||
|             "builtin_count": len(builtin_integrations), | ||||
|             "builtin_integrations": builtin_integrations, | ||||
|             "custom_count": len(custom_integrations), | ||||
|             "custom_integrations": custom_integrations, | ||||
|         } | ||||
|  | ||||
|     async def _generate_markdown( | ||||
|         self, | ||||
|         hass: HomeAssistant, | ||||
| @@ -530,38 +453,6 @@ class DownloadSupportPackageView(HomeAssistantView): | ||||
|         markdown = "## System Information\n\n" | ||||
|         markdown += get_domain_table_markdown(hass_info) | ||||
|  | ||||
|         # Add integration information | ||||
|         try: | ||||
|             integration_info = await self._get_integration_info(hass) | ||||
|         except Exception:  # noqa: BLE001 | ||||
|             # Broad exception catch for robustness in support package generation | ||||
|             # If there's any error getting integration info, just note it | ||||
|             markdown += "## Active integrations\n\n" | ||||
|             markdown += "Unable to collect integration information\n\n" | ||||
|         else: | ||||
|             markdown += "## Active Integrations\n\n" | ||||
|             markdown += f"Built-in integrations: {integration_info['builtin_count']}\n" | ||||
|             markdown += f"Custom integrations: {integration_info['custom_count']}\n\n" | ||||
|  | ||||
|             # Built-in integrations | ||||
|             if integration_info["builtin_integrations"]: | ||||
|                 markdown += "<details><summary>Built-in integrations</summary>\n\n" | ||||
|                 markdown += "Domain | Name\n" | ||||
|                 markdown += "--- | ---\n" | ||||
|                 for integration in integration_info["builtin_integrations"]: | ||||
|                     markdown += f"{integration['domain']} | {integration['name']}\n" | ||||
|                 markdown += "\n</details>\n\n" | ||||
|  | ||||
|             # Custom integrations | ||||
|             if integration_info["custom_integrations"]: | ||||
|                 markdown += "<details><summary>Custom integrations</summary>\n\n" | ||||
|                 markdown += "Domain | Name | Version | Documentation\n" | ||||
|                 markdown += "--- | --- | --- | ---\n" | ||||
|                 for integration in integration_info["custom_integrations"]: | ||||
|                     doc_url = integration.get("documentation") or "N/A" | ||||
|                     markdown += f"{integration['domain']} | {integration['name']} | {integration['version']} | {doc_url}\n" | ||||
|                 markdown += "\n</details>\n\n" | ||||
|  | ||||
|         for domain, domain_info in domains_info.items(): | ||||
|             domain_info_md = get_domain_table_markdown(domain_info) | ||||
|             markdown += ( | ||||
|   | ||||
| @@ -13,6 +13,6 @@ | ||||
|   "integration_type": "system", | ||||
|   "iot_class": "cloud_push", | ||||
|   "loggers": ["acme", "hass_nabucasa", "snitun"], | ||||
|   "requirements": ["hass-nabucasa==1.2.0"], | ||||
|   "requirements": ["hass-nabucasa==1.1.1"], | ||||
|   "single_config_entry": true | ||||
| } | ||||
|   | ||||
| @@ -25,11 +25,7 @@ async def async_subscription_info(cloud: Cloud[CloudClient]) -> SubscriptionInfo | ||||
|             return await cloud.payments.subscription_info() | ||||
|     except PaymentsApiError as exception: | ||||
|         _LOGGER.error("Failed to fetch subscription information - %s", exception) | ||||
|     except TimeoutError: | ||||
|         _LOGGER.error( | ||||
|             "A timeout of %s was reached while trying to fetch subscription information", | ||||
|             REQUEST_TIMEOUT, | ||||
|         ) | ||||
|  | ||||
|     return None | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -1,106 +0,0 @@ | ||||
| rules: | ||||
|   # Bronze | ||||
|   action-setup: | ||||
|     status: exempt | ||||
|     comment: | | ||||
|       The integration does not provide any actions. | ||||
|   appropriate-polling: done | ||||
|   brands: done | ||||
|   common-modules: done | ||||
|   config-flow-test-coverage: | ||||
|     status: todo | ||||
|     comment: | | ||||
|       Stale docstring and test name: `test_form_home` and reusing result. | ||||
|       Extract `async_setup_entry` into own fixture. | ||||
|       Avoid importing `config_flow` in tests. | ||||
|       Test reauth with errors | ||||
|   config-flow: | ||||
|     status: todo | ||||
|     comment: | | ||||
|       The config flow misses data descriptions. | ||||
|       Remove URLs from data descriptions, they should be replaced with placeholders. | ||||
|       Make use of Electricity Maps zone keys in country code as dropdown. | ||||
|       Make use of location selector for coordinates. | ||||
|   dependency-transparency: done | ||||
|   docs-actions: | ||||
|     status: exempt | ||||
|     comment: | | ||||
|       The integration does not provide any actions. | ||||
|   docs-high-level-description: done | ||||
|   docs-installation-instructions: done | ||||
|   docs-removal-instructions: done | ||||
|   entity-event-setup: | ||||
|     status: exempt | ||||
|     comment: | | ||||
|       Entities of this integration do not explicitly subscribe to events. | ||||
|   entity-unique-id: done | ||||
|   has-entity-name: done | ||||
|   runtime-data: done | ||||
|   test-before-configure: done | ||||
|   test-before-setup: done | ||||
|   unique-config-entry: todo | ||||
|  | ||||
|   # Silver | ||||
|   action-exceptions: | ||||
|     status: exempt | ||||
|     comment: | | ||||
|       The integration does not provide any actions. | ||||
|   config-entry-unloading: done | ||||
|   docs-configuration-parameters: | ||||
|     status: exempt | ||||
|     comment: | | ||||
|       The integration does not provide any additional options. | ||||
|   docs-installation-parameters: done | ||||
|   entity-unavailable: done | ||||
|   integration-owner: done | ||||
|   log-when-unavailable: done | ||||
|   parallel-updates: todo | ||||
|   reauthentication-flow: done | ||||
|   test-coverage: | ||||
|     status: todo | ||||
|     comment: | | ||||
|       Use `hass.config_entries.async_setup` instead of assert await `async_setup_component(hass, DOMAIN, {})` | ||||
|       `test_sensor` could use `snapshot_platform` | ||||
|  | ||||
|   # Gold | ||||
|   devices: done | ||||
|   diagnostics: done | ||||
|   discovery-update-info: | ||||
|     status: exempt | ||||
|     comment: | | ||||
|       This integration cannot be discovered, it is a connecting to a cloud service. | ||||
|   discovery: | ||||
|     status: exempt | ||||
|     comment: | | ||||
|       This integration cannot be discovered, it is a connecting to a cloud service. | ||||
|   docs-data-update: done | ||||
|   docs-examples: done | ||||
|   docs-known-limitations: done | ||||
|   docs-supported-devices: done | ||||
|   docs-supported-functions: done | ||||
|   docs-troubleshooting: done | ||||
|   docs-use-cases: done | ||||
|   dynamic-devices: | ||||
|     status: exempt | ||||
|     comment: | | ||||
|       The integration connects to a single service per configuration entry. | ||||
|   entity-category: done | ||||
|   entity-device-class: done | ||||
|   entity-disabled-by-default: done | ||||
|   entity-translations: done | ||||
|   exception-translations: todo | ||||
|   icon-translations: todo | ||||
|   reconfiguration-flow: todo | ||||
|   repair-issues: | ||||
|     status: exempt | ||||
|     comment: | | ||||
|       This integration does not raise any repairable issues. | ||||
|   stale-devices: | ||||
|     status: exempt | ||||
|     comment: | | ||||
|       This integration connect to a single device per configuration entry. | ||||
|  | ||||
|   # Platinum | ||||
|   async-dependency: done | ||||
|   inject-websession: done | ||||
|   strict-typing: done | ||||
| @@ -29,24 +29,11 @@ async def async_setup_entry( | ||||
|  | ||||
|     coordinator = cast(ComelitVedoSystem, config_entry.runtime_data) | ||||
|  | ||||
|     known_devices: set[int] = set() | ||||
|  | ||||
|     def _check_device() -> None: | ||||
|         current_devices = set(coordinator.data["alarm_zones"]) | ||||
|         new_devices = current_devices - known_devices | ||||
|         if new_devices: | ||||
|             known_devices.update(new_devices) | ||||
|     async_add_entities( | ||||
|                 ComelitVedoBinarySensorEntity( | ||||
|                     coordinator, device, config_entry.entry_id | ||||
|                 ) | ||||
|         ComelitVedoBinarySensorEntity(coordinator, device, config_entry.entry_id) | ||||
|         for device in coordinator.data["alarm_zones"].values() | ||||
|                 if device.index in new_devices | ||||
|     ) | ||||
|  | ||||
|     _check_device() | ||||
|     config_entry.async_on_unload(coordinator.async_add_listener(_check_device)) | ||||
|  | ||||
|  | ||||
| class ComelitVedoBinarySensorEntity( | ||||
|     CoordinatorEntity[ComelitVedoSystem], BinarySensorEntity | ||||
|   | ||||
| @@ -25,27 +25,23 @@ from .const import _LOGGER, DEFAULT_PORT, DEVICE_TYPE_LIST, DOMAIN | ||||
| from .utils import async_client_session | ||||
|  | ||||
| DEFAULT_HOST = "192.168.1.252" | ||||
| DEFAULT_PIN = "111111" | ||||
| DEFAULT_PIN = 111111 | ||||
|  | ||||
|  | ||||
| pin_regex = r"^[0-9]{4,10}$" | ||||
|  | ||||
| USER_SCHEMA = vol.Schema( | ||||
|     { | ||||
|         vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string, | ||||
|         vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port, | ||||
|         vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex), | ||||
|         vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.positive_int, | ||||
|         vol.Required(CONF_TYPE, default=BRIDGE): vol.In(DEVICE_TYPE_LIST), | ||||
|     } | ||||
| ) | ||||
| STEP_REAUTH_DATA_SCHEMA = vol.Schema( | ||||
|     {vol.Required(CONF_PIN): cv.matches_regex(pin_regex)} | ||||
| ) | ||||
| STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.positive_int}) | ||||
| STEP_RECONFIGURE = vol.Schema( | ||||
|     { | ||||
|         vol.Required(CONF_HOST): cv.string, | ||||
|         vol.Required(CONF_PORT): cv.port, | ||||
|         vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex), | ||||
|         vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.positive_int, | ||||
|     } | ||||
| ) | ||||
|  | ||||
|   | ||||
| @@ -2,7 +2,7 @@ | ||||
|  | ||||
| from abc import abstractmethod | ||||
| from datetime import timedelta | ||||
| from typing import Any, TypeVar | ||||
| from typing import TypeVar | ||||
|  | ||||
| from aiocomelit.api import ( | ||||
|     AlarmDataObject, | ||||
| @@ -13,16 +13,7 @@ from aiocomelit.api import ( | ||||
|     ComelitVedoAreaObject, | ||||
|     ComelitVedoZoneObject, | ||||
| ) | ||||
| from aiocomelit.const import ( | ||||
|     BRIDGE, | ||||
|     CLIMATE, | ||||
|     COVER, | ||||
|     IRRIGATION, | ||||
|     LIGHT, | ||||
|     OTHER, | ||||
|     SCENARIO, | ||||
|     VEDO, | ||||
| ) | ||||
| from aiocomelit.const import BRIDGE, VEDO | ||||
| from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData | ||||
| from aiohttp import ClientSession | ||||
|  | ||||
| @@ -120,32 +111,6 @@ class ComelitBaseCoordinator(DataUpdateCoordinator[T]): | ||||
|     async def _async_update_system_data(self) -> T: | ||||
|         """Class method for updating data.""" | ||||
|  | ||||
|     async def _async_remove_stale_devices( | ||||
|         self, | ||||
|         previous_list: dict[int, Any], | ||||
|         current_list: dict[int, Any], | ||||
|         dev_type: str, | ||||
|     ) -> None: | ||||
|         """Remove stale devices.""" | ||||
|         device_registry = dr.async_get(self.hass) | ||||
|  | ||||
|         for i in previous_list: | ||||
|             if i not in current_list: | ||||
|                 _LOGGER.debug( | ||||
|                     "Detected change in %s devices: index %s removed", | ||||
|                     dev_type, | ||||
|                     i, | ||||
|                 ) | ||||
|                 identifier = f"{self.config_entry.entry_id}-{dev_type}-{i}" | ||||
|                 device = device_registry.async_get_device( | ||||
|                     identifiers={(DOMAIN, identifier)} | ||||
|                 ) | ||||
|                 if device: | ||||
|                     device_registry.async_update_device( | ||||
|                         device_id=device.id, | ||||
|                         remove_config_entry_id=self.config_entry.entry_id, | ||||
|                     ) | ||||
|  | ||||
|  | ||||
| class ComelitSerialBridge( | ||||
|     ComelitBaseCoordinator[dict[str, dict[int, ComelitSerialBridgeObject]]] | ||||
| @@ -172,15 +137,7 @@ class ComelitSerialBridge( | ||||
|         self, | ||||
|     ) -> dict[str, dict[int, ComelitSerialBridgeObject]]: | ||||
|         """Specific method for updating data.""" | ||||
|         data = await self.api.get_all_devices() | ||||
|  | ||||
|         if self.data: | ||||
|             for dev_type in (CLIMATE, COVER, LIGHT, IRRIGATION, OTHER, SCENARIO): | ||||
|                 await self._async_remove_stale_devices( | ||||
|                     self.data[dev_type], data[dev_type], dev_type | ||||
|                 ) | ||||
|  | ||||
|         return data | ||||
|         return await self.api.get_all_devices() | ||||
|  | ||||
|  | ||||
| class ComelitVedoSystem(ComelitBaseCoordinator[AlarmDataObject]): | ||||
| @@ -206,14 +163,4 @@ class ComelitVedoSystem(ComelitBaseCoordinator[AlarmDataObject]): | ||||
|         self, | ||||
|     ) -> AlarmDataObject: | ||||
|         """Specific method for updating data.""" | ||||
|         data = await self.api.get_all_areas_and_zones() | ||||
|  | ||||
|         if self.data: | ||||
|             for obj_type in ("alarm_areas", "alarm_zones"): | ||||
|                 await self._async_remove_stale_devices( | ||||
|                     self.data[obj_type], | ||||
|                     data[obj_type], | ||||
|                     "area" if obj_type == "alarm_areas" else "zone", | ||||
|                 ) | ||||
|  | ||||
|         return data | ||||
|         return await self.api.get_all_areas_and_zones() | ||||
|   | ||||
| @@ -29,22 +29,11 @@ async def async_setup_entry( | ||||
|  | ||||
|     coordinator = cast(ComelitSerialBridge, config_entry.runtime_data) | ||||
|  | ||||
|     known_devices: set[int] = set() | ||||
|  | ||||
|     def _check_device() -> None: | ||||
|         current_devices = set(coordinator.data[COVER]) | ||||
|         new_devices = current_devices - known_devices | ||||
|         if new_devices: | ||||
|             known_devices.update(new_devices) | ||||
|     async_add_entities( | ||||
|         ComelitCoverEntity(coordinator, device, config_entry.entry_id) | ||||
|         for device in coordinator.data[COVER].values() | ||||
|                 if device.index in new_devices | ||||
|     ) | ||||
|  | ||||
|     _check_device() | ||||
|     config_entry.async_on_unload(coordinator.async_add_listener(_check_device)) | ||||
|  | ||||
|  | ||||
| class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity): | ||||
|     """Cover device.""" | ||||
|   | ||||
| @@ -27,22 +27,11 @@ async def async_setup_entry( | ||||
|  | ||||
|     coordinator = cast(ComelitSerialBridge, config_entry.runtime_data) | ||||
|  | ||||
|     known_devices: set[int] = set() | ||||
|  | ||||
|     def _check_device() -> None: | ||||
|         current_devices = set(coordinator.data[LIGHT]) | ||||
|         new_devices = current_devices - known_devices | ||||
|         if new_devices: | ||||
|             known_devices.update(new_devices) | ||||
|     async_add_entities( | ||||
|         ComelitLightEntity(coordinator, device, config_entry.entry_id) | ||||
|         for device in coordinator.data[LIGHT].values() | ||||
|                 if device.index in new_devices | ||||
|     ) | ||||
|  | ||||
|     _check_device() | ||||
|     config_entry.async_on_unload(coordinator.async_add_listener(_check_device)) | ||||
|  | ||||
|  | ||||
| class ComelitLightEntity(ComelitBridgeBaseEntity, LightEntity): | ||||
|     """Light device.""" | ||||
|   | ||||
| @@ -7,6 +7,6 @@ | ||||
|   "integration_type": "hub", | ||||
|   "iot_class": "local_polling", | ||||
|   "loggers": ["aiocomelit"], | ||||
|   "quality_scale": "platinum", | ||||
|   "quality_scale": "silver", | ||||
|   "requirements": ["aiocomelit==0.12.3"] | ||||
| } | ||||
|   | ||||
| @@ -57,7 +57,9 @@ rules: | ||||
|   docs-supported-functions: done | ||||
|   docs-troubleshooting: done | ||||
|   docs-use-cases: done | ||||
|   dynamic-devices: done | ||||
|   dynamic-devices: | ||||
|     status: todo | ||||
|     comment: missing implementation | ||||
|   entity-category: | ||||
|     status: exempt | ||||
|     comment: no config or diagnostic entities | ||||
| @@ -70,7 +72,9 @@ rules: | ||||
|   repair-issues: | ||||
|     status: exempt | ||||
|     comment: no known use cases for repair issues or flows, yet | ||||
|   stale-devices: done | ||||
|   stale-devices: | ||||
|     status: todo | ||||
|     comment: missing implementation | ||||
|  | ||||
|   # Platinum | ||||
|   async-dependency: done | ||||
|   | ||||
| @@ -4,7 +4,7 @@ from __future__ import annotations | ||||
|  | ||||
| from typing import Final, cast | ||||
|  | ||||
| from aiocomelit.api import ComelitSerialBridgeObject, ComelitVedoZoneObject | ||||
| from aiocomelit import ComelitSerialBridgeObject, ComelitVedoZoneObject | ||||
| from aiocomelit.const import BRIDGE, OTHER, AlarmZoneState | ||||
|  | ||||
| from homeassistant.components.sensor import ( | ||||
| @@ -65,24 +65,15 @@ async def async_setup_bridge_entry( | ||||
|  | ||||
|     coordinator = cast(ComelitSerialBridge, config_entry.runtime_data) | ||||
|  | ||||
|     known_devices: set[int] = set() | ||||
|  | ||||
|     def _check_device() -> None: | ||||
|         current_devices = set(coordinator.data[OTHER]) | ||||
|         new_devices = current_devices - known_devices | ||||
|         if new_devices: | ||||
|             known_devices.update(new_devices) | ||||
|             async_add_entities( | ||||
|     entities: list[ComelitBridgeSensorEntity] = [] | ||||
|     for device in coordinator.data[OTHER].values(): | ||||
|         entities.extend( | ||||
|             ComelitBridgeSensorEntity( | ||||
|                 coordinator, device, config_entry.entry_id, sensor_desc | ||||
|             ) | ||||
|             for sensor_desc in SENSOR_BRIDGE_TYPES | ||||
|                 for device in coordinator.data[OTHER].values() | ||||
|                 if device.index in new_devices | ||||
|         ) | ||||
|  | ||||
|     _check_device() | ||||
|     config_entry.async_on_unload(coordinator.async_add_listener(_check_device)) | ||||
|     async_add_entities(entities) | ||||
|  | ||||
|  | ||||
| async def async_setup_vedo_entry( | ||||
| @@ -94,24 +85,15 @@ async def async_setup_vedo_entry( | ||||
|  | ||||
|     coordinator = cast(ComelitVedoSystem, config_entry.runtime_data) | ||||
|  | ||||
|     known_devices: set[int] = set() | ||||
|  | ||||
|     def _check_device() -> None: | ||||
|         current_devices = set(coordinator.data["alarm_zones"]) | ||||
|         new_devices = current_devices - known_devices | ||||
|         if new_devices: | ||||
|             known_devices.update(new_devices) | ||||
|             async_add_entities( | ||||
|     entities: list[ComelitVedoSensorEntity] = [] | ||||
|     for device in coordinator.data["alarm_zones"].values(): | ||||
|         entities.extend( | ||||
|             ComelitVedoSensorEntity( | ||||
|                 coordinator, device, config_entry.entry_id, sensor_desc | ||||
|             ) | ||||
|             for sensor_desc in SENSOR_VEDO_TYPES | ||||
|                 for device in coordinator.data["alarm_zones"].values() | ||||
|                 if device.index in new_devices | ||||
|         ) | ||||
|  | ||||
|     _check_device() | ||||
|     config_entry.async_on_unload(coordinator.async_add_listener(_check_device)) | ||||
|     async_add_entities(entities) | ||||
|  | ||||
|  | ||||
| class ComelitBridgeSensorEntity(ComelitBridgeBaseEntity, SensorEntity): | ||||
|   | ||||
| @@ -39,25 +39,6 @@ async def async_setup_entry( | ||||
|     ) | ||||
|     async_add_entities(entities) | ||||
|  | ||||
|     known_devices: dict[str, set[int]] = { | ||||
|         dev_type: set() for dev_type in (IRRIGATION, OTHER) | ||||
|     } | ||||
|  | ||||
|     def _check_device() -> None: | ||||
|         for dev_type in (IRRIGATION, OTHER): | ||||
|             current_devices = set(coordinator.data[dev_type]) | ||||
|             new_devices = current_devices - known_devices[dev_type] | ||||
|             if new_devices: | ||||
|                 known_devices[dev_type].update(new_devices) | ||||
|                 async_add_entities( | ||||
|                     ComelitSwitchEntity(coordinator, device, config_entry.entry_id) | ||||
|                     for device in coordinator.data[dev_type].values() | ||||
|                     if device.index in new_devices | ||||
|                 ) | ||||
|  | ||||
|     _check_device() | ||||
|     config_entry.async_on_unload(coordinator.async_add_listener(_check_device)) | ||||
|  | ||||
|  | ||||
| class ComelitSwitchEntity(ComelitBridgeBaseEntity, SwitchEntity): | ||||
|     """Switch device.""" | ||||
|   | ||||
| @@ -1,45 +0,0 @@ | ||||
| """The Compit integration.""" | ||||
|  | ||||
| from compit_inext_api import CannotConnect, CompitApiConnector, InvalidAuth | ||||
|  | ||||
| from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady | ||||
| from homeassistant.helpers.aiohttp_client import async_get_clientsession | ||||
|  | ||||
| from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator | ||||
|  | ||||
| PLATFORMS = [ | ||||
|     Platform.CLIMATE, | ||||
| ] | ||||
|  | ||||
|  | ||||
| async def async_setup_entry(hass: HomeAssistant, entry: CompitConfigEntry) -> bool: | ||||
|     """Set up Compit from a config entry.""" | ||||
|  | ||||
|     session = async_get_clientsession(hass) | ||||
|     connector = CompitApiConnector(session) | ||||
|     try: | ||||
|         connected = await connector.init( | ||||
|             entry.data[CONF_EMAIL], entry.data[CONF_PASSWORD], hass.config.language | ||||
|         ) | ||||
|     except CannotConnect as e: | ||||
|         raise ConfigEntryNotReady(f"Error while connecting to Compit: {e}") from e | ||||
|     except InvalidAuth as e: | ||||
|         raise ConfigEntryAuthFailed( | ||||
|             f"Invalid credentials for {entry.data[CONF_EMAIL]}" | ||||
|         ) from e | ||||
|  | ||||
|     if not connected: | ||||
|         raise ConfigEntryAuthFailed("Authentication API error") | ||||
|  | ||||
|     coordinator = CompitDataUpdateCoordinator(hass, entry, connector) | ||||
|     await coordinator.async_config_entry_first_refresh() | ||||
|     entry.runtime_data = coordinator | ||||
|     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) | ||||
|     return True | ||||
|  | ||||
|  | ||||
| async def async_unload_entry(hass: HomeAssistant, entry: CompitConfigEntry) -> bool: | ||||
|     """Unload an entry for the Compit integration.""" | ||||
|     return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) | ||||
| @@ -1,265 +0,0 @@ | ||||
| """Module contains the CompitClimate class for controlling climate entities.""" | ||||
|  | ||||
| import logging | ||||
| from typing import Any | ||||
|  | ||||
| from compit_inext_api import Param, Parameter | ||||
| from compit_inext_api.consts import ( | ||||
|     CompitFanMode, | ||||
|     CompitHVACMode, | ||||
|     CompitParameter, | ||||
|     CompitPresetMode, | ||||
| ) | ||||
| from propcache.api import cached_property | ||||
|  | ||||
| from homeassistant.components.climate import ( | ||||
|     FAN_AUTO, | ||||
|     FAN_HIGH, | ||||
|     FAN_LOW, | ||||
|     FAN_MEDIUM, | ||||
|     FAN_OFF, | ||||
|     PRESET_AWAY, | ||||
|     PRESET_ECO, | ||||
|     PRESET_HOME, | ||||
|     PRESET_NONE, | ||||
|     ClimateEntity, | ||||
|     ClimateEntityFeature, | ||||
|     HVACMode, | ||||
| ) | ||||
| from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.exceptions import ServiceValidationError | ||||
| from homeassistant.helpers.device_registry import DeviceInfo | ||||
| from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback | ||||
| from homeassistant.helpers.update_coordinator import CoordinatorEntity | ||||
|  | ||||
| from .const import DOMAIN, MANUFACTURER_NAME | ||||
| from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator | ||||
|  | ||||
| _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||
|  | ||||
| # Device class for climate devices in Compit system | ||||
| CLIMATE_DEVICE_CLASS = 10 | ||||
| PARALLEL_UPDATES = 0 | ||||
|  | ||||
| COMPIT_MODE_MAP = { | ||||
|     CompitHVACMode.COOL: HVACMode.COOL, | ||||
|     CompitHVACMode.HEAT: HVACMode.HEAT, | ||||
|     CompitHVACMode.OFF: HVACMode.OFF, | ||||
| } | ||||
|  | ||||
| COMPIT_FANSPEED_MAP = { | ||||
|     CompitFanMode.OFF: FAN_OFF, | ||||
|     CompitFanMode.AUTO: FAN_AUTO, | ||||
|     CompitFanMode.LOW: FAN_LOW, | ||||
|     CompitFanMode.MEDIUM: FAN_MEDIUM, | ||||
|     CompitFanMode.HIGH: FAN_HIGH, | ||||
|     CompitFanMode.HOLIDAY: FAN_AUTO, | ||||
| } | ||||
|  | ||||
| COMPIT_PRESET_MAP = { | ||||
|     CompitPresetMode.AUTO: PRESET_HOME, | ||||
|     CompitPresetMode.HOLIDAY: PRESET_ECO, | ||||
|     CompitPresetMode.MANUAL: PRESET_NONE, | ||||
|     CompitPresetMode.AWAY: PRESET_AWAY, | ||||
| } | ||||
|  | ||||
| HVAC_MODE_TO_COMPIT_MODE = {v: k for k, v in COMPIT_MODE_MAP.items()} | ||||
| FAN_MODE_TO_COMPIT_FAN_MODE = {v: k for k, v in COMPIT_FANSPEED_MAP.items()} | ||||
| PRESET_MODE_TO_COMPIT_PRESET_MODE = {v: k for k, v in COMPIT_PRESET_MAP.items()} | ||||
|  | ||||
|  | ||||
async def async_setup_entry(
    hass: HomeAssistant,
    entry: CompitConfigEntry,
    async_add_devices: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the CompitClimate platform from a config entry."""
    coordinator = entry.runtime_data
    all_devices = coordinator.connector.all_devices

    # Create one climate entity per Compit device of the climate class.
    async_add_devices(
        CompitClimate(
            coordinator,
            device_id,
            {
                parameter.parameter_code: parameter
                for parameter in device.definition.parameters
            },
            device.definition.name,
        )
        for device_id, device in all_devices.items()
        if device.definition.device_class == CLIMATE_DEVICE_CLASS
    )
|  | ||||
|  | ||||
class CompitClimate(CoordinatorEntity[CompitDataUpdateCoordinator], ClimateEntity):
    """Representation of a Compit climate device."""

    _attr_temperature_unit = UnitOfTemperature.CELSIUS
    _attr_hvac_modes = [*COMPIT_MODE_MAP.values()]
    _attr_name = None
    _attr_has_entity_name = True
    _attr_supported_features = (
        ClimateEntityFeature.TARGET_TEMPERATURE
        | ClimateEntityFeature.FAN_MODE
        | ClimateEntityFeature.PRESET_MODE
    )

    def __init__(
        self,
        coordinator: CompitDataUpdateCoordinator,
        device_id: int,
        parameters: dict[str, Parameter],
        device_name: str,
    ) -> None:
        """Initialize the climate device."""
        super().__init__(coordinator)
        self.device_id = device_id
        self.parameters = parameters
        self._attr_unique_id = f"{device_name}_{device_id}"
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, str(device_id))},
            manufacturer=MANUFACTURER_NAME,
            model=device_name,
            name=device_name,
        )
        # Parameter definitions describing which presets / fan speeds this
        # device supports; None when the device does not expose them.
        self.available_presets: Parameter | None = parameters.get(
            CompitParameter.PRESET_MODE.value
        )
        self.available_fan_modes: Parameter | None = parameters.get(
            CompitParameter.FAN_MODE.value
        )

    @property
    def available(self) -> bool:
        """Return if entity is available."""
        if not super().available:
            return False
        # The device may disappear from the connector between updates.
        return self.device_id in self.coordinator.connector.all_devices

    def _float_value(self, parameter: CompitParameter) -> float | None:
        """Return a parameter's current value as a float, or None if absent."""
        param = self.get_parameter_value(parameter)
        return None if param is None else float(param.value)

    @property
    def current_temperature(self) -> float | None:
        """Return the current temperature."""
        return self._float_value(CompitParameter.CURRENT_TEMPERATURE)

    @property
    def target_temperature(self) -> float | None:
        """Return the temperature we try to reach."""
        return self._float_value(CompitParameter.SET_TARGET_TEMPERATURE)

    @cached_property
    def preset_modes(self) -> list[str] | None:
        """Return the available preset modes."""
        presets = self.available_presets
        if presets is None or presets.details is None:
            return []
        # dict.fromkeys preserves first-seen order while removing duplicates.
        return list(
            dict.fromkeys(
                mapped
                for detail in presets.details
                if detail is not None
                and (mapped := COMPIT_PRESET_MAP.get(CompitPresetMode(detail.state)))
            )
        )

    @cached_property
    def fan_modes(self) -> list[str] | None:
        """Return the available fan modes."""
        fan_modes = self.available_fan_modes
        if fan_modes is None or fan_modes.details is None:
            return []
        # dict.fromkeys preserves first-seen order while removing duplicates.
        return list(
            dict.fromkeys(
                mapped
                for detail in fan_modes.details
                if detail is not None
                and (mapped := COMPIT_FANSPEED_MAP.get(CompitFanMode(detail.state)))
            )
        )

    @property
    def preset_mode(self) -> str | None:
        """Return the current preset mode."""
        param = self.get_parameter_value(CompitParameter.PRESET_MODE)
        if not param:
            return None
        return COMPIT_PRESET_MAP.get(CompitPresetMode(param.value))

    @property
    def fan_mode(self) -> str | None:
        """Return the current fan mode."""
        param = self.get_parameter_value(CompitParameter.FAN_MODE)
        if not param:
            return None
        return COMPIT_FANSPEED_MAP.get(CompitFanMode(param.value))

    @property
    def hvac_mode(self) -> HVACMode | None:
        """Return the current HVAC mode."""
        param = self.get_parameter_value(CompitParameter.HVAC_MODE)
        if not param:
            return None
        return COMPIT_MODE_MAP.get(CompitHVACMode(param.value))

    async def async_set_temperature(self, **kwargs: Any) -> None:
        """Set new target temperature."""
        if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None:
            raise ServiceValidationError("Temperature argument missing")
        await self.set_parameter_value(
            CompitParameter.SET_TARGET_TEMPERATURE, temperature
        )

    async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
        """Set new target HVAC mode."""
        mode = HVAC_MODE_TO_COMPIT_MODE.get(hvac_mode)
        if not mode:
            raise ServiceValidationError(f"Invalid hvac mode {hvac_mode}")
        await self.set_parameter_value(CompitParameter.HVAC_MODE, mode.value)

    async def async_set_preset_mode(self, preset_mode: str) -> None:
        """Set new target preset mode."""
        if (target := PRESET_MODE_TO_COMPIT_PRESET_MODE.get(preset_mode)) is None:
            raise ServiceValidationError(f"Invalid preset mode: {preset_mode}")
        await self.set_parameter_value(CompitParameter.PRESET_MODE, target.value)

    async def async_set_fan_mode(self, fan_mode: str) -> None:
        """Set new target fan mode."""
        if (target := FAN_MODE_TO_COMPIT_FAN_MODE.get(fan_mode)) is None:
            raise ServiceValidationError(f"Invalid fan mode: {fan_mode}")
        await self.set_parameter_value(CompitParameter.FAN_MODE, target.value)

    async def set_parameter_value(self, parameter: CompitParameter, value: int) -> None:
        """Call the API to set a parameter to a new value."""
        connector = self.coordinator.connector
        await connector.set_device_parameter(self.device_id, parameter, value)
        self.async_write_ha_state()

    def get_parameter_value(self, parameter: CompitParameter) -> Param | None:
        """Get the parameter value from the device state."""
        return self.coordinator.connector.get_device_parameter(
            self.device_id, parameter
        )
| @@ -1,110 +0,0 @@ | ||||
| """Config flow for Compit integration.""" | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| from collections.abc import Mapping | ||||
| import logging | ||||
| from typing import Any | ||||
|  | ||||
| from compit_inext_api import CannotConnect, CompitApiConnector, InvalidAuth | ||||
| import voluptuous as vol | ||||
|  | ||||
| from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult | ||||
| from homeassistant.const import CONF_EMAIL, CONF_PASSWORD | ||||
| from homeassistant.helpers.aiohttp_client import async_create_clientsession | ||||
|  | ||||
| from .const import DOMAIN | ||||
|  | ||||
_LOGGER = logging.getLogger(__name__)

# Initial-setup form: asks for both the account email and password.
STEP_USER_DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_EMAIL): str,
        vol.Required(CONF_PASSWORD): str,
    }
)

# Reauth form: only the password is requested; the email is fixed by the
# existing config entry and shown as a description placeholder.
STEP_REAUTH_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_PASSWORD): str,
    }
)
|  | ||||
|  | ||||
class CompitConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Compit."""

    VERSION = 1

    async def async_step_user(
        self,
        user_input: dict[str, Any] | None = None,
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        errors: dict[str, str] = {}

        if user_input is not None:
            connector = CompitApiConnector(async_create_clientsession(self.hass))
            try:
                authenticated = await connector.init(
                    user_input[CONF_EMAIL],
                    user_input[CONF_PASSWORD],
                    self.hass.config.language,
                )
            except CannotConnect:
                errors["base"] = "cannot_connect"
            except InvalidAuth:
                errors["base"] = "invalid_auth"
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                if authenticated:
                    await self.async_set_unique_id(user_input[CONF_EMAIL])

                    if self.source == SOURCE_REAUTH:
                        self._abort_if_unique_id_mismatch()
                        return self.async_update_reload_and_abort(
                            self._get_reauth_entry(), data_updates=user_input
                        )
                    self._abort_if_unique_id_configured()
                    return self.async_create_entry(
                        title=user_input[CONF_EMAIL], data=user_input
                    )
                # Api returned unexpected result but no exception
                _LOGGER.error("Compit api returned unexpected result")
                errors["base"] = "unknown"

        return self.async_show_form(
            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
        )

    async def async_step_reauth(self, data: Mapping[str, Any]) -> ConfigFlowResult:
        """Handle re-auth."""
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm re-authentication."""
        errors: dict[str, str] = {}
        entry_data = self._get_reauth_entry().data

        if user_input:
            # Reuse async_step_user with combined credentials: the stored
            # email plus the freshly entered password.
            combined = {
                CONF_EMAIL: entry_data[CONF_EMAIL],
                CONF_PASSWORD: user_input[CONF_PASSWORD],
            }
            return await self.async_step_user(combined)

        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=STEP_REAUTH_SCHEMA,
            description_placeholders={CONF_EMAIL: entry_data[CONF_EMAIL]},
            errors=errors,
        )
| @@ -1,4 +0,0 @@ | ||||
"""Constants for the Compit integration."""

# Integration domain, must match manifest.json.
DOMAIN = "compit"
# Manufacturer name used for device registry entries.
MANUFACTURER_NAME = "Compit"
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user