Mirror of https://github.com/home-assistant/core.git (synced 2025-10-30 22:19:37 +00:00)

Compare commits: cdce8p-bui... and mqtt-subsc... (3 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | a784ec6454 |  |
|  | 8c8b1df11f |  |
|  | aabcff9653 |  |

.github/workflows/builder.yml: 2 lines changed (vendored)
@@ -326,7 +326,7 @@ jobs:
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 
       - name: Install Cosign
-        uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
+        uses: sigstore/cosign-installer@d7543c93d881b35a8faa02e8e3605f69b7a1ce62 # v3.10.0
         with:
           cosign-release: "v2.2.3"
 
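The builder.yml change above only swaps which commit of sigstore/cosign-installer is pinned. For reference, a minimal, hypothetical job showing the pin-by-commit-SHA convention these workflows follow (the checkout SHA is the one already used in this diff; the job itself is illustrative, not part of the repository):

```yaml
# Hypothetical workflow sketch of the pin-by-SHA pattern.
on: workflow_dispatch
jobs:
  example:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout the repository
        # Pin the action to a full commit SHA and keep the human-readable
        # release tag in a trailing comment so update tooling can track it.
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
```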
							
								
								
									
.github/workflows/ci.yaml: 30 lines changed (vendored)
@@ -37,12 +37,12 @@ on:
         type: boolean
 
 env:
-  CACHE_VERSION: 9
+  CACHE_VERSION: 8
   UV_CACHE_VERSION: 1
   MYPY_CACHE_VERSION: 1
   HA_SHORT_VERSION: "2025.11"
   DEFAULT_PYTHON: "3.13"
-  ALL_PYTHON_VERSIONS: "['3.13', '3.14']"
+  ALL_PYTHON_VERSIONS: "['3.13']"
   # 10.3 is the oldest supported version
   # - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022)
   # 10.6 is the current long-term-support
@@ -428,7 +428,7 @@ jobs:
     timeout-minutes: 60
     strategy:
       matrix:
-        python-version: &matrix-python ${{ fromJson(needs.info.outputs.python_versions) }}
+        python-version: ${{ fromJSON(needs.info.outputs.python_versions) }}
     steps:
       - *checkout
       - &setup-python-matrix
@@ -514,7 +514,9 @@ jobs:
         if: steps.cache-apt-check.outputs.cache-hit != 'true'
         uses: &actions-cache-save actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
-          path: *path-apt-cache
+          path: |
+            ${{ env.APT_CACHE_DIR }}
+            ${{ env.APT_LIST_CACHE_DIR }}
           key: *key-apt-cache
       - name: Create Python virtual environment
         if: steps.cache-venv.outputs.cache-hit != 'true'
@@ -523,7 +525,7 @@ jobs:
           . venv/bin/activate
           python --version
           pip install "$(grep '^uv' < requirements.txt)"
-          uv pip install -U "pip>=25.2"
+          uv pip install -U "pip>=21.3.1" setuptools wheel
           uv pip install -r requirements.txt
           python -m script.gen_requirements_all ci
           uv pip install -r requirements_all_pytest.txt -r requirements_test.txt
@@ -623,7 +625,7 @@ jobs:
     steps:
       - *checkout
       - name: Dependency review
-        uses: actions/dependency-review-action@40c09b7dc99638e5ddb0bfd91c1673effc064d8a # v4.8.1
+        uses: actions/dependency-review-action@56339e523c0409420f6c2c9a2f4292bbb3c07dd3 # v4.8.0
         with:
           license-check: false # We use our own license audit checks
 
@@ -639,7 +641,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: *matrix-python
+        python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
     steps:
       - *checkout
       - *setup-python-matrix
@@ -739,7 +741,7 @@ jobs:
       - name: Generate partial mypy restore key
         id: generate-mypy-key
         run: |
-          mypy_version=$(cat requirements_test.txt | grep 'mypy.*=' | cut -d '=' -f 3)
+          mypy_version=$(cat requirements_test.txt | grep mypy | cut -d '=' -f 3)
           echo "version=$mypy_version" >> $GITHUB_OUTPUT
           echo "key=mypy-${{ env.MYPY_CACHE_VERSION }}-$mypy_version-${{
             env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
@@ -836,8 +838,8 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: *matrix-python
-        group: &matrix-group ${{ fromJson(needs.info.outputs.test_groups) }}
+        python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
+        group: ${{ fromJson(needs.info.outputs.test_groups) }}
     steps:
       - *cache-restore-apt
       - name: Install additional OS dependencies
@@ -962,7 +964,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: *matrix-python
+        python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
         mariadb-group: ${{ fromJson(needs.info.outputs.mariadb_groups) }}
     steps:
       - *cache-restore-apt
@@ -1079,7 +1081,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: *matrix-python
+        python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
         postgresql-group: ${{ fromJson(needs.info.outputs.postgresql_groups) }}
     steps:
       - *cache-restore-apt
@@ -1216,8 +1218,8 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: *matrix-python
-        group: *matrix-group
+        python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
+        group: ${{ fromJson(needs.info.outputs.test_groups) }}
     steps:
       - *cache-restore-apt
       - name: Install additional OS dependencies
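Several hunks above replace YAML aliases such as `*matrix-python`, `*matrix-group`, and `*path-apt-cache` with their expanded values. As a refresher on the mechanism (a generic YAML sketch with illustrative keys, not taken from ci.yaml): an anchor `&name` labels a node, an alias `*name` reuses it, and the merge key `<<:` (a YAML 1.1 feature that most parsers accept) copies an anchored mapping so individual fields can be overridden.

```yaml
defaults: &job-defaults          # anchor: label this mapping "job-defaults"
  timeout-minutes: 60
  fail-fast: false

job_a:
  settings: *job-defaults        # alias: reuse the anchored mapping verbatim

job_b:
  settings:
    <<: *job-defaults            # merge key: copy the mapping...
    timeout-minutes: 30          # ...then override a single field
```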
							
								
								
									
.github/workflows/codeql.yml: 4 lines changed (vendored)
@@ -24,11 +24,11 @@ jobs:
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # v4.30.8
+        uses: github/codeql-action/init@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
         with:
           languages: python
 
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # v4.30.8
+        uses: github/codeql-action/analyze@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
         with:
           category: "/language:python"
							
								
								
									
.github/workflows/wheels.yml: 102 lines changed (vendored)
@@ -31,8 +31,7 @@ jobs:
     outputs:
       architectures: ${{ steps.info.outputs.architectures }}
     steps:
-      - &checkout
-        name: Checkout the repository
+      - name: Checkout the repository
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
@@ -80,8 +79,6 @@ jobs:
 
             # Add additional pip wheel build constraints
             echo "PIP_CONSTRAINT=build_constraints.txt"
-
-            echo 'CFLAGS="-Wno-error=int-conversion"'
           ) > .env_file
 
       - name: Write pip wheel build constraints
@@ -94,7 +91,7 @@ jobs:
           ) > build_constraints.txt
 
       - name: Upload env_file
-        uses: &actions-upload-artifact actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: env_file
           path: ./.env_file
@@ -102,14 +99,14 @@ jobs:
           overwrite: true
 
       - name: Upload build_constraints
-        uses: *actions-upload-artifact
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: build_constraints
           path: ./build_constraints.txt
           overwrite: true
 
       - name: Upload requirements_diff
-        uses: *actions-upload-artifact
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: requirements_diff
           path: ./requirements_diff.txt
@@ -121,50 +118,37 @@ jobs:
           python -m script.gen_requirements_all ci
 
       - name: Upload requirements_all_wheels
-        uses: *actions-upload-artifact
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: requirements_all_wheels
           path: ./requirements_all_wheels_*.txt
 
   core:
     name: Build Core wheels ${{ matrix.abi }} for ${{ matrix.arch }} (musllinux_1_2)
-    if: false && github.repository_owner == 'home-assistant'
+    if: github.repository_owner == 'home-assistant'
     needs: init
-    runs-on: ${{ matrix.os }}
+    runs-on: ubuntu-latest
     strategy:
       fail-fast: false
-      matrix: &matrix-build
-        abi: ["cp314"]
+      matrix:
+        abi: ["cp313"]
         arch: ${{ fromJson(needs.init.outputs.architectures) }}
-        include:
-          - os: ubuntu-latest
-          - arch: aarch64
-            os: ubuntu-24.04-arm
-        exclude:
-          - abi: cp314
-            arch: armv7
-          - abi: cp314
-            arch: armhf
-          - abi: cp314
-            arch: i386
     steps:
-      - *checkout
+      - name: Checkout the repository
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 
-      - &download-env-file
-        name: Download env_file
-        uses: &actions-download-artifact actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
+      - name: Download env_file
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           name: env_file
 
-      - &download-build-constraints
-        name: Download build_constraints
-        uses: *actions-download-artifact
+      - name: Download build_constraints
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           name: build_constraints
 
-      - &download-requirements-diff
-        name: Download requirements_diff
-        uses: *actions-download-artifact
+      - name: Download requirements_diff
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           name: requirements_diff
 
@@ -176,7 +160,7 @@ jobs:
 
       # home-assistant/wheels doesn't support sha pinning
       - name: Build wheels
-        uses: &home-assistant-wheels home-assistant/wheels@2025.10.0
+        uses: home-assistant/wheels@2025.09.1
         with:
           abi: ${{ matrix.abi }}
           tag: musllinux_1_2
@@ -193,19 +177,33 @@ jobs:
     name: Build wheels ${{ matrix.abi }} for ${{ matrix.arch }}
     if: github.repository_owner == 'home-assistant'
     needs: init
-    runs-on: ${{ matrix.os }}
+    runs-on: ubuntu-latest
     strategy:
       fail-fast: false
-      matrix: *matrix-build
+      matrix:
+        abi: ["cp313"]
+        arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
-      - *checkout
+      - name: Checkout the repository
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 
-      - *download-env-file
-      - *download-build-constraints
-      - *download-requirements-diff
+      - name: Download env_file
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
+        with:
+          name: env_file
+
+      - name: Download build_constraints
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
+        with:
+          name: build_constraints
+
+      - name: Download requirements_diff
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
+        with:
+          name: requirements_diff
 
       - name: Download requirements_all_wheels
-        uses: *actions-download-artifact
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           name: requirements_all_wheels
 
@@ -221,29 +219,9 @@ jobs:
           sed -i "/uv/d" requirements.txt
           sed -i "/uv/d" requirements_diff.txt
 
-      - name: Create requirements file for custom build
-        run: |
-          touch requirements_custom.txt
-          echo "netifaces==0.11.0" >> requirements_custom.txt
-
-      - name: Build wheels (custom)
-        uses: cdce8p/wheels@master
-        with:
-          abi: ${{ matrix.abi }}
-          tag: musllinux_1_2
-          arch: ${{ matrix.arch }}
-          wheels-key: ${{ secrets.WHEELS_KEY }}
-          env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
-          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
-          constraints: "homeassistant/package_constraints.txt"
-          requirements: "requirements_custom.txt"
-          verbose: true
-
       # home-assistant/wheels doesn't support sha pinning
       - name: Build wheels
-        uses: *home-assistant-wheels
-        if: false
+        uses: home-assistant/wheels@2025.09.1
         with:
           abi: ${{ matrix.abi }}
           tag: musllinux_1_2
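The block removed from the `core` job above used the matrix `include`/`exclude` keys to pick a runner per architecture and to skip cp314 builds on the 32-bit targets. A minimal sketch of how those keys behave in a GitHub Actions matrix (the abi/arch values echo the diff, but the workflow itself is illustrative):

```yaml
# Illustrative workflow: 2 abis x 3 arches = 6 combinations before filtering.
on: workflow_dispatch
jobs:
  build:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        abi: ["cp313", "cp314"]
        arch: ["amd64", "aarch64", "i386"]
        include:
          - os: ubuntu-latest      # adds a default os to every combination
          - arch: aarch64
            os: ubuntu-24.04-arm   # overrides os for the aarch64 combinations
        exclude:
          - abi: cp314
            arch: i386             # drops this abi/arch pair entirely
    steps:
      - run: echo "Building ${{ matrix.abi }} wheels for ${{ matrix.arch }}"
```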
							
								
								
									
.gitignore: 1 line changed (vendored)
@@ -79,6 +79,7 @@ junit.xml
 .project
 .pydevproject
 
+.python-version
 .tool-versions
 
 # emacs auto backups
@@ -1 +0,0 @@
-3.13
@@ -182,6 +182,7 @@ homeassistant.components.efergy.*
 homeassistant.components.eheimdigital.*
 homeassistant.components.electrasmart.*
 homeassistant.components.electric_kiwi.*
+homeassistant.components.elevenlabs.*
 homeassistant.components.elgato.*
 homeassistant.components.elkm1.*
 homeassistant.components.emulated_hue.*
@@ -220,7 +221,6 @@ homeassistant.components.generic_thermostat.*
 homeassistant.components.geo_location.*
 homeassistant.components.geocaching.*
 homeassistant.components.gios.*
-homeassistant.components.github.*
 homeassistant.components.glances.*
 homeassistant.components.go2rtc.*
 homeassistant.components.goalzero.*
							
								
								
									
CODEOWNERS: 22 lines changed (generated)
@@ -46,8 +46,6 @@ build.json @home-assistant/supervisor
 /tests/components/accuweather/ @bieniu
 /homeassistant/components/acmeda/ @atmurray
 /tests/components/acmeda/ @atmurray
-/homeassistant/components/actron_air/ @kclif9 @JagadishDhanamjayam
-/tests/components/actron_air/ @kclif9 @JagadishDhanamjayam
 /homeassistant/components/adax/ @danielhiversen @lazytarget
 /tests/components/adax/ @danielhiversen @lazytarget
 /homeassistant/components/adguard/ @frenck
@@ -619,8 +617,6 @@ build.json @home-assistant/supervisor
 /tests/components/greeneye_monitor/ @jkeljo
 /homeassistant/components/group/ @home-assistant/core
 /tests/components/group/ @home-assistant/core
-/homeassistant/components/growatt_server/ @johanzander
-/tests/components/growatt_server/ @johanzander
 /homeassistant/components/guardian/ @bachya
 /tests/components/guardian/ @bachya
 /homeassistant/components/habitica/ @tr4nt0r
@@ -766,8 +762,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
 /tests/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
 /homeassistant/components/intesishome/ @jnimmo
-/homeassistant/components/iometer/ @jukrebs
-/tests/components/iometer/ @jukrebs
+/homeassistant/components/iometer/ @MaestroOnICe
+/tests/components/iometer/ @MaestroOnICe
 /homeassistant/components/ios/ @robbiet480
 /tests/components/ios/ @robbiet480
 /homeassistant/components/iotawatt/ @gtdiehl @jyavenard
@@ -1069,8 +1065,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/nilu/ @hfurubotten
 /homeassistant/components/nina/ @DeerMaximum
 /tests/components/nina/ @DeerMaximum
-/homeassistant/components/nintendo_parental_controls/ @pantherale0
-/tests/components/nintendo_parental_controls/ @pantherale0
+/homeassistant/components/nintendo_parental/ @pantherale0
+/tests/components/nintendo_parental/ @pantherale0
 /homeassistant/components/nissan_leaf/ @filcole
 /homeassistant/components/noaa_tides/ @jdelaney72
 /homeassistant/components/nobo_hub/ @echoromeo @oyvindwe
@@ -1139,8 +1135,6 @@ build.json @home-assistant/supervisor
 /tests/components/opengarage/ @danielhiversen
 /homeassistant/components/openhome/ @bazwilliams
 /tests/components/openhome/ @bazwilliams
-/homeassistant/components/openrgb/ @felipecrs
-/tests/components/openrgb/ @felipecrs
 /homeassistant/components/opensky/ @joostlek
 /tests/components/opensky/ @joostlek
 /homeassistant/components/opentherm_gw/ @mvn23
@@ -1419,8 +1413,8 @@ build.json @home-assistant/supervisor
 /tests/components/sfr_box/ @epenet
 /homeassistant/components/sftp_storage/ @maretodoric
 /tests/components/sftp_storage/ @maretodoric
-/homeassistant/components/sharkiq/ @JeffResc @funkybunch @TheOneOgre
-/tests/components/sharkiq/ @JeffResc @funkybunch @TheOneOgre
+/homeassistant/components/sharkiq/ @JeffResc @funkybunch
+/tests/components/sharkiq/ @JeffResc @funkybunch
 /homeassistant/components/shell_command/ @home-assistant/core
 /tests/components/shell_command/ @home-assistant/core
 /homeassistant/components/shelly/ @bieniu @thecode @chemelli74 @bdraco
@@ -1485,8 +1479,8 @@ build.json @home-assistant/supervisor
 /tests/components/snoo/ @Lash-L
 /homeassistant/components/snooz/ @AustinBrunkhorst
 /tests/components/snooz/ @AustinBrunkhorst
-/homeassistant/components/solaredge/ @frenck @bdraco @tronikos
-/tests/components/solaredge/ @frenck @bdraco @tronikos
+/homeassistant/components/solaredge/ @frenck @bdraco
+/tests/components/solaredge/ @frenck @bdraco
 /homeassistant/components/solaredge_local/ @drobtravels @scheric
 /homeassistant/components/solarlog/ @Ernst79 @dontinelli
 /tests/components/solarlog/ @Ernst79 @dontinelli
@@ -34,11 +34,9 @@ WORKDIR /usr/src
 
 COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv
 
+RUN uv python install 3.13.2
+
 USER vscode
 
-COPY .python-version ./
-RUN uv python install
-
 ENV VIRTUAL_ENV="/home/vscode/.local/ha-venv"
 RUN uv venv $VIRTUAL_ENV
 ENV PATH="$VIRTUAL_ENV/bin:$PATH"
							
								
								
									
build.yaml: 10 lines changed
@@ -1,10 +1,10 @@
 image: ghcr.io/home-assistant/{arch}-homeassistant
 build_from:
-  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.10.1
-  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.10.1
-  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.10.1
-  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.1
-  i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.10.1
+  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.10.0
+  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.10.0
+  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.10.0
+  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.0
+  i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.10.0
 codenotary:
   signer: notary@home-assistant.io
   base_image: notary@home-assistant.io
@@ -34,9 +34,6 @@ INPUT_FIELD_CODE = "code"
 
 DUMMY_SECRET = "FPPTH34D4E3MI2HG"
 
-GOOGLE_AUTHENTICATOR_URL = "https://support.google.com/accounts/answer/1066447"
-AUTHY_URL = "https://authy.com/"
-
 
 def _generate_qr_code(data: str) -> str:
     """Generate a base64 PNG string represent QR Code image of data."""
@@ -232,8 +229,6 @@ class TotpSetupFlow(SetupFlow[TotpAuthModule]):
                 "code": self._ota_secret,
                 "url": self._url,
                 "qr_code": self._image,
-                "google_authenticator_url": GOOGLE_AUTHENTICATOR_URL,
-                "authy_url": AUTHY_URL,
             },
             errors=errors,
         )
@@ -71,4 +71,4 @@ POLLEN_CATEGORY_MAP = {
 }
 UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=10)
 UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
-UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(minutes=30)
+UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(hours=30)
@@ -1,9 +1,6 @@
 {
   "entity": {
     "sensor": {
-      "air_quality": {
-        "default": "mdi:air-filter"
-      },
       "cloud_ceiling": {
         "default": "mdi:weather-fog"
       },
@@ -37,6 +34,9 @@
       "thunderstorm_probability_night": {
         "default": "mdi:weather-lightning"
       },
+      "translation_key": {
+        "default": "mdi:air-filter"
+      },
       "tree_pollen": {
         "default": "mdi:tree-outline"
       },
| @@ -1,57 +0,0 @@ | |||||||
| """The Actron Air integration.""" |  | ||||||
|  |  | ||||||
| from actron_neo_api import ( |  | ||||||
|     ActronAirNeoACSystem, |  | ||||||
|     ActronNeoAPI, |  | ||||||
|     ActronNeoAPIError, |  | ||||||
|     ActronNeoAuthError, |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| from homeassistant.const import CONF_API_TOKEN, Platform |  | ||||||
| from homeassistant.core import HomeAssistant |  | ||||||
|  |  | ||||||
| from .const import _LOGGER |  | ||||||
| from .coordinator import ( |  | ||||||
|     ActronAirConfigEntry, |  | ||||||
|     ActronAirRuntimeData, |  | ||||||
|     ActronAirSystemCoordinator, |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| PLATFORM = [Platform.CLIMATE] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| async def async_setup_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) -> bool: |  | ||||||
|     """Set up Actron Air integration from a config entry.""" |  | ||||||
|  |  | ||||||
|     api = ActronNeoAPI(refresh_token=entry.data[CONF_API_TOKEN]) |  | ||||||
|     systems: list[ActronAirNeoACSystem] = [] |  | ||||||
|  |  | ||||||
|     try: |  | ||||||
|         systems = await api.get_ac_systems() |  | ||||||
|         await api.update_status() |  | ||||||
|     except ActronNeoAuthError: |  | ||||||
|         _LOGGER.error("Authentication error while setting up Actron Air integration") |  | ||||||
|         raise |  | ||||||
|     except ActronNeoAPIError as err: |  | ||||||
|         _LOGGER.error("API error while setting up Actron Air integration: %s", err) |  | ||||||
|         raise |  | ||||||
|  |  | ||||||
|     system_coordinators: dict[str, ActronAirSystemCoordinator] = {} |  | ||||||
|     for system in systems: |  | ||||||
|         coordinator = ActronAirSystemCoordinator(hass, entry, api, system) |  | ||||||
|         _LOGGER.debug("Setting up coordinator for system: %s", system["serial"]) |  | ||||||
|         await coordinator.async_config_entry_first_refresh() |  | ||||||
|         system_coordinators[system["serial"]] = coordinator |  | ||||||
|  |  | ||||||
|     entry.runtime_data = ActronAirRuntimeData( |  | ||||||
|         api=api, |  | ||||||
|         system_coordinators=system_coordinators, |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     await hass.config_entries.async_forward_entry_setups(entry, PLATFORM) |  | ||||||
|     return True |  | ||||||
|  |  | ||||||
|  |  | ||||||
| async def async_unload_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) -> bool: |  | ||||||
|     """Unload a config entry.""" |  | ||||||
|     return await hass.config_entries.async_unload_platforms(entry, PLATFORM) |  | ||||||
| @@ -1,259 +0,0 @@ | |||||||
| """Climate platform for Actron Air integration.""" |  | ||||||
|  |  | ||||||
| from typing import Any |  | ||||||
|  |  | ||||||
| from actron_neo_api import ActronAirNeoStatus, ActronAirNeoZone |  | ||||||
|  |  | ||||||
| from homeassistant.components.climate import ( |  | ||||||
|     FAN_AUTO, |  | ||||||
|     FAN_HIGH, |  | ||||||
|     FAN_LOW, |  | ||||||
|     FAN_MEDIUM, |  | ||||||
|     ClimateEntity, |  | ||||||
|     ClimateEntityFeature, |  | ||||||
|     HVACMode, |  | ||||||
| ) |  | ||||||
| from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature |  | ||||||
| from homeassistant.core import HomeAssistant |  | ||||||
| from homeassistant.helpers.device_registry import DeviceInfo |  | ||||||
| from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback |  | ||||||
| from homeassistant.helpers.update_coordinator import CoordinatorEntity |  | ||||||
|  |  | ||||||
| from .const import DOMAIN |  | ||||||
| from .coordinator import ActronAirConfigEntry, ActronAirSystemCoordinator |  | ||||||
|  |  | ||||||
| PARALLEL_UPDATES = 0 |  | ||||||
|  |  | ||||||
| FAN_MODE_MAPPING_ACTRONAIR_TO_HA = { |  | ||||||
|     "AUTO": FAN_AUTO, |  | ||||||
|     "LOW": FAN_LOW, |  | ||||||
|     "MED": FAN_MEDIUM, |  | ||||||
|     "HIGH": FAN_HIGH, |  | ||||||
| } |  | ||||||
| FAN_MODE_MAPPING_HA_TO_ACTRONAIR = { |  | ||||||
|     v: k for k, v in FAN_MODE_MAPPING_ACTRONAIR_TO_HA.items() |  | ||||||
| } |  | ||||||
| HVAC_MODE_MAPPING_ACTRONAIR_TO_HA = { |  | ||||||
|     "COOL": HVACMode.COOL, |  | ||||||
|     "HEAT": HVACMode.HEAT, |  | ||||||
|     "FAN": HVACMode.FAN_ONLY, |  | ||||||
|     "AUTO": HVACMode.AUTO, |  | ||||||
|     "OFF": HVACMode.OFF, |  | ||||||
| } |  | ||||||
| HVAC_MODE_MAPPING_HA_TO_ACTRONAIR = { |  | ||||||
|     v: k for k, v in HVAC_MODE_MAPPING_ACTRONAIR_TO_HA.items() |  | ||||||
| } |  | ||||||
|  |  | ||||||
|  |  | ||||||
| async def async_setup_entry( |  | ||||||
|     hass: HomeAssistant, |  | ||||||
|     entry: ActronAirConfigEntry, |  | ||||||
|     async_add_entities: AddConfigEntryEntitiesCallback, |  | ||||||
| ) -> None: |  | ||||||
|     """Set up Actron Air climate entities.""" |  | ||||||
|     system_coordinators = entry.runtime_data.system_coordinators |  | ||||||
|     entities: list[ClimateEntity] = [] |  | ||||||
|  |  | ||||||
|     for coordinator in system_coordinators.values(): |  | ||||||
|         status = coordinator.data |  | ||||||
|         name = status.ac_system.system_name |  | ||||||
|         entities.append(ActronSystemClimate(coordinator, name)) |  | ||||||
|  |  | ||||||
|         entities.extend( |  | ||||||
|             ActronZoneClimate(coordinator, zone) |  | ||||||
|             for zone in status.remote_zone_info |  | ||||||
|             if zone.exists |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     async_add_entities(entities) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class BaseClimateEntity(CoordinatorEntity[ActronAirSystemCoordinator], ClimateEntity): |  | ||||||
|     """Base class for Actron Air climate entities.""" |  | ||||||
|  |  | ||||||
|     _attr_has_entity_name = True |  | ||||||
|     _attr_temperature_unit = UnitOfTemperature.CELSIUS |  | ||||||
|     _attr_supported_features = ( |  | ||||||
|         ClimateEntityFeature.TARGET_TEMPERATURE |  | ||||||
|         | ClimateEntityFeature.FAN_MODE |  | ||||||
|         | ClimateEntityFeature.TURN_ON |  | ||||||
|         | ClimateEntityFeature.TURN_OFF |  | ||||||
|     ) |  | ||||||
|     _attr_name = None |  | ||||||
|     _attr_fan_modes = list(FAN_MODE_MAPPING_ACTRONAIR_TO_HA.values()) |  | ||||||
|     _attr_hvac_modes = list(HVAC_MODE_MAPPING_ACTRONAIR_TO_HA.values()) |  | ||||||
|  |  | ||||||
|     def __init__( |  | ||||||
|         self, |  | ||||||
|         coordinator: ActronAirSystemCoordinator, |  | ||||||
|         name: str, |  | ||||||
|     ) -> None: |  | ||||||
|         """Initialize an Actron Air unit.""" |  | ||||||
|         super().__init__(coordinator) |  | ||||||
|         self._serial_number = coordinator.serial_number |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ActronSystemClimate(BaseClimateEntity): |  | ||||||
|     """Representation of the Actron Air system.""" |  | ||||||
|  |  | ||||||
|     _attr_supported_features = ( |  | ||||||
|         ClimateEntityFeature.TARGET_TEMPERATURE |  | ||||||
|         | ClimateEntityFeature.FAN_MODE |  | ||||||
|         | ClimateEntityFeature.TURN_ON |  | ||||||
|         | ClimateEntityFeature.TURN_OFF |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     def __init__( |  | ||||||
|         self, |  | ||||||
|         coordinator: ActronAirSystemCoordinator, |  | ||||||
|         name: str, |  | ||||||
|     ) -> None: |  | ||||||
|         """Initialize an Actron Air unit.""" |  | ||||||
|         super().__init__(coordinator, name) |  | ||||||
|         serial_number = coordinator.serial_number |  | ||||||
|         self._attr_unique_id = serial_number |  | ||||||
|         self._attr_device_info = DeviceInfo( |  | ||||||
|             identifiers={(DOMAIN, serial_number)}, |  | ||||||
|             name=self._status.ac_system.system_name, |  | ||||||
|             manufacturer="Actron Air", |  | ||||||
|             model_id=self._status.ac_system.master_wc_model, |  | ||||||
|             sw_version=self._status.ac_system.master_wc_firmware_version, |  | ||||||
|             serial_number=serial_number, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def min_temp(self) -> float: |  | ||||||
|         """Return the minimum temperature that can be set.""" |  | ||||||
|         return self._status.min_temp |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def max_temp(self) -> float: |  | ||||||
|         """Return the maximum temperature that can be set.""" |  | ||||||
|         return self._status.max_temp |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def _status(self) -> ActronAirNeoStatus: |  | ||||||
|         """Get the current status from the coordinator.""" |  | ||||||
|         return self.coordinator.data |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def hvac_mode(self) -> HVACMode | None: |  | ||||||
|         """Return the current HVAC mode.""" |  | ||||||
|         if not self._status.user_aircon_settings.is_on: |  | ||||||
|             return HVACMode.OFF |  | ||||||
|  |  | ||||||
|         mode = self._status.user_aircon_settings.mode |  | ||||||
|         return HVAC_MODE_MAPPING_ACTRONAIR_TO_HA.get(mode) |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def fan_mode(self) -> str | None: |  | ||||||
|         """Return the current fan mode.""" |  | ||||||
|         fan_mode = self._status.user_aircon_settings.fan_mode |  | ||||||
|         return FAN_MODE_MAPPING_ACTRONAIR_TO_HA.get(fan_mode) |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def current_humidity(self) -> float: |  | ||||||
|         """Return the current humidity.""" |  | ||||||
|         return self._status.master_info.live_humidity_pc |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def current_temperature(self) -> float: |  | ||||||
|         """Return the current temperature.""" |  | ||||||
|         return self._status.master_info.live_temp_c |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def target_temperature(self) -> float: |  | ||||||
|         """Return the target temperature.""" |  | ||||||
|         return self._status.user_aircon_settings.temperature_setpoint_cool_c |  | ||||||
|  |  | ||||||
|     async def async_set_fan_mode(self, fan_mode: str) -> None: |  | ||||||
|         """Set a new fan mode.""" |  | ||||||
|         api_fan_mode = FAN_MODE_MAPPING_HA_TO_ACTRONAIR.get(fan_mode.lower()) |  | ||||||
|         await self._status.user_aircon_settings.set_fan_mode(api_fan_mode) |  | ||||||
|  |  | ||||||
|     async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: |  | ||||||
|         """Set the HVAC mode.""" |  | ||||||
|         ac_mode = HVAC_MODE_MAPPING_HA_TO_ACTRONAIR.get(hvac_mode) |  | ||||||
|         await self._status.ac_system.set_system_mode(ac_mode) |  | ||||||
|  |  | ||||||
|     async def async_set_temperature(self, **kwargs: Any) -> None: |  | ||||||
|         """Set the temperature.""" |  | ||||||
|         temp = kwargs.get(ATTR_TEMPERATURE) |  | ||||||
|         await self._status.user_aircon_settings.set_temperature(temperature=temp) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ActronZoneClimate(BaseClimateEntity): |  | ||||||
|     """Representation of a zone within the Actron Air system.""" |  | ||||||
|  |  | ||||||
|     _attr_supported_features = ( |  | ||||||
|         ClimateEntityFeature.TARGET_TEMPERATURE |  | ||||||
|         | ClimateEntityFeature.TURN_ON |  | ||||||
|         | ClimateEntityFeature.TURN_OFF |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     def __init__( |  | ||||||
|         self, |  | ||||||
|         coordinator: ActronAirSystemCoordinator, |  | ||||||
|         zone: ActronAirNeoZone, |  | ||||||
|     ) -> None: |  | ||||||
|         """Initialize an Actron Air unit.""" |  | ||||||
|         super().__init__(coordinator, zone.title) |  | ||||||
|         serial_number = coordinator.serial_number |  | ||||||
|         self._zone_id: int = zone.zone_id |  | ||||||
|         self._attr_unique_id: str = f"{serial_number}_zone_{zone.zone_id}" |  | ||||||
|         self._attr_device_info: DeviceInfo = DeviceInfo( |  | ||||||
|             identifiers={(DOMAIN, self._attr_unique_id)}, |  | ||||||
|             name=zone.title, |  | ||||||
|             manufacturer="Actron Air", |  | ||||||
|             model="Zone", |  | ||||||
|             suggested_area=zone.title, |  | ||||||
|             via_device=(DOMAIN, serial_number), |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def min_temp(self) -> float: |  | ||||||
|         """Return the minimum temperature that can be set.""" |  | ||||||
|         return self._zone.min_temp |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def max_temp(self) -> float: |  | ||||||
|         """Return the maximum temperature that can be set.""" |  | ||||||
|         return self._zone.max_temp |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def _zone(self) -> ActronAirNeoZone: |  | ||||||
|         """Get the current zone data from the coordinator.""" |  | ||||||
|         status = self.coordinator.data |  | ||||||
|         return status.zones[self._zone_id] |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def hvac_mode(self) -> HVACMode | None: |  | ||||||
|         """Return the current HVAC mode.""" |  | ||||||
|         if self._zone.is_active: |  | ||||||
|             mode = self._zone.hvac_mode |  | ||||||
|             return HVAC_MODE_MAPPING_ACTRONAIR_TO_HA.get(mode) |  | ||||||
|         return HVACMode.OFF |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def current_humidity(self) -> float | None: |  | ||||||
|         """Return the current humidity.""" |  | ||||||
|         return self._zone.humidity |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def current_temperature(self) -> float | None: |  | ||||||
|         """Return the current temperature.""" |  | ||||||
|         return self._zone.live_temp_c |  | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def target_temperature(self) -> float | None: |  | ||||||
|         """Return the target temperature.""" |  | ||||||
|         return self._zone.temperature_setpoint_cool_c |  | ||||||
|  |  | ||||||
|     async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: |  | ||||||
|         """Set the HVAC mode.""" |  | ||||||
|         is_enabled = hvac_mode != HVACMode.OFF |  | ||||||
|         await self._zone.enable(is_enabled) |  | ||||||
|  |  | ||||||
|     async def async_set_temperature(self, **kwargs: Any) -> None: |  | ||||||
|         """Set the temperature.""" |  | ||||||
|         await self._zone.set_temperature(temperature=kwargs["temperature"]) |  | ||||||
| @@ -1,132 +0,0 @@ | |||||||
| """Setup config flow for Actron Air integration.""" |  | ||||||
|  |  | ||||||
| import asyncio |  | ||||||
| from typing import Any |  | ||||||
|  |  | ||||||
| from actron_neo_api import ActronNeoAPI, ActronNeoAuthError |  | ||||||
|  |  | ||||||
| from homeassistant.config_entries import ConfigFlow, ConfigFlowResult |  | ||||||
| from homeassistant.const import CONF_API_TOKEN |  | ||||||
| from homeassistant.exceptions import HomeAssistantError |  | ||||||
|  |  | ||||||
| from .const import _LOGGER, DOMAIN |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN): |  | ||||||
|     """Handle a config flow for Actron Air.""" |  | ||||||
|  |  | ||||||
|     def __init__(self) -> None: |  | ||||||
|         """Initialize the config flow.""" |  | ||||||
|         self._api: ActronNeoAPI | None = None |  | ||||||
|         self._device_code: str | None = None |  | ||||||
|         self._user_code: str = "" |  | ||||||
|         self._verification_uri: str = "" |  | ||||||
|         self._expires_minutes: str = "30" |  | ||||||
|         self.login_task: asyncio.Task | None = None |  | ||||||
|  |  | ||||||
|     async def async_step_user( |  | ||||||
|         self, user_input: dict[str, Any] | None = None |  | ||||||
|     ) -> ConfigFlowResult: |  | ||||||
|         """Handle the initial step.""" |  | ||||||
|         if self._api is None: |  | ||||||
|             _LOGGER.debug("Initiating device authorization") |  | ||||||
|             self._api = ActronNeoAPI() |  | ||||||
|             try: |  | ||||||
|                 device_code_response = await self._api.request_device_code() |  | ||||||
|             except ActronNeoAuthError as err: |  | ||||||
|                 _LOGGER.error("OAuth2 flow failed: %s", err) |  | ||||||
|                 return self.async_abort(reason="oauth2_error") |  | ||||||
|  |  | ||||||
|             self._device_code = device_code_response["device_code"] |  | ||||||
|             self._user_code = device_code_response["user_code"] |  | ||||||
|             self._verification_uri = device_code_response["verification_uri_complete"] |  | ||||||
|             self._expires_minutes = str(device_code_response["expires_in"] // 60) |  | ||||||
|  |  | ||||||
|         async def _wait_for_authorization() -> None: |  | ||||||
|             """Wait for the user to authorize the device.""" |  | ||||||
|             assert self._api is not None |  | ||||||
|             assert self._device_code is not None |  | ||||||
|             _LOGGER.debug("Waiting for device authorization") |  | ||||||
|             try: |  | ||||||
|                 await self._api.poll_for_token(self._device_code) |  | ||||||
|                 _LOGGER.debug("Authorization successful") |  | ||||||
|             except ActronNeoAuthError as ex: |  | ||||||
|                 _LOGGER.exception("Error while waiting for device authorization") |  | ||||||
|                 raise CannotConnect from ex |  | ||||||
|  |  | ||||||
|         _LOGGER.debug("Checking login task") |  | ||||||
|         if self.login_task is None: |  | ||||||
|             _LOGGER.debug("Creating task for device authorization") |  | ||||||
|             self.login_task = self.hass.async_create_task(_wait_for_authorization()) |  | ||||||
|  |  | ||||||
|         if self.login_task.done(): |  | ||||||
|             _LOGGER.debug("Login task is done, checking results") |  | ||||||
|             if exception := self.login_task.exception(): |  | ||||||
|                 if isinstance(exception, CannotConnect): |  | ||||||
|                     return self.async_show_progress_done( |  | ||||||
|                         next_step_id="connection_error" |  | ||||||
|                     ) |  | ||||||
|                 return self.async_show_progress_done(next_step_id="timeout") |  | ||||||
|             return self.async_show_progress_done(next_step_id="finish_login") |  | ||||||
|  |  | ||||||
|         return self.async_show_progress( |  | ||||||
|             step_id="user", |  | ||||||
|             progress_action="wait_for_authorization", |  | ||||||
|             description_placeholders={ |  | ||||||
|                 "user_code": self._user_code, |  | ||||||
|                 "verification_uri": self._verification_uri, |  | ||||||
|                 "expires_minutes": self._expires_minutes, |  | ||||||
|             }, |  | ||||||
|             progress_task=self.login_task, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     async def async_step_finish_login( |  | ||||||
|         self, user_input: dict[str, Any] | None = None |  | ||||||
|     ) -> ConfigFlowResult: |  | ||||||
|         """Handle the finalization of login.""" |  | ||||||
|         _LOGGER.debug("Finalizing authorization") |  | ||||||
|         assert self._api is not None |  | ||||||
|  |  | ||||||
|         try: |  | ||||||
|             user_data = await self._api.get_user_info() |  | ||||||
|         except ActronNeoAuthError as err: |  | ||||||
|             _LOGGER.error("Error getting user info: %s", err) |  | ||||||
|             return self.async_abort(reason="oauth2_error") |  | ||||||
|  |  | ||||||
|         unique_id = str(user_data["id"]) |  | ||||||
|         await self.async_set_unique_id(unique_id) |  | ||||||
|         self._abort_if_unique_id_configured() |  | ||||||
|  |  | ||||||
|         return self.async_create_entry( |  | ||||||
|             title=user_data["email"], |  | ||||||
|             data={CONF_API_TOKEN: self._api.refresh_token_value}, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     async def async_step_timeout( |  | ||||||
|         self, |  | ||||||
|         user_input: dict[str, Any] | None = None, |  | ||||||
|     ) -> ConfigFlowResult: |  | ||||||
|         """Handle issues that need transition await from progress step.""" |  | ||||||
|         if user_input is None: |  | ||||||
|             return self.async_show_form( |  | ||||||
|                 step_id="timeout", |  | ||||||
|             ) |  | ||||||
|         del self.login_task |  | ||||||
|         return await self.async_step_user() |  | ||||||
|  |  | ||||||
|     async def async_step_connection_error( |  | ||||||
|         self, user_input: dict[str, Any] | None = None |  | ||||||
|     ) -> ConfigFlowResult: |  | ||||||
|         """Handle connection error from progress step.""" |  | ||||||
|         if user_input is None: |  | ||||||
|             return self.async_show_form(step_id="connection_error") |  | ||||||
|  |  | ||||||
|         # Reset state and try again |  | ||||||
|         self._api = None |  | ||||||
|         self._device_code = None |  | ||||||
|         self.login_task = None |  | ||||||
|         return await self.async_step_user() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class CannotConnect(HomeAssistantError): |  | ||||||
|     """Error to indicate we cannot connect.""" |  | ||||||
| @@ -1,6 +0,0 @@ | |||||||
| """Constants used by Actron Air integration.""" |  | ||||||
|  |  | ||||||
| import logging |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__package__) |  | ||||||
| DOMAIN = "actron_air" |  | ||||||
| @@ -1,69 +0,0 @@ | |||||||
| """Coordinator for Actron Air integration.""" |  | ||||||
|  |  | ||||||
| from __future__ import annotations |  | ||||||
|  |  | ||||||
| from dataclasses import dataclass |  | ||||||
| from datetime import timedelta |  | ||||||
|  |  | ||||||
| from actron_neo_api import ActronAirNeoACSystem, ActronAirNeoStatus, ActronNeoAPI |  | ||||||
|  |  | ||||||
| from homeassistant.config_entries import ConfigEntry |  | ||||||
| from homeassistant.core import HomeAssistant |  | ||||||
| from homeassistant.helpers.update_coordinator import DataUpdateCoordinator |  | ||||||
| from homeassistant.util import dt as dt_util |  | ||||||
|  |  | ||||||
| from .const import _LOGGER |  | ||||||
|  |  | ||||||
| STALE_DEVICE_TIMEOUT = timedelta(hours=24) |  | ||||||
| ERROR_NO_SYSTEMS_FOUND = "no_systems_found" |  | ||||||
| ERROR_UNKNOWN = "unknown_error" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclass |  | ||||||
| class ActronAirRuntimeData: |  | ||||||
|     """Runtime data for the Actron Air integration.""" |  | ||||||
|  |  | ||||||
|     api: ActronNeoAPI |  | ||||||
|     system_coordinators: dict[str, ActronAirSystemCoordinator] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| type ActronAirConfigEntry = ConfigEntry[ActronAirRuntimeData] |  | ||||||
|  |  | ||||||
| AUTH_ERROR_THRESHOLD = 3 |  | ||||||
| SCAN_INTERVAL = timedelta(seconds=30) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirNeoACSystem]): |  | ||||||
|     """System coordinator for Actron Air integration.""" |  | ||||||
|  |  | ||||||
|     def __init__( |  | ||||||
|         self, |  | ||||||
|         hass: HomeAssistant, |  | ||||||
|         entry: ActronAirConfigEntry, |  | ||||||
|         api: ActronNeoAPI, |  | ||||||
|         system: ActronAirNeoACSystem, |  | ||||||
|     ) -> None: |  | ||||||
|         """Initialize the coordinator.""" |  | ||||||
|         super().__init__( |  | ||||||
|             hass, |  | ||||||
|             _LOGGER, |  | ||||||
|             name="Actron Air Status", |  | ||||||
|             update_interval=SCAN_INTERVAL, |  | ||||||
|             config_entry=entry, |  | ||||||
|         ) |  | ||||||
|         self.system = system |  | ||||||
|         self.serial_number = system["serial"] |  | ||||||
|         self.api = api |  | ||||||
|         self.status = self.api.state_manager.get_status(self.serial_number) |  | ||||||
|         self.last_seen = dt_util.utcnow() |  | ||||||
|  |  | ||||||
|     async def _async_update_data(self) -> ActronAirNeoStatus: |  | ||||||
|         """Fetch updates and merge incremental changes into the full state.""" |  | ||||||
|         await self.api.update_status() |  | ||||||
|         self.status = self.api.state_manager.get_status(self.serial_number) |  | ||||||
|         self.last_seen = dt_util.utcnow() |  | ||||||
|         return self.status |  | ||||||
|  |  | ||||||
|     def is_device_stale(self) -> bool: |  | ||||||
|         """Check if a device is stale (not seen for a while).""" |  | ||||||
|         return (dt_util.utcnow() - self.last_seen) > STALE_DEVICE_TIMEOUT |  | ||||||
| @@ -1,16 +0,0 @@ | |||||||
| { |  | ||||||
|   "domain": "actron_air", |  | ||||||
|   "name": "Actron Air", |  | ||||||
|   "codeowners": ["@kclif9", "@JagadishDhanamjayam"], |  | ||||||
|   "config_flow": true, |  | ||||||
|   "dhcp": [ |  | ||||||
|     { |  | ||||||
|       "hostname": "neo-*", |  | ||||||
|       "macaddress": "FC0FE7*" |  | ||||||
|     } |  | ||||||
|   ], |  | ||||||
|   "documentation": "https://www.home-assistant.io/integrations/actron_air", |  | ||||||
|   "iot_class": "cloud_polling", |  | ||||||
|   "quality_scale": "bronze", |  | ||||||
|   "requirements": ["actron-neo-api==0.1.84"] |  | ||||||
| } |  | ||||||
| @@ -1,78 +0,0 @@ | |||||||
| rules: |  | ||||||
|   # Bronze |  | ||||||
|   action-setup: |  | ||||||
|     status: exempt |  | ||||||
|     comment: This integration does not have custom service actions. |  | ||||||
|   appropriate-polling: done |  | ||||||
|   brands: done |  | ||||||
|   common-modules: done |  | ||||||
|   config-flow-test-coverage: done |  | ||||||
|   config-flow: done |  | ||||||
|   dependency-transparency: done |  | ||||||
|   docs-actions: |  | ||||||
|     status: exempt |  | ||||||
|     comment: This integration does not have custom service actions. |  | ||||||
|   docs-high-level-description: done |  | ||||||
|   docs-installation-instructions: done |  | ||||||
|   docs-removal-instructions: done |  | ||||||
|   entity-event-setup: |  | ||||||
|     status: exempt |  | ||||||
|     comment: This integration does not subscribe to external events. |  | ||||||
|   entity-unique-id: done |  | ||||||
|   has-entity-name: done |  | ||||||
|   runtime-data: done |  | ||||||
|   test-before-configure: done |  | ||||||
|   test-before-setup: done |  | ||||||
|   unique-config-entry: done |  | ||||||
|  |  | ||||||
|   # Silver |  | ||||||
|   action-exceptions: todo |  | ||||||
|   config-entry-unloading: done |  | ||||||
|   docs-configuration-parameters: |  | ||||||
|     status: exempt |  | ||||||
|     comment: No options flow |  | ||||||
|   docs-installation-parameters: done |  | ||||||
|   entity-unavailable: done |  | ||||||
|   integration-owner: done |  | ||||||
|   log-when-unavailable: done |  | ||||||
|   parallel-updates: done |  | ||||||
|   reauthentication-flow: todo |  | ||||||
|   test-coverage: todo |  | ||||||
|  |  | ||||||
|   # Gold |  | ||||||
|   devices: done |  | ||||||
|   diagnostics: todo |  | ||||||
|   discovery-update-info: |  | ||||||
|     status: exempt |  | ||||||
|     comment: This integration uses DHCP discovery; however, it is cloud polling, so there is no information to update. |  | ||||||
|   discovery: done |  | ||||||
|   docs-data-update: done |  | ||||||
|   docs-examples: done |  | ||||||
|   docs-known-limitations: done |  | ||||||
|   docs-supported-devices: done |  | ||||||
|   docs-supported-functions: done |  | ||||||
|   docs-troubleshooting: done |  | ||||||
|   docs-use-cases: done |  | ||||||
|   dynamic-devices: todo |  | ||||||
|   entity-category: |  | ||||||
|     status: exempt |  | ||||||
|     comment: This integration does not use entity categories. |  | ||||||
|   entity-device-class: |  | ||||||
|     status: exempt |  | ||||||
|     comment: This integration does not use entity device classes. |  | ||||||
|   entity-disabled-by-default: |  | ||||||
|     status: exempt |  | ||||||
|     comment: Not required for this integration at this stage. |  | ||||||
|   entity-translations: todo |  | ||||||
|   exception-translations: todo |  | ||||||
|   icon-translations: todo |  | ||||||
|   reconfiguration-flow: todo |  | ||||||
|   repair-issues: |  | ||||||
|     status: exempt |  | ||||||
|     comment: This integration does not have any known issues that require repair. |  | ||||||
|   stale-devices: todo |  | ||||||
|  |  | ||||||
|   # Platinum |  | ||||||
|   async-dependency: done |  | ||||||
|   inject-websession: todo |  | ||||||
|   strict-typing: todo |  | ||||||
| @@ -1,29 +0,0 @@ | |||||||
| { |  | ||||||
|   "config": { |  | ||||||
|     "step": { |  | ||||||
|       "user": { |  | ||||||
|         "title": "Actron Air OAuth2 Authorization" |  | ||||||
|       }, |  | ||||||
|       "timeout": { |  | ||||||
|         "title": "Authorization timeout", |  | ||||||
|         "description": "The authorization process timed out. Please try again.", |  | ||||||
|         "data": {} |  | ||||||
|       }, |  | ||||||
|       "connection_error": { |  | ||||||
|         "title": "Connection error", |  | ||||||
|         "description": "Failed to connect to Actron Air. Please check your internet connection and try again.", |  | ||||||
|         "data": {} |  | ||||||
|       } |  | ||||||
|     }, |  | ||||||
|     "progress": { |  | ||||||
|       "wait_for_authorization": "To authenticate, open the following URL and login at Actron Air:\n{verification_uri}\nIf the code is not automatically copied, paste the following code to authorize the integration:\n\n```{user_code}```\n\n\nThe login attempt will time out after {expires_minutes} minutes." |  | ||||||
|     }, |  | ||||||
|     "error": { |  | ||||||
|       "oauth2_error": "Failed to start OAuth2 flow. Please try again later." |  | ||||||
|     }, |  | ||||||
|     "abort": { |  | ||||||
|       "oauth2_error": "Failed to start OAuth2 flow", |  | ||||||
|       "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" |  | ||||||
|     } |  | ||||||
|   } |  | ||||||
| } |  | ||||||
| @@ -6,5 +6,5 @@ | |||||||
|   "documentation": "https://www.home-assistant.io/integrations/adax", |   "documentation": "https://www.home-assistant.io/integrations/adax", | ||||||
|   "iot_class": "local_polling", |   "iot_class": "local_polling", | ||||||
|   "loggers": ["adax", "adax_local"], |   "loggers": ["adax", "adax_local"], | ||||||
|   "requirements": ["adax==0.4.0", "Adax-local==0.2.0"] |   "requirements": ["adax==0.4.0", "Adax-local==0.1.5"] | ||||||
| } | } | ||||||
|   | |||||||
| @@ -71,14 +71,7 @@ class AemetConfigFlow(ConfigFlow, domain=DOMAIN): | |||||||
|             } |             } | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|         return self.async_show_form( |         return self.async_show_form(step_id="user", data_schema=schema, errors=errors) | ||||||
|             step_id="user", |  | ||||||
|             data_schema=schema, |  | ||||||
|             errors=errors, |  | ||||||
|             description_placeholders={ |  | ||||||
|                 "api_key_url": "https://opendata.aemet.es/centrodedescargas/altaUsuario" |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     @staticmethod |     @staticmethod | ||||||
|     @callback |     @callback | ||||||
|   | |||||||
| @@ -14,7 +14,7 @@ | |||||||
|           "longitude": "[%key:common::config_flow::data::longitude%]", |           "longitude": "[%key:common::config_flow::data::longitude%]", | ||||||
|           "name": "Name of the integration" |           "name": "Name of the integration" | ||||||
|         }, |         }, | ||||||
|         "description": "To generate API key go to {api_key_url}" |         "description": "To generate API key go to https://opendata.aemet.es/centrodedescargas/altaUsuario" | ||||||
|       } |       } | ||||||
|     } |     } | ||||||
|   }, |   }, | ||||||
|   | |||||||
| @@ -53,6 +53,9 @@ __all__ = [ | |||||||
|     "GenImageTaskResult", |     "GenImageTaskResult", | ||||||
|     "async_generate_data", |     "async_generate_data", | ||||||
|     "async_generate_image", |     "async_generate_image", | ||||||
|  |     "async_setup", | ||||||
|  |     "async_setup_entry", | ||||||
|  |     "async_unload_entry", | ||||||
| ] | ] | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) | _LOGGER = logging.getLogger(__name__) | ||||||
|   | |||||||
| @@ -1,9 +1,7 @@ | |||||||
| """Airgradient Update platform.""" | """Airgradient Update platform.""" | ||||||
|  |  | ||||||
| from datetime import timedelta | from datetime import timedelta | ||||||
| import logging |  | ||||||
|  |  | ||||||
| from airgradient import AirGradientConnectionError |  | ||||||
| from propcache.api import cached_property | from propcache.api import cached_property | ||||||
|  |  | ||||||
| from homeassistant.components.update import UpdateDeviceClass, UpdateEntity | from homeassistant.components.update import UpdateDeviceClass, UpdateEntity | ||||||
| @@ -15,7 +13,6 @@ from .entity import AirGradientEntity | |||||||
|  |  | ||||||
| PARALLEL_UPDATES = 1 | PARALLEL_UPDATES = 1 | ||||||
| SCAN_INTERVAL = timedelta(hours=1) | SCAN_INTERVAL = timedelta(hours=1) | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| async def async_setup_entry( | async def async_setup_entry( | ||||||
| @@ -34,7 +31,6 @@ class AirGradientUpdate(AirGradientEntity, UpdateEntity): | |||||||
|     """Representation of Airgradient Update.""" |     """Representation of Airgradient Update.""" | ||||||
|  |  | ||||||
|     _attr_device_class = UpdateDeviceClass.FIRMWARE |     _attr_device_class = UpdateDeviceClass.FIRMWARE | ||||||
|     _server_unreachable_logged = False |  | ||||||
|  |  | ||||||
|     def __init__(self, coordinator: AirGradientCoordinator) -> None: |     def __init__(self, coordinator: AirGradientCoordinator) -> None: | ||||||
|         """Initialize the entity.""" |         """Initialize the entity.""" | ||||||
| @@ -51,27 +47,10 @@ class AirGradientUpdate(AirGradientEntity, UpdateEntity): | |||||||
|         """Return the installed version of the entity.""" |         """Return the installed version of the entity.""" | ||||||
|         return self.coordinator.data.measures.firmware_version |         return self.coordinator.data.measures.firmware_version | ||||||
|  |  | ||||||
|     @property |  | ||||||
|     def available(self) -> bool: |  | ||||||
|         """Return if entity is available.""" |  | ||||||
|         return super().available and self._attr_available |  | ||||||
|  |  | ||||||
|     async def async_update(self) -> None: |     async def async_update(self) -> None: | ||||||
|         """Update the entity.""" |         """Update the entity.""" | ||||||
|         try: |  | ||||||
|             self._attr_latest_version = ( |         self._attr_latest_version = ( | ||||||
|                 await self.coordinator.client.get_latest_firmware_version( |             await self.coordinator.client.get_latest_firmware_version( | ||||||
|                     self.coordinator.serial_number |                 self.coordinator.serial_number | ||||||
|                 ) |             ) | ||||||
|             ) |         ) | ||||||
|         except AirGradientConnectionError: |  | ||||||
|             self._attr_latest_version = None |  | ||||||
|             self._attr_available = False |  | ||||||
|             if not self._server_unreachable_logged: |  | ||||||
|                 _LOGGER.error( |  | ||||||
|                     "Unable to connect to AirGradient server to check for updates" |  | ||||||
|                 ) |  | ||||||
|                 self._server_unreachable_logged = True |  | ||||||
|         else: |  | ||||||
|             self._server_unreachable_logged = False |  | ||||||
|             self._attr_available = True |  | ||||||
|   | |||||||
| @@ -18,10 +18,6 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession | |||||||
|  |  | ||||||
| from .const import CONF_USE_NEAREST, DOMAIN, NO_AIRLY_SENSORS | from .const import CONF_USE_NEAREST, DOMAIN, NO_AIRLY_SENSORS | ||||||
|  |  | ||||||
| DESCRIPTION_PLACEHOLDERS = { |  | ||||||
|     "developer_registration_url": "https://developer.airly.eu/register", |  | ||||||
| } |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class AirlyFlowHandler(ConfigFlow, domain=DOMAIN): | class AirlyFlowHandler(ConfigFlow, domain=DOMAIN): | ||||||
|     """Config flow for Airly.""" |     """Config flow for Airly.""" | ||||||
| @@ -89,7 +85,6 @@ class AirlyFlowHandler(ConfigFlow, domain=DOMAIN): | |||||||
|                 } |                 } | ||||||
|             ), |             ), | ||||||
|             errors=errors, |             errors=errors, | ||||||
|             description_placeholders=DESCRIPTION_PLACEHOLDERS, |  | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|  |  | ||||||
|   | |||||||
| @@ -2,7 +2,7 @@ | |||||||
|   "config": { |   "config": { | ||||||
|     "step": { |     "step": { | ||||||
|       "user": { |       "user": { | ||||||
|         "description": "To generate API key go to {developer_registration_url}", |         "description": "To generate API key go to https://developer.airly.eu/register", | ||||||
|         "data": { |         "data": { | ||||||
|           "name": "[%key:common::config_flow::data::name%]", |           "name": "[%key:common::config_flow::data::name%]", | ||||||
|           "api_key": "[%key:common::config_flow::data::api_key%]", |           "api_key": "[%key:common::config_flow::data::api_key%]", | ||||||
|   | |||||||
| @@ -26,10 +26,6 @@ from .const import DOMAIN | |||||||
| _LOGGER = logging.getLogger(__name__) | _LOGGER = logging.getLogger(__name__) | ||||||
|  |  | ||||||
|  |  | ||||||
| # Documentation URL for API key generation |  | ||||||
| _API_KEY_URL = "https://docs.airnowapi.org/account/request/" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> bool: | async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> bool: | ||||||
|     """Validate the user input allows us to connect. |     """Validate the user input allows us to connect. | ||||||
|  |  | ||||||
| @@ -118,7 +114,6 @@ class AirNowConfigFlow(ConfigFlow, domain=DOMAIN): | |||||||
|                     ), |                     ), | ||||||
|                 } |                 } | ||||||
|             ), |             ), | ||||||
|             description_placeholders={"api_key_url": _API_KEY_URL}, |  | ||||||
|             errors=errors, |             errors=errors, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -2,7 +2,7 @@ | |||||||
|   "config": { |   "config": { | ||||||
|     "step": { |     "step": { | ||||||
|       "user": { |       "user": { | ||||||
|         "description": "To generate API key go to {api_key_url}", |         "description": "To generate API key go to https://docs.airnowapi.org/account/request/", | ||||||
|         "data": { |         "data": { | ||||||
|           "api_key": "[%key:common::config_flow::data::api_key%]", |           "api_key": "[%key:common::config_flow::data::api_key%]", | ||||||
|           "latitude": "[%key:common::config_flow::data::latitude%]", |           "latitude": "[%key:common::config_flow::data::latitude%]", | ||||||
|   | |||||||
| @@ -2,8 +2,6 @@ | |||||||
|  |  | ||||||
| from __future__ import annotations | from __future__ import annotations | ||||||
|  |  | ||||||
| import logging |  | ||||||
|  |  | ||||||
| from airos.airos8 import AirOS8 | from airos.airos8 import AirOS8 | ||||||
|  |  | ||||||
| from homeassistant.const import ( | from homeassistant.const import ( | ||||||
| @@ -14,11 +12,10 @@ from homeassistant.const import ( | |||||||
|     CONF_VERIFY_SSL, |     CONF_VERIFY_SSL, | ||||||
|     Platform, |     Platform, | ||||||
| ) | ) | ||||||
| from homeassistant.core import HomeAssistant, callback | from homeassistant.core import HomeAssistant | ||||||
| from homeassistant.helpers import device_registry as dr, entity_registry as er |  | ||||||
| from homeassistant.helpers.aiohttp_client import async_get_clientsession | from homeassistant.helpers.aiohttp_client import async_get_clientsession | ||||||
|  |  | ||||||
| from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, SECTION_ADVANCED_SETTINGS | from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, SECTION_ADVANCED_SETTINGS | ||||||
| from .coordinator import AirOSConfigEntry, AirOSDataUpdateCoordinator | from .coordinator import AirOSConfigEntry, AirOSDataUpdateCoordinator | ||||||
|  |  | ||||||
| _PLATFORMS: list[Platform] = [ | _PLATFORMS: list[Platform] = [ | ||||||
| @@ -26,8 +23,6 @@ _PLATFORMS: list[Platform] = [ | |||||||
|     Platform.SENSOR, |     Platform.SENSOR, | ||||||
| ] | ] | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool: | async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool: | ||||||
|     """Set up Ubiquiti airOS from a config entry.""" |     """Set up Ubiquiti airOS from a config entry.""" | ||||||
| @@ -59,13 +54,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo | |||||||
| async def async_migrate_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool: | async def async_migrate_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool: | ||||||
|     """Migrate old config entry.""" |     """Migrate old config entry.""" | ||||||
|  |  | ||||||
|     if entry.version > 2: |     if entry.version > 1: | ||||||
|         # This means the user has downgraded from a future version |         # This means the user has downgraded from a future version | ||||||
|         return False |         return False | ||||||
|  |  | ||||||
|     # 1.1 Migrate config_entry to add advanced ssl settings |  | ||||||
|     if entry.version == 1 and entry.minor_version == 1: |     if entry.version == 1 and entry.minor_version == 1: | ||||||
|         new_minor_version = 2 |  | ||||||
|         new_data = {**entry.data} |         new_data = {**entry.data} | ||||||
|         advanced_data = { |         advanced_data = { | ||||||
|             CONF_SSL: DEFAULT_SSL, |             CONF_SSL: DEFAULT_SSL, | ||||||
| @@ -76,52 +69,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> b | |||||||
|         hass.config_entries.async_update_entry( |         hass.config_entries.async_update_entry( | ||||||
|             entry, |             entry, | ||||||
|             data=new_data, |             data=new_data, | ||||||
|             minor_version=new_minor_version, |             minor_version=2, | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     # 2.1 Migrate binary_sensor entity unique_id from device_id to mac_address |  | ||||||
|     #     Step 1 - migrate binary_sensor entity unique_id |  | ||||||
|     #     Step 2 - migrate device entity identifier |  | ||||||
|     if entry.version == 1: |  | ||||||
|         new_version = 2 |  | ||||||
|         new_minor_version = 1 |  | ||||||
|  |  | ||||||
|         mac_adress = dr.format_mac(entry.unique_id) |  | ||||||
|  |  | ||||||
|         device_registry = dr.async_get(hass) |  | ||||||
|         if device_entry := device_registry.async_get_device( |  | ||||||
|             connections={(dr.CONNECTION_NETWORK_MAC, mac_adress)} |  | ||||||
|         ): |  | ||||||
|             old_device_id = next( |  | ||||||
|                 ( |  | ||||||
|                     device_id |  | ||||||
|                     for domain, device_id in device_entry.identifiers |  | ||||||
|                     if domain == DOMAIN |  | ||||||
|                 ), |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|             @callback |  | ||||||
|             def update_unique_id( |  | ||||||
|                 entity_entry: er.RegistryEntry, |  | ||||||
|             ) -> dict[str, str] | None: |  | ||||||
|                 """Update unique id from device_id to mac address.""" |  | ||||||
|                 if old_device_id and entity_entry.unique_id.startswith(old_device_id): |  | ||||||
|                     suffix = entity_entry.unique_id.removeprefix(old_device_id) |  | ||||||
|                     new_unique_id = f"{mac_adress}{suffix}" |  | ||||||
|                     return {"new_unique_id": new_unique_id} |  | ||||||
|                 return None |  | ||||||
|  |  | ||||||
|             await er.async_migrate_entries(hass, entry.entry_id, update_unique_id) |  | ||||||
|  |  | ||||||
|             new_identifiers = device_entry.identifiers.copy() |  | ||||||
|             new_identifiers.discard((DOMAIN, old_device_id)) |  | ||||||
|             new_identifiers.add((DOMAIN, mac_adress)) |  | ||||||
|             device_registry.async_update_device( |  | ||||||
|                 device_entry.id, new_identifiers=new_identifiers |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         hass.config_entries.async_update_entry( |  | ||||||
|             entry, version=new_version, minor_version=new_minor_version |  | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     return True |     return True | ||||||
|   | |||||||
| @@ -98,7 +98,7 @@ class AirOSBinarySensor(AirOSEntity, BinarySensorEntity): | |||||||
|         super().__init__(coordinator) |         super().__init__(coordinator) | ||||||
|  |  | ||||||
|         self.entity_description = description |         self.entity_description = description | ||||||
|         self._attr_unique_id = f"{coordinator.data.derived.mac}_{description.key}" |         self._attr_unique_id = f"{coordinator.data.host.device_id}_{description.key}" | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def is_on(self) -> bool: |     def is_on(self) -> bool: | ||||||
|   | |||||||
| @@ -15,12 +15,7 @@ from airos.exceptions import ( | |||||||
| ) | ) | ||||||
| import voluptuous as vol | import voluptuous as vol | ||||||
|  |  | ||||||
| from homeassistant.config_entries import ( | from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult | ||||||
|     SOURCE_REAUTH, |  | ||||||
|     SOURCE_RECONFIGURE, |  | ||||||
|     ConfigFlow, |  | ||||||
|     ConfigFlowResult, |  | ||||||
| ) |  | ||||||
| from homeassistant.const import ( | from homeassistant.const import ( | ||||||
|     CONF_HOST, |     CONF_HOST, | ||||||
|     CONF_PASSWORD, |     CONF_PASSWORD, | ||||||
| @@ -62,8 +57,8 @@ STEP_USER_DATA_SCHEMA = vol.Schema( | |||||||
| class AirOSConfigFlow(ConfigFlow, domain=DOMAIN): | class AirOSConfigFlow(ConfigFlow, domain=DOMAIN): | ||||||
|     """Handle a config flow for Ubiquiti airOS.""" |     """Handle a config flow for Ubiquiti airOS.""" | ||||||
|  |  | ||||||
|     VERSION = 2 |     VERSION = 1 | ||||||
|     MINOR_VERSION = 1 |     MINOR_VERSION = 2 | ||||||
|  |  | ||||||
|     def __init__(self) -> None: |     def __init__(self) -> None: | ||||||
|         """Initialize the config flow.""" |         """Initialize the config flow.""" | ||||||
| @@ -124,7 +119,7 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN): | |||||||
|         else: |         else: | ||||||
|             await self.async_set_unique_id(airos_data.derived.mac) |             await self.async_set_unique_id(airos_data.derived.mac) | ||||||
|  |  | ||||||
|             if self.source in [SOURCE_REAUTH, SOURCE_RECONFIGURE]: |             if self.source == SOURCE_REAUTH: | ||||||
|                 self._abort_if_unique_id_mismatch() |                 self._abort_if_unique_id_mismatch() | ||||||
|             else: |             else: | ||||||
|                 self._abort_if_unique_id_configured() |                 self._abort_if_unique_id_configured() | ||||||
| @@ -169,54 +164,3 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN): | |||||||
|             ), |             ), | ||||||
|             errors=self.errors, |             errors=self.errors, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     async def async_step_reconfigure( |  | ||||||
|         self, |  | ||||||
|         user_input: Mapping[str, Any] | None = None, |  | ||||||
|     ) -> ConfigFlowResult: |  | ||||||
|         """Handle reconfiguration of airOS.""" |  | ||||||
|         self.errors = {} |  | ||||||
|         entry = self._get_reconfigure_entry() |  | ||||||
|         current_data = entry.data |  | ||||||
|  |  | ||||||
|         if user_input is not None: |  | ||||||
|             validate_data = {**current_data, **user_input} |  | ||||||
|             if await self._validate_and_get_device_info(config_data=validate_data): |  | ||||||
|                 return self.async_update_reload_and_abort( |  | ||||||
|                     entry, |  | ||||||
|                     data_updates=validate_data, |  | ||||||
|                 ) |  | ||||||
|  |  | ||||||
|         return self.async_show_form( |  | ||||||
|             step_id="reconfigure", |  | ||||||
|             data_schema=vol.Schema( |  | ||||||
|                 { |  | ||||||
|                     vol.Required(CONF_PASSWORD): TextSelector( |  | ||||||
|                         TextSelectorConfig( |  | ||||||
|                             type=TextSelectorType.PASSWORD, |  | ||||||
|                             autocomplete="current-password", |  | ||||||
|                         ) |  | ||||||
|                     ), |  | ||||||
|                     vol.Required(SECTION_ADVANCED_SETTINGS): section( |  | ||||||
|                         vol.Schema( |  | ||||||
|                             { |  | ||||||
|                                 vol.Required( |  | ||||||
|                                     CONF_SSL, |  | ||||||
|                                     default=current_data[SECTION_ADVANCED_SETTINGS][ |  | ||||||
|                                         CONF_SSL |  | ||||||
|                                     ], |  | ||||||
|                                 ): bool, |  | ||||||
|                                 vol.Required( |  | ||||||
|                                     CONF_VERIFY_SSL, |  | ||||||
|                                     default=current_data[SECTION_ADVANCED_SETTINGS][ |  | ||||||
|                                         CONF_VERIFY_SSL |  | ||||||
|                                     ], |  | ||||||
|                                 ): bool, |  | ||||||
|                             } |  | ||||||
|                         ), |  | ||||||
|                         {"collapsed": True}, |  | ||||||
|                     ), |  | ||||||
|                 } |  | ||||||
|             ), |  | ||||||
|             errors=self.errors, |  | ||||||
|         ) |  | ||||||
|   | |||||||
| @@ -33,14 +33,9 @@ class AirOSEntity(CoordinatorEntity[AirOSDataUpdateCoordinator]): | |||||||
|         self._attr_device_info = DeviceInfo( |         self._attr_device_info = DeviceInfo( | ||||||
|             connections={(CONNECTION_NETWORK_MAC, airos_data.derived.mac)}, |             connections={(CONNECTION_NETWORK_MAC, airos_data.derived.mac)}, | ||||||
|             configuration_url=configuration_url, |             configuration_url=configuration_url, | ||||||
|             identifiers={(DOMAIN, airos_data.derived.mac)}, |             identifiers={(DOMAIN, str(airos_data.host.device_id))}, | ||||||
|             manufacturer=MANUFACTURER, |             manufacturer=MANUFACTURER, | ||||||
|             model=airos_data.host.devmodel, |             model=airos_data.host.devmodel, | ||||||
|             model_id=( |  | ||||||
|                 sku |  | ||||||
|                 if (sku := airos_data.derived.sku) not in ["UNKNOWN", "AMBIGUOUS"] |  | ||||||
|                 else None |  | ||||||
|             ), |  | ||||||
|             name=airos_data.host.hostname, |             name=airos_data.host.hostname, | ||||||
|             sw_version=airos_data.host.fwversion, |             sw_version=airos_data.host.fwversion, | ||||||
|         ) |         ) | ||||||
|   | |||||||
| @@ -4,8 +4,7 @@ | |||||||
|   "codeowners": ["@CoMPaTech"], |   "codeowners": ["@CoMPaTech"], | ||||||
|   "config_flow": true, |   "config_flow": true, | ||||||
|   "documentation": "https://www.home-assistant.io/integrations/airos", |   "documentation": "https://www.home-assistant.io/integrations/airos", | ||||||
|   "integration_type": "device", |  | ||||||
|   "iot_class": "local_polling", |   "iot_class": "local_polling", | ||||||
|   "quality_scale": "silver", |   "quality_scale": "bronze", | ||||||
|   "requirements": ["airos==0.5.6"] |   "requirements": ["airos==0.5.5"] | ||||||
| } | } | ||||||
|   | |||||||
| @@ -32,11 +32,11 @@ rules: | |||||||
|   config-entry-unloading: done |   config-entry-unloading: done | ||||||
|   docs-configuration-parameters: done |   docs-configuration-parameters: done | ||||||
|   docs-installation-parameters: done |   docs-installation-parameters: done | ||||||
|   entity-unavailable: done |   entity-unavailable: todo | ||||||
|   integration-owner: done |   integration-owner: done | ||||||
|   log-when-unavailable: done |   log-when-unavailable: todo | ||||||
|   parallel-updates: done |   parallel-updates: todo | ||||||
|   reauthentication-flow: done |   reauthentication-flow: todo | ||||||
|   test-coverage: done |   test-coverage: done | ||||||
|  |  | ||||||
|   # Gold |   # Gold | ||||||
| @@ -48,9 +48,9 @@ rules: | |||||||
|   docs-examples: todo |   docs-examples: todo | ||||||
|   docs-known-limitations: done |   docs-known-limitations: done | ||||||
|   docs-supported-devices: done |   docs-supported-devices: done | ||||||
|   docs-supported-functions: done |   docs-supported-functions: todo | ||||||
|   docs-troubleshooting: done |   docs-troubleshooting: done | ||||||
|   docs-use-cases: done |   docs-use-cases: todo | ||||||
|   dynamic-devices: todo |   dynamic-devices: todo | ||||||
|   entity-category: done |   entity-category: done | ||||||
|   entity-device-class: done |   entity-device-class: done | ||||||
| @@ -60,7 +60,7 @@ rules: | |||||||
|   icon-translations: |   icon-translations: | ||||||
|     status: exempt |     status: exempt | ||||||
|     comment: no (custom) icons used or envisioned |     comment: no (custom) icons used or envisioned | ||||||
|   reconfiguration-flow: done |   reconfiguration-flow: todo | ||||||
|   repair-issues: todo |   repair-issues: todo | ||||||
|   stale-devices: todo |   stale-devices: todo | ||||||
|  |  | ||||||
|   | |||||||
| @@ -10,27 +10,6 @@ | |||||||
|           "password": "[%key:component::airos::config::step::user::data_description::password%]" |           "password": "[%key:component::airos::config::step::user::data_description::password%]" | ||||||
|         } |         } | ||||||
|       }, |       }, | ||||||
|       "reconfigure": { |  | ||||||
|         "data": { |  | ||||||
|           "password": "[%key:common::config_flow::data::password%]" |  | ||||||
|         }, |  | ||||||
|         "data_description": { |  | ||||||
|           "password": "[%key:component::airos::config::step::user::data_description::password%]" |  | ||||||
|         }, |  | ||||||
|         "sections": { |  | ||||||
|           "advanced_settings": { |  | ||||||
|             "name": "[%key:component::airos::config::step::user::sections::advanced_settings::name%]", |  | ||||||
|             "data": { |  | ||||||
|               "ssl": "[%key:component::airos::config::step::user::sections::advanced_settings::data::ssl%]", |  | ||||||
|               "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" |  | ||||||
|             }, |  | ||||||
|             "data_description": { |  | ||||||
|               "ssl": "[%key:component::airos::config::step::user::sections::advanced_settings::data_description::ssl%]", |  | ||||||
|               "verify_ssl": "[%key:component::airos::config::step::user::sections::advanced_settings::data_description::verify_ssl%]" |  | ||||||
|             } |  | ||||||
|           } |  | ||||||
|         } |  | ||||||
|       }, |  | ||||||
|       "user": { |       "user": { | ||||||
|         "data": { |         "data": { | ||||||
|           "host": "[%key:common::config_flow::data::host%]", |           "host": "[%key:common::config_flow::data::host%]", | ||||||
| @@ -44,7 +23,6 @@ | |||||||
|         }, |         }, | ||||||
|         "sections": { |         "sections": { | ||||||
|           "advanced_settings": { |           "advanced_settings": { | ||||||
|             "name": "Advanced settings", |  | ||||||
|             "data": { |             "data": { | ||||||
|               "ssl": "Use HTTPS", |               "ssl": "Use HTTPS", | ||||||
|               "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" |               "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" | ||||||
| @@ -66,7 +44,6 @@ | |||||||
|     "abort": { |     "abort": { | ||||||
|       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", |       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", | ||||||
|       "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", |       "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", | ||||||
|       "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", |  | ||||||
|       "unique_id_mismatch": "Re-authentication should be used for the same device not a new one" |       "unique_id_mismatch": "Re-authentication should be used for the same device not a new one" | ||||||
|     } |     } | ||||||
|   }, |   }, | ||||||
|   | |||||||
| @@ -7,5 +7,5 @@ | |||||||
|   "integration_type": "hub", |   "integration_type": "hub", | ||||||
|   "iot_class": "local_polling", |   "iot_class": "local_polling", | ||||||
|   "loggers": ["aioairq"], |   "loggers": ["aioairq"], | ||||||
|   "requirements": ["aioairq==0.4.7"] |   "requirements": ["aioairq==0.4.6"] | ||||||
| } | } | ||||||
|   | |||||||
| @@ -29,7 +29,7 @@ | |||||||
|         }, |         }, | ||||||
|         "data_description": { |         "data_description": { | ||||||
|           "return_average": "air-Q allows to poll both the noisy sensor readings as well as the values averaged on the device (default)", |           "return_average": "air-Q allows to poll both the noisy sensor readings as well as the values averaged on the device (default)", | ||||||
|           "clip_negatives": "For baseline calibration purposes, certain sensor values may briefly become negative. The default behavior is to clip such values to 0" |           "clip_negatives": "For baseline calibration purposes, certain sensor values may briefly become negative. The default behaviour is to clip such values to 0" | ||||||
|         } |         } | ||||||
|       } |       } | ||||||
|     } |     } | ||||||
|   | |||||||
| @@ -16,12 +16,10 @@ from homeassistant.components.sensor import ( | |||||||
| from homeassistant.const import ( | from homeassistant.const import ( | ||||||
|     CONCENTRATION_PARTS_PER_BILLION, |     CONCENTRATION_PARTS_PER_BILLION, | ||||||
|     CONCENTRATION_PARTS_PER_MILLION, |     CONCENTRATION_PARTS_PER_MILLION, | ||||||
|     LIGHT_LUX, |  | ||||||
|     PERCENTAGE, |     PERCENTAGE, | ||||||
|     EntityCategory, |     EntityCategory, | ||||||
|     Platform, |     Platform, | ||||||
|     UnitOfPressure, |     UnitOfPressure, | ||||||
|     UnitOfSoundPressure, |  | ||||||
|     UnitOfTemperature, |     UnitOfTemperature, | ||||||
| ) | ) | ||||||
| from homeassistant.core import HomeAssistant, callback | from homeassistant.core import HomeAssistant, callback | ||||||
| @@ -114,21 +112,6 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = { | |||||||
|         state_class=SensorStateClass.MEASUREMENT, |         state_class=SensorStateClass.MEASUREMENT, | ||||||
|         suggested_display_precision=0, |         suggested_display_precision=0, | ||||||
|     ), |     ), | ||||||
|     "lux": SensorEntityDescription( |  | ||||||
|         key="lux", |  | ||||||
|         device_class=SensorDeviceClass.ILLUMINANCE, |  | ||||||
|         native_unit_of_measurement=LIGHT_LUX, |  | ||||||
|         state_class=SensorStateClass.MEASUREMENT, |  | ||||||
|         suggested_display_precision=0, |  | ||||||
|     ), |  | ||||||
|     "noise": SensorEntityDescription( |  | ||||||
|         key="noise", |  | ||||||
|         translation_key="ambient_noise", |  | ||||||
|         device_class=SensorDeviceClass.SOUND_PRESSURE, |  | ||||||
|         native_unit_of_measurement=UnitOfSoundPressure.WEIGHTED_DECIBEL_A, |  | ||||||
|         state_class=SensorStateClass.MEASUREMENT, |  | ||||||
|         suggested_display_precision=0, |  | ||||||
|     ), |  | ||||||
| } | } | ||||||
|  |  | ||||||
| PARALLEL_UPDATES = 0 | PARALLEL_UPDATES = 0 | ||||||
|   | |||||||
| @@ -41,9 +41,6 @@ | |||||||
|       }, |       }, | ||||||
|       "illuminance": { |       "illuminance": { | ||||||
|         "name": "[%key:component::sensor::entity_component::illuminance::name%]" |         "name": "[%key:component::sensor::entity_component::illuminance::name%]" | ||||||
|       }, |  | ||||||
|       "ambient_noise": { |  | ||||||
|         "name": "Ambient noise" |  | ||||||
|       } |       } | ||||||
|     } |     } | ||||||
|   } |   } | ||||||
|   | |||||||
| @@ -2,9 +2,10 @@ | |||||||
|  |  | ||||||
| from __future__ import annotations | from __future__ import annotations | ||||||
|  |  | ||||||
|  | import asyncio | ||||||
| from datetime import timedelta | from datetime import timedelta | ||||||
| import logging | import logging | ||||||
| from typing import Any, Final, final | from typing import TYPE_CHECKING, Any, Final, final | ||||||
|  |  | ||||||
| from propcache.api import cached_property | from propcache.api import cached_property | ||||||
| import voluptuous as vol | import voluptuous as vol | ||||||
| @@ -27,6 +28,8 @@ from homeassistant.helpers import config_validation as cv | |||||||
| from homeassistant.helpers.config_validation import make_entity_service_schema | from homeassistant.helpers.config_validation import make_entity_service_schema | ||||||
| from homeassistant.helpers.entity import Entity, EntityDescription | from homeassistant.helpers.entity import Entity, EntityDescription | ||||||
| from homeassistant.helpers.entity_component import EntityComponent | from homeassistant.helpers.entity_component import EntityComponent | ||||||
|  | from homeassistant.helpers.entity_platform import EntityPlatform | ||||||
|  | from homeassistant.helpers.frame import ReportBehavior, report_usage | ||||||
| from homeassistant.helpers.typing import ConfigType | from homeassistant.helpers.typing import ConfigType | ||||||
| from homeassistant.util.hass_dict import HassKey | from homeassistant.util.hass_dict import HassKey | ||||||
|  |  | ||||||
| @@ -146,11 +149,68 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A | |||||||
|     ) |     ) | ||||||
|     _alarm_control_panel_option_default_code: str | None = None |     _alarm_control_panel_option_default_code: str | None = None | ||||||
|  |  | ||||||
|  |     __alarm_legacy_state: bool = False | ||||||
|  |  | ||||||
|  |     def __init_subclass__(cls, **kwargs: Any) -> None: | ||||||
|  |         """Post initialisation processing.""" | ||||||
|  |         super().__init_subclass__(**kwargs) | ||||||
|  |         if any(method in cls.__dict__ for method in ("_attr_state", "state")): | ||||||
|  |             # Integrations should use the 'alarm_state' property instead of | ||||||
|  |             # setting the state directly. | ||||||
|  |             cls.__alarm_legacy_state = True | ||||||
|  |  | ||||||
|  |     def __setattr__(self, name: str, value: Any, /) -> None: | ||||||
|  |         """Set attribute. | ||||||
|  |  | ||||||
|  |         Deprecation warning if setting '_attr_state' directly | ||||||
|  |         unless already reported. | ||||||
|  |         """ | ||||||
|  |         if name == "_attr_state": | ||||||
|  |             self._report_deprecated_alarm_state_handling() | ||||||
|  |         return super().__setattr__(name, value) | ||||||
|  |  | ||||||
|  |     @callback | ||||||
|  |     def add_to_platform_start( | ||||||
|  |         self, | ||||||
|  |         hass: HomeAssistant, | ||||||
|  |         platform: EntityPlatform, | ||||||
|  |         parallel_updates: asyncio.Semaphore | None, | ||||||
|  |     ) -> None: | ||||||
|  |         """Start adding an entity to a platform.""" | ||||||
|  |         super().add_to_platform_start(hass, platform, parallel_updates) | ||||||
|  |         if self.__alarm_legacy_state: | ||||||
|  |             self._report_deprecated_alarm_state_handling() | ||||||
|  |  | ||||||
|  |     @callback | ||||||
|  |     def _report_deprecated_alarm_state_handling(self) -> None: | ||||||
|  |         """Report on deprecated handling of alarm state. | ||||||
|  |  | ||||||
|  |         Integrations should implement alarm_state instead of using state directly. | ||||||
|  |         """ | ||||||
|  |         report_usage( | ||||||
|  |             "is setting state directly." | ||||||
|  |             f" Entity {self.entity_id} ({type(self)}) should implement the 'alarm_state'" | ||||||
|  |             " property and return its state using the AlarmControlPanelState enum", | ||||||
|  |             core_integration_behavior=ReportBehavior.ERROR, | ||||||
|  |             custom_integration_behavior=ReportBehavior.LOG, | ||||||
|  |             breaks_in_ha_version="2025.11", | ||||||
|  |             integration_domain=self.platform.platform_name if self.platform else None, | ||||||
|  |             exclude_integrations={DOMAIN}, | ||||||
|  |         ) | ||||||
|  |  | ||||||
|     @final |     @final | ||||||
|     @property |     @property | ||||||
|     def state(self) -> str | None: |     def state(self) -> str | None: | ||||||
|         """Return the current state.""" |         """Return the current state.""" | ||||||
|         return self.alarm_state |         if (alarm_state := self.alarm_state) is not None: | ||||||
|  |             return alarm_state | ||||||
|  |         if self._attr_state is not None: | ||||||
|  |             # Backwards compatibility for integrations that set state directly | ||||||
|  |             # Should be removed in 2025.11 | ||||||
|  |             if TYPE_CHECKING: | ||||||
|  |                 assert isinstance(self._attr_state, str) | ||||||
|  |             return self._attr_state | ||||||
|  |         return None | ||||||
|  |  | ||||||
|     @cached_property |     @cached_property | ||||||
|     def alarm_state(self) -> AlarmControlPanelState | None: |     def alarm_state(self) -> AlarmControlPanelState | None: | ||||||
|   | |||||||
| @@ -1472,10 +1472,10 @@ class AlexaModeController(AlexaCapability): | |||||||
|             # Return state instead of position when using ModeController. |             # Return state instead of position when using ModeController. | ||||||
|             mode = self.entity.state |             mode = self.entity.state | ||||||
|             if mode in ( |             if mode in ( | ||||||
|                 cover.CoverState.OPEN, |                 cover.STATE_OPEN, | ||||||
|                 cover.CoverState.OPENING, |                 cover.STATE_OPENING, | ||||||
|                 cover.CoverState.CLOSED, |                 cover.STATE_CLOSED, | ||||||
|                 cover.CoverState.CLOSING, |                 cover.STATE_CLOSING, | ||||||
|                 STATE_UNKNOWN, |                 STATE_UNKNOWN, | ||||||
|             ): |             ): | ||||||
|                 return f"{cover.ATTR_POSITION}.{mode}" |                 return f"{cover.ATTR_POSITION}.{mode}" | ||||||
| @@ -1594,11 +1594,11 @@ class AlexaModeController(AlexaCapability): | |||||||
|                 ["Position", AlexaGlobalCatalog.SETTING_OPENING], False |                 ["Position", AlexaGlobalCatalog.SETTING_OPENING], False | ||||||
|             ) |             ) | ||||||
|             self._resource.add_mode( |             self._resource.add_mode( | ||||||
|                 f"{cover.ATTR_POSITION}.{cover.CoverState.OPEN}", |                 f"{cover.ATTR_POSITION}.{cover.STATE_OPEN}", | ||||||
|                 [AlexaGlobalCatalog.VALUE_OPEN], |                 [AlexaGlobalCatalog.VALUE_OPEN], | ||||||
|             ) |             ) | ||||||
|             self._resource.add_mode( |             self._resource.add_mode( | ||||||
|                 f"{cover.ATTR_POSITION}.{cover.CoverState.CLOSED}", |                 f"{cover.ATTR_POSITION}.{cover.STATE_CLOSED}", | ||||||
|                 [AlexaGlobalCatalog.VALUE_CLOSE], |                 [AlexaGlobalCatalog.VALUE_CLOSE], | ||||||
|             ) |             ) | ||||||
|             self._resource.add_mode( |             self._resource.add_mode( | ||||||
| @@ -1651,22 +1651,22 @@ class AlexaModeController(AlexaCapability): | |||||||
|                 raise_labels.append(AlexaSemantics.ACTION_OPEN) |                 raise_labels.append(AlexaSemantics.ACTION_OPEN) | ||||||
|                 self._semantics.add_states_to_value( |                 self._semantics.add_states_to_value( | ||||||
|                     [AlexaSemantics.STATES_CLOSED], |                     [AlexaSemantics.STATES_CLOSED], | ||||||
|                     f"{cover.ATTR_POSITION}.{cover.CoverState.CLOSED}", |                     f"{cover.ATTR_POSITION}.{cover.STATE_CLOSED}", | ||||||
|                 ) |                 ) | ||||||
|                 self._semantics.add_states_to_value( |                 self._semantics.add_states_to_value( | ||||||
|                     [AlexaSemantics.STATES_OPEN], |                     [AlexaSemantics.STATES_OPEN], | ||||||
|                     f"{cover.ATTR_POSITION}.{cover.CoverState.OPEN}", |                     f"{cover.ATTR_POSITION}.{cover.STATE_OPEN}", | ||||||
|                 ) |                 ) | ||||||
|  |  | ||||||
|             self._semantics.add_action_to_directive( |             self._semantics.add_action_to_directive( | ||||||
|                 lower_labels, |                 lower_labels, | ||||||
|                 "SetMode", |                 "SetMode", | ||||||
|                 {"mode": f"{cover.ATTR_POSITION}.{cover.CoverState.CLOSED}"}, |                 {"mode": f"{cover.ATTR_POSITION}.{cover.STATE_CLOSED}"}, | ||||||
|             ) |             ) | ||||||
|             self._semantics.add_action_to_directive( |             self._semantics.add_action_to_directive( | ||||||
|                 raise_labels, |                 raise_labels, | ||||||
|                 "SetMode", |                 "SetMode", | ||||||
|                 {"mode": f"{cover.ATTR_POSITION}.{cover.CoverState.OPEN}"}, |                 {"mode": f"{cover.ATTR_POSITION}.{cover.STATE_OPEN}"}, | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|             return self._semantics.serialize_semantics() |             return self._semantics.serialize_semantics() | ||||||
|   | |||||||
| @@ -1261,9 +1261,9 @@ async def async_api_set_mode( | |||||||
|     elif instance == f"{cover.DOMAIN}.{cover.ATTR_POSITION}": |     elif instance == f"{cover.DOMAIN}.{cover.ATTR_POSITION}": | ||||||
|         position = mode.split(".")[1] |         position = mode.split(".")[1] | ||||||
|  |  | ||||||
|         if position == cover.CoverState.CLOSED: |         if position == cover.STATE_CLOSED: | ||||||
|             service = cover.SERVICE_CLOSE_COVER |             service = cover.SERVICE_CLOSE_COVER | ||||||
|         elif position == cover.CoverState.OPEN: |         elif position == cover.STATE_OPEN: | ||||||
|             service = cover.SERVICE_OPEN_COVER |             service = cover.SERVICE_OPEN_COVER | ||||||
|         elif position == "custom": |         elif position == "custom": | ||||||
|             service = cover.SERVICE_STOP_COVER |             service = cover.SERVICE_STOP_COVER | ||||||
|   | |||||||
| @@ -8,5 +8,5 @@ | |||||||
|   "iot_class": "cloud_polling", |   "iot_class": "cloud_polling", | ||||||
|   "loggers": ["aioamazondevices"], |   "loggers": ["aioamazondevices"], | ||||||
|   "quality_scale": "platinum", |   "quality_scale": "platinum", | ||||||
|   "requirements": ["aioamazondevices==6.4.4"] |   "requirements": ["aioamazondevices==6.2.9"] | ||||||
| } | } | ||||||
|   | |||||||
| @@ -41,11 +41,6 @@ APPS_NEW_ID = "add_new" | |||||||
| CONF_APP_DELETE = "app_delete" | CONF_APP_DELETE = "app_delete" | ||||||
| CONF_APP_ID = "app_id" | CONF_APP_ID = "app_id" | ||||||
|  |  | ||||||
| _EXAMPLE_APP_ID = "com.plexapp.android" |  | ||||||
| _EXAMPLE_APP_PLAY_STORE_URL = ( |  | ||||||
|     f"https://play.google.com/store/apps/details?id={_EXAMPLE_APP_ID}" |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| STEP_PAIR_DATA_SCHEMA = vol.Schema( | STEP_PAIR_DATA_SCHEMA = vol.Schema( | ||||||
|     { |     { | ||||||
|         vol.Required("pin"): str, |         vol.Required("pin"): str, | ||||||
| @@ -360,7 +355,5 @@ class AndroidTVRemoteOptionsFlowHandler(OptionsFlowWithReload): | |||||||
|             data_schema=data_schema, |             data_schema=data_schema, | ||||||
|             description_placeholders={ |             description_placeholders={ | ||||||
|                 "app_id": f"`{app_id}`" if app_id != APPS_NEW_ID else "", |                 "app_id": f"`{app_id}`" if app_id != APPS_NEW_ID else "", | ||||||
|                 "example_app_id": _EXAMPLE_APP_ID, |  | ||||||
|                 "example_app_play_store_url": _EXAMPLE_APP_PLAY_STORE_URL, |  | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|   | |||||||
| @@ -75,7 +75,7 @@ | |||||||
|         }, |         }, | ||||||
|         "data_description": { |         "data_description": { | ||||||
|           "app_name": "Name of the application as you would like it to be displayed in Home Assistant.", |           "app_name": "Name of the application as you would like it to be displayed in Home Assistant.", | ||||||
|           "app_id": "E.g. {example_app_id} for {example_app_play_store_url}", |           "app_id": "E.g. com.plexapp.android for https://play.google.com/store/apps/details?id=com.plexapp.android", | ||||||
|           "app_icon": "Image URL. From the Play Store app page, right click on the icon and select 'Copy image address' and then paste it here. Alternatively, download the image, upload it under /config/www/ and use the URL /local/filename", |           "app_icon": "Image URL. From the Play Store app page, right click on the icon and select 'Copy image address' and then paste it here. Alternatively, download the image, upload it under /config/www/ and use the URL /local/filename", | ||||||
|           "app_delete": "Check this box to delete the application from the list." |           "app_delete": "Check this box to delete the application from the list." | ||||||
|         } |         } | ||||||
|   | |||||||
| @@ -4,15 +4,12 @@ from __future__ import annotations | |||||||
|  |  | ||||||
| from collections.abc import Mapping | from collections.abc import Mapping | ||||||
| from functools import partial | from functools import partial | ||||||
| import json |  | ||||||
| import logging | import logging | ||||||
| from typing import Any, cast | from typing import Any, cast | ||||||
|  |  | ||||||
| import anthropic | import anthropic | ||||||
| import voluptuous as vol | import voluptuous as vol | ||||||
| from voluptuous_openapi import convert |  | ||||||
|  |  | ||||||
| from homeassistant.components.zone import ENTITY_ID_HOME |  | ||||||
| from homeassistant.config_entries import ( | from homeassistant.config_entries import ( | ||||||
|     ConfigEntry, |     ConfigEntry, | ||||||
|     ConfigEntryState, |     ConfigEntryState, | ||||||
| @@ -21,13 +18,7 @@ from homeassistant.config_entries import ( | |||||||
|     ConfigSubentryFlow, |     ConfigSubentryFlow, | ||||||
|     SubentryFlowResult, |     SubentryFlowResult, | ||||||
| ) | ) | ||||||
| from homeassistant.const import ( | from homeassistant.const import CONF_API_KEY, CONF_LLM_HASS_API, CONF_NAME | ||||||
|     ATTR_LATITUDE, |  | ||||||
|     ATTR_LONGITUDE, |  | ||||||
|     CONF_API_KEY, |  | ||||||
|     CONF_LLM_HASS_API, |  | ||||||
|     CONF_NAME, |  | ||||||
| ) |  | ||||||
| from homeassistant.core import HomeAssistant, callback | from homeassistant.core import HomeAssistant, callback | ||||||
| from homeassistant.helpers import llm | from homeassistant.helpers import llm | ||||||
| from homeassistant.helpers.selector import ( | from homeassistant.helpers.selector import ( | ||||||
| @@ -46,23 +37,12 @@ from .const import ( | |||||||
|     CONF_RECOMMENDED, |     CONF_RECOMMENDED, | ||||||
|     CONF_TEMPERATURE, |     CONF_TEMPERATURE, | ||||||
|     CONF_THINKING_BUDGET, |     CONF_THINKING_BUDGET, | ||||||
|     CONF_WEB_SEARCH, |  | ||||||
|     CONF_WEB_SEARCH_CITY, |  | ||||||
|     CONF_WEB_SEARCH_COUNTRY, |  | ||||||
|     CONF_WEB_SEARCH_MAX_USES, |  | ||||||
|     CONF_WEB_SEARCH_REGION, |  | ||||||
|     CONF_WEB_SEARCH_TIMEZONE, |  | ||||||
|     CONF_WEB_SEARCH_USER_LOCATION, |  | ||||||
|     DEFAULT_CONVERSATION_NAME, |     DEFAULT_CONVERSATION_NAME, | ||||||
|     DOMAIN, |     DOMAIN, | ||||||
|     RECOMMENDED_CHAT_MODEL, |     RECOMMENDED_CHAT_MODEL, | ||||||
|     RECOMMENDED_MAX_TOKENS, |     RECOMMENDED_MAX_TOKENS, | ||||||
|     RECOMMENDED_TEMPERATURE, |     RECOMMENDED_TEMPERATURE, | ||||||
|     RECOMMENDED_THINKING_BUDGET, |     RECOMMENDED_THINKING_BUDGET, | ||||||
|     RECOMMENDED_WEB_SEARCH, |  | ||||||
|     RECOMMENDED_WEB_SEARCH_MAX_USES, |  | ||||||
|     RECOMMENDED_WEB_SEARCH_USER_LOCATION, |  | ||||||
|     WEB_SEARCH_UNSUPPORTED_MODELS, |  | ||||||
| ) | ) | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) | _LOGGER = logging.getLogger(__name__) | ||||||
| @@ -188,14 +168,6 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow): | |||||||
|                 CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET |                 CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET | ||||||
|             ) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS): |             ) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS): | ||||||
|                 errors[CONF_THINKING_BUDGET] = "thinking_budget_too_large" |                 errors[CONF_THINKING_BUDGET] = "thinking_budget_too_large" | ||||||
|             if user_input.get(CONF_WEB_SEARCH, RECOMMENDED_WEB_SEARCH): |  | ||||||
|                 model = user_input.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL) |  | ||||||
|                 if model.startswith(tuple(WEB_SEARCH_UNSUPPORTED_MODELS)): |  | ||||||
|                     errors[CONF_WEB_SEARCH] = "web_search_unsupported_model" |  | ||||||
|                 elif user_input.get( |  | ||||||
|                     CONF_WEB_SEARCH_USER_LOCATION, RECOMMENDED_WEB_SEARCH_USER_LOCATION |  | ||||||
|                 ): |  | ||||||
|                     user_input.update(await self._get_location_data()) |  | ||||||
|  |  | ||||||
|             if not errors: |             if not errors: | ||||||
|                 if self._is_new: |                 if self._is_new: | ||||||
| @@ -243,68 +215,6 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow): | |||||||
|             errors=errors or None, |             errors=errors or None, | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     async def _get_location_data(self) -> dict[str, str]: |  | ||||||
|         """Get approximate location data of the user.""" |  | ||||||
|         location_data: dict[str, str] = {} |  | ||||||
|         zone_home = self.hass.states.get(ENTITY_ID_HOME) |  | ||||||
|         if zone_home is not None: |  | ||||||
|             client = await self.hass.async_add_executor_job( |  | ||||||
|                 partial( |  | ||||||
|                     anthropic.AsyncAnthropic, |  | ||||||
|                     api_key=self._get_entry().data[CONF_API_KEY], |  | ||||||
|                 ) |  | ||||||
|             ) |  | ||||||
|             location_schema = vol.Schema( |  | ||||||
|                 { |  | ||||||
|                     vol.Optional( |  | ||||||
|                         CONF_WEB_SEARCH_CITY, |  | ||||||
|                         description="Free text input for the city, e.g. `San Francisco`", |  | ||||||
|                     ): str, |  | ||||||
|                     vol.Optional( |  | ||||||
|                         CONF_WEB_SEARCH_REGION, |  | ||||||
|                         description="Free text input for the region, e.g. `California`", |  | ||||||
|                     ): str, |  | ||||||
|                 } |  | ||||||
|             ) |  | ||||||
|             response = await client.messages.create( |  | ||||||
|                 model=RECOMMENDED_CHAT_MODEL, |  | ||||||
|                 messages=[ |  | ||||||
|                     { |  | ||||||
|                         "role": "user", |  | ||||||
|                         "content": "Where are the following coordinates located: " |  | ||||||
|                         f"({zone_home.attributes[ATTR_LATITUDE]}," |  | ||||||
|                         f" {zone_home.attributes[ATTR_LONGITUDE]})? Please respond " |  | ||||||
|                         "only with a JSON object using the following schema:\n" |  | ||||||
|                         f"{convert(location_schema)}", |  | ||||||
|                     }, |  | ||||||
|                     { |  | ||||||
|                         "role": "assistant", |  | ||||||
|                         "content": "{",  # hints the model to skip any preamble |  | ||||||
|                     }, |  | ||||||
|                 ], |  | ||||||
|                 max_tokens=RECOMMENDED_MAX_TOKENS, |  | ||||||
|             ) |  | ||||||
|             _LOGGER.debug("Model response: %s", response.content) |  | ||||||
|             location_data = location_schema( |  | ||||||
|                 json.loads( |  | ||||||
|                     "{" |  | ||||||
|                     + "".join( |  | ||||||
|                         block.text |  | ||||||
|                         for block in response.content |  | ||||||
|                         if isinstance(block, anthropic.types.TextBlock) |  | ||||||
|                     ) |  | ||||||
|                 ) |  | ||||||
|                 or {} |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         if self.hass.config.country: |  | ||||||
|             location_data[CONF_WEB_SEARCH_COUNTRY] = self.hass.config.country |  | ||||||
|         location_data[CONF_WEB_SEARCH_TIMEZONE] = self.hass.config.time_zone |  | ||||||
|  |  | ||||||
|         _LOGGER.debug("Location data: %s", location_data) |  | ||||||
|  |  | ||||||
|         return location_data |  | ||||||
|  |  | ||||||
|     async_step_user = async_step_set_options |     async_step_user = async_step_set_options | ||||||
|     async_step_reconfigure = async_step_set_options |     async_step_reconfigure = async_step_set_options | ||||||
|  |  | ||||||
| @@ -363,18 +273,6 @@ def anthropic_config_option_schema( | |||||||
|                 CONF_THINKING_BUDGET, |                 CONF_THINKING_BUDGET, | ||||||
|                 default=RECOMMENDED_THINKING_BUDGET, |                 default=RECOMMENDED_THINKING_BUDGET, | ||||||
|             ): int, |             ): int, | ||||||
|             vol.Optional( |  | ||||||
|                 CONF_WEB_SEARCH, |  | ||||||
|                 default=RECOMMENDED_WEB_SEARCH, |  | ||||||
|             ): bool, |  | ||||||
|             vol.Optional( |  | ||||||
|                 CONF_WEB_SEARCH_MAX_USES, |  | ||||||
|                 default=RECOMMENDED_WEB_SEARCH_MAX_USES, |  | ||||||
|             ): int, |  | ||||||
|             vol.Optional( |  | ||||||
|                 CONF_WEB_SEARCH_USER_LOCATION, |  | ||||||
|                 default=RECOMMENDED_WEB_SEARCH_USER_LOCATION, |  | ||||||
|             ): bool, |  | ||||||
|         } |         } | ||||||
|     ) |     ) | ||||||
|     return schema |     return schema | ||||||
|   | |||||||
| @@ -18,26 +18,9 @@ RECOMMENDED_TEMPERATURE = 1.0 | |||||||
| CONF_THINKING_BUDGET = "thinking_budget" | CONF_THINKING_BUDGET = "thinking_budget" | ||||||
| RECOMMENDED_THINKING_BUDGET = 0 | RECOMMENDED_THINKING_BUDGET = 0 | ||||||
| MIN_THINKING_BUDGET = 1024 | MIN_THINKING_BUDGET = 1024 | ||||||
| CONF_WEB_SEARCH = "web_search" |  | ||||||
| RECOMMENDED_WEB_SEARCH = False |  | ||||||
| CONF_WEB_SEARCH_USER_LOCATION = "user_location" |  | ||||||
| RECOMMENDED_WEB_SEARCH_USER_LOCATION = False |  | ||||||
| CONF_WEB_SEARCH_MAX_USES = "web_search_max_uses" |  | ||||||
| RECOMMENDED_WEB_SEARCH_MAX_USES = 5 |  | ||||||
| CONF_WEB_SEARCH_CITY = "city" |  | ||||||
| CONF_WEB_SEARCH_REGION = "region" |  | ||||||
| CONF_WEB_SEARCH_COUNTRY = "country" |  | ||||||
| CONF_WEB_SEARCH_TIMEZONE = "timezone" |  | ||||||
|  |  | ||||||
| NON_THINKING_MODELS = [ | NON_THINKING_MODELS = [ | ||||||
|     "claude-3-5",  # Both sonnet and haiku |     "claude-3-5",  # Both sonnet and haiku | ||||||
|     "claude-3-opus", |     "claude-3-opus", | ||||||
|     "claude-3-haiku", |     "claude-3-haiku", | ||||||
| ] | ] | ||||||
|  |  | ||||||
| WEB_SEARCH_UNSUPPORTED_MODELS = [ |  | ||||||
|     "claude-3-haiku", |  | ||||||
|     "claude-3-opus", |  | ||||||
|     "claude-3-5-sonnet-20240620", |  | ||||||
|     "claude-3-5-sonnet-20241022", |  | ||||||
| ] |  | ||||||
|   | |||||||
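Note (not part of the diff): NON_THINKING_MODELS above, like the removed WEB_SEARCH_UNSUPPORTED_MODELS, is consumed as a prefix list via model.startswith(tuple(...)) in config_flow.py and entity.py. A small self-contained sketch of that check, with example model ids assumed:

    # Sketch of the prefix-check pattern used with these constant lists.
    NON_THINKING_MODELS = [
        "claude-3-5",  # Both sonnet and haiku
        "claude-3-opus",
        "claude-3-haiku",
    ]

    def supports_extended_thinking(model: str) -> bool:
        """Return False when the model id starts with a known non-thinking prefix."""
        return not model.startswith(tuple(NON_THINKING_MODELS))

    print(supports_extended_thinking("claude-3-5-haiku-latest"))   # False
    print(supports_extended_thinking("claude-sonnet-4-20250514"))  # True (example id)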
| @@ -1,17 +1,12 @@ | |||||||
| """Base entity for Anthropic.""" | """Base entity for Anthropic.""" | ||||||
|  |  | ||||||
| from collections.abc import AsyncGenerator, Callable, Iterable | from collections.abc import AsyncGenerator, Callable, Iterable | ||||||
| from dataclasses import dataclass, field |  | ||||||
| import json | import json | ||||||
| from typing import Any | from typing import Any | ||||||
|  |  | ||||||
| import anthropic | import anthropic | ||||||
| from anthropic import AsyncStream | from anthropic import AsyncStream | ||||||
| from anthropic.types import ( | from anthropic.types import ( | ||||||
|     CitationsDelta, |  | ||||||
|     CitationsWebSearchResultLocation, |  | ||||||
|     CitationWebSearchResultLocationParam, |  | ||||||
|     ContentBlockParam, |  | ||||||
|     InputJSONDelta, |     InputJSONDelta, | ||||||
|     MessageDeltaUsage, |     MessageDeltaUsage, | ||||||
|     MessageParam, |     MessageParam, | ||||||
| @@ -21,16 +16,11 @@ from anthropic.types import ( | |||||||
|     RawContentBlockStopEvent, |     RawContentBlockStopEvent, | ||||||
|     RawMessageDeltaEvent, |     RawMessageDeltaEvent, | ||||||
|     RawMessageStartEvent, |     RawMessageStartEvent, | ||||||
|     RawMessageStopEvent, |  | ||||||
|     RedactedThinkingBlock, |     RedactedThinkingBlock, | ||||||
|     RedactedThinkingBlockParam, |     RedactedThinkingBlockParam, | ||||||
|     ServerToolUseBlock, |  | ||||||
|     ServerToolUseBlockParam, |  | ||||||
|     SignatureDelta, |     SignatureDelta, | ||||||
|     TextBlock, |     TextBlock, | ||||||
|     TextBlockParam, |     TextBlockParam, | ||||||
|     TextCitation, |  | ||||||
|     TextCitationParam, |  | ||||||
|     TextDelta, |     TextDelta, | ||||||
|     ThinkingBlock, |     ThinkingBlock, | ||||||
|     ThinkingBlockParam, |     ThinkingBlockParam, | ||||||
| @@ -39,15 +29,9 @@ from anthropic.types import ( | |||||||
|     ThinkingDelta, |     ThinkingDelta, | ||||||
|     ToolParam, |     ToolParam, | ||||||
|     ToolResultBlockParam, |     ToolResultBlockParam, | ||||||
|     ToolUnionParam, |  | ||||||
|     ToolUseBlock, |     ToolUseBlock, | ||||||
|     ToolUseBlockParam, |     ToolUseBlockParam, | ||||||
|     Usage, |     Usage, | ||||||
|     WebSearchTool20250305Param, |  | ||||||
|     WebSearchToolRequestErrorParam, |  | ||||||
|     WebSearchToolResultBlock, |  | ||||||
|     WebSearchToolResultBlockParam, |  | ||||||
|     WebSearchToolResultError, |  | ||||||
| ) | ) | ||||||
| from anthropic.types.message_create_params import MessageCreateParamsStreaming | from anthropic.types.message_create_params import MessageCreateParamsStreaming | ||||||
| from voluptuous_openapi import convert | from voluptuous_openapi import convert | ||||||
| @@ -64,13 +48,6 @@ from .const import ( | |||||||
|     CONF_MAX_TOKENS, |     CONF_MAX_TOKENS, | ||||||
|     CONF_TEMPERATURE, |     CONF_TEMPERATURE, | ||||||
|     CONF_THINKING_BUDGET, |     CONF_THINKING_BUDGET, | ||||||
|     CONF_WEB_SEARCH, |  | ||||||
|     CONF_WEB_SEARCH_CITY, |  | ||||||
|     CONF_WEB_SEARCH_COUNTRY, |  | ||||||
|     CONF_WEB_SEARCH_MAX_USES, |  | ||||||
|     CONF_WEB_SEARCH_REGION, |  | ||||||
|     CONF_WEB_SEARCH_TIMEZONE, |  | ||||||
|     CONF_WEB_SEARCH_USER_LOCATION, |  | ||||||
|     DOMAIN, |     DOMAIN, | ||||||
|     LOGGER, |     LOGGER, | ||||||
|     MIN_THINKING_BUDGET, |     MIN_THINKING_BUDGET, | ||||||
| @@ -96,69 +73,6 @@ def _format_tool( | |||||||
|     ) |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclass(slots=True) |  | ||||||
| class CitationDetails: |  | ||||||
|     """Citation details for a content part.""" |  | ||||||
|  |  | ||||||
|     index: int = 0 |  | ||||||
|     """Start position of the text.""" |  | ||||||
|  |  | ||||||
|     length: int = 0 |  | ||||||
|     """Length of the relevant data.""" |  | ||||||
|  |  | ||||||
|     citations: list[TextCitationParam] = field(default_factory=list) |  | ||||||
|     """Citations for the content part.""" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @dataclass(slots=True) |  | ||||||
| class ContentDetails: |  | ||||||
|     """Native data for AssistantContent.""" |  | ||||||
|  |  | ||||||
|     citation_details: list[CitationDetails] = field(default_factory=list) |  | ||||||
|  |  | ||||||
|     def has_content(self) -> bool: |  | ||||||
|         """Check if there is any content.""" |  | ||||||
|         return any(detail.length > 0 for detail in self.citation_details) |  | ||||||
|  |  | ||||||
|     def has_citations(self) -> bool: |  | ||||||
|         """Check if there are any citations.""" |  | ||||||
|         return any(detail.citations for detail in self.citation_details) |  | ||||||
|  |  | ||||||
|     def add_citation_detail(self) -> None: |  | ||||||
|         """Add a new citation detail.""" |  | ||||||
|         if not self.citation_details or self.citation_details[-1].length > 0: |  | ||||||
|             self.citation_details.append( |  | ||||||
|                 CitationDetails( |  | ||||||
|                     index=self.citation_details[-1].index |  | ||||||
|                     + self.citation_details[-1].length |  | ||||||
|                     if self.citation_details |  | ||||||
|                     else 0 |  | ||||||
|                 ) |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|     def add_citation(self, citation: TextCitation) -> None: |  | ||||||
|         """Add a citation to the current detail.""" |  | ||||||
|         if not self.citation_details: |  | ||||||
|             self.citation_details.append(CitationDetails()) |  | ||||||
|         citation_param: TextCitationParam | None = None |  | ||||||
|         if isinstance(citation, CitationsWebSearchResultLocation): |  | ||||||
|             citation_param = CitationWebSearchResultLocationParam( |  | ||||||
|                 type="web_search_result_location", |  | ||||||
|                 title=citation.title, |  | ||||||
|                 url=citation.url, |  | ||||||
|                 cited_text=citation.cited_text, |  | ||||||
|                 encrypted_index=citation.encrypted_index, |  | ||||||
|             ) |  | ||||||
|         if citation_param: |  | ||||||
|             self.citation_details[-1].citations.append(citation_param) |  | ||||||
|  |  | ||||||
|     def delete_empty(self) -> None: |  | ||||||
|         """Delete empty citation details.""" |  | ||||||
|         self.citation_details = [ |  | ||||||
|             detail for detail in self.citation_details if detail.citations |  | ||||||
|         ] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def _convert_content( | def _convert_content( | ||||||
|     chat_content: Iterable[conversation.Content], |     chat_content: Iterable[conversation.Content], | ||||||
| ) -> list[MessageParam]: | ) -> list[MessageParam]: | ||||||
| @@ -167,31 +81,15 @@ def _convert_content( | |||||||
|  |  | ||||||
|     for content in chat_content: |     for content in chat_content: | ||||||
|         if isinstance(content, conversation.ToolResultContent): |         if isinstance(content, conversation.ToolResultContent): | ||||||
|             if content.tool_name == "web_search": |  | ||||||
|                 tool_result_block: ContentBlockParam = WebSearchToolResultBlockParam( |  | ||||||
|                     type="web_search_tool_result", |  | ||||||
|                     tool_use_id=content.tool_call_id, |  | ||||||
|                     content=content.tool_result["content"] |  | ||||||
|                     if "content" in content.tool_result |  | ||||||
|                     else WebSearchToolRequestErrorParam( |  | ||||||
|                         type="web_search_tool_result_error", |  | ||||||
|                         error_code=content.tool_result.get("error_code", "unavailable"),  # type: ignore[typeddict-item] |  | ||||||
|                     ), |  | ||||||
|                 ) |  | ||||||
|                 external_tool = True |  | ||||||
|             else: |  | ||||||
|             tool_result_block = ToolResultBlockParam( |             tool_result_block = ToolResultBlockParam( | ||||||
|                 type="tool_result", |                 type="tool_result", | ||||||
|                 tool_use_id=content.tool_call_id, |                 tool_use_id=content.tool_call_id, | ||||||
|                 content=json.dumps(content.tool_result), |                 content=json.dumps(content.tool_result), | ||||||
|             ) |             ) | ||||||
|                 external_tool = False |             if not messages or messages[-1]["role"] != "user": | ||||||
|             if not messages or messages[-1]["role"] != ( |  | ||||||
|                 "assistant" if external_tool else "user" |  | ||||||
|             ): |  | ||||||
|                 messages.append( |                 messages.append( | ||||||
|                     MessageParam( |                     MessageParam( | ||||||
|                         role="assistant" if external_tool else "user", |                         role="user", | ||||||
|                         content=[tool_result_block], |                         content=[tool_result_block], | ||||||
|                     ) |                     ) | ||||||
|                 ) |                 ) | ||||||
| @@ -253,56 +151,13 @@ def _convert_content( | |||||||
|                         redacted_thinking_block |                         redacted_thinking_block | ||||||
|                     ) |                     ) | ||||||
|             if content.content: |             if content.content: | ||||||
|                 current_index = 0 |  | ||||||
|                 for detail in ( |  | ||||||
|                     content.native.citation_details |  | ||||||
|                     if isinstance(content.native, ContentDetails) |  | ||||||
|                     else [CitationDetails(length=len(content.content))] |  | ||||||
|                 ): |  | ||||||
|                     if detail.index > current_index: |  | ||||||
|                         # Add text block for any text without citations |  | ||||||
|                 messages[-1]["content"].append(  # type: ignore[union-attr] |                 messages[-1]["content"].append(  # type: ignore[union-attr] | ||||||
|                             TextBlockParam( |                     TextBlockParam(type="text", text=content.content) | ||||||
|                                 type="text", |  | ||||||
|                                 text=content.content[current_index : detail.index], |  | ||||||
|                             ) |  | ||||||
|                         ) |  | ||||||
|                     messages[-1]["content"].append(  # type: ignore[union-attr] |  | ||||||
|                         TextBlockParam( |  | ||||||
|                             type="text", |  | ||||||
|                             text=content.content[ |  | ||||||
|                                 detail.index : detail.index + detail.length |  | ||||||
|                             ], |  | ||||||
|                             citations=detail.citations, |  | ||||||
|                         ) |  | ||||||
|                         if detail.citations |  | ||||||
|                         else TextBlockParam( |  | ||||||
|                             type="text", |  | ||||||
|                             text=content.content[ |  | ||||||
|                                 detail.index : detail.index + detail.length |  | ||||||
|                             ], |  | ||||||
|                         ) |  | ||||||
|                     ) |  | ||||||
|                     current_index = detail.index + detail.length |  | ||||||
|                 if current_index < len(content.content): |  | ||||||
|                     # Add text block for any remaining text without citations |  | ||||||
|                     messages[-1]["content"].append(  # type: ignore[union-attr] |  | ||||||
|                         TextBlockParam( |  | ||||||
|                             type="text", |  | ||||||
|                             text=content.content[current_index:], |  | ||||||
|                         ) |  | ||||||
|                 ) |                 ) | ||||||
|             if content.tool_calls: |             if content.tool_calls: | ||||||
|                 messages[-1]["content"].extend(  # type: ignore[union-attr] |                 messages[-1]["content"].extend(  # type: ignore[union-attr] | ||||||
|                     [ |                     [ | ||||||
|                         ServerToolUseBlockParam( |                         ToolUseBlockParam( | ||||||
|                             type="server_tool_use", |  | ||||||
|                             id=tool_call.id, |  | ||||||
|                             name="web_search", |  | ||||||
|                             input=tool_call.tool_args, |  | ||||||
|                         ) |  | ||||||
|                         if tool_call.external and tool_call.tool_name == "web_search" |  | ||||||
|                         else ToolUseBlockParam( |  | ||||||
|                             type="tool_use", |                             type="tool_use", | ||||||
|                             id=tool_call.id, |                             id=tool_call.id, | ||||||
|                             name=tool_call.tool_name, |                             name=tool_call.tool_name, | ||||||
| @@ -318,12 +173,10 @@ def _convert_content( | |||||||
|     return messages |     return messages | ||||||
|  |  | ||||||
|  |  | ||||||
| async def _transform_stream(  # noqa: C901 - This is complex, but better to have it in one place | async def _transform_stream( | ||||||
|     chat_log: conversation.ChatLog, |     chat_log: conversation.ChatLog, | ||||||
|     stream: AsyncStream[MessageStreamEvent], |     stream: AsyncStream[MessageStreamEvent], | ||||||
| ) -> AsyncGenerator[ | ) -> AsyncGenerator[conversation.AssistantContentDeltaDict]: | ||||||
|     conversation.AssistantContentDeltaDict | conversation.ToolResultContentDeltaDict |  | ||||||
| ]: |  | ||||||
|     """Transform the response stream into HA format. |     """Transform the response stream into HA format. | ||||||
|  |  | ||||||
|     A typical stream of responses might look something like the following: |     A typical stream of responses might look something like the following: | ||||||
| @@ -356,13 +209,11 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have | |||||||
|     if stream is None: |     if stream is None: | ||||||
|         raise TypeError("Expected a stream of messages") |         raise TypeError("Expected a stream of messages") | ||||||
|  |  | ||||||
|     current_tool_block: ToolUseBlockParam | ServerToolUseBlockParam | None = None |     current_tool_block: ToolUseBlockParam | None = None | ||||||
|     current_tool_args: str |     current_tool_args: str | ||||||
|     content_details = ContentDetails() |  | ||||||
|     content_details.add_citation_detail() |  | ||||||
|     input_usage: Usage | None = None |     input_usage: Usage | None = None | ||||||
|  |     has_content = False | ||||||
|     has_native = False |     has_native = False | ||||||
|     first_block: bool |  | ||||||
|  |  | ||||||
|     async for response in stream: |     async for response in stream: | ||||||
|         LOGGER.debug("Received response: %s", response) |         LOGGER.debug("Received response: %s", response) | ||||||
| @@ -371,7 +222,6 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have | |||||||
|             if response.message.role != "assistant": |             if response.message.role != "assistant": | ||||||
|                 raise ValueError("Unexpected message role") |                 raise ValueError("Unexpected message role") | ||||||
|             input_usage = response.message.usage |             input_usage = response.message.usage | ||||||
|             first_block = True |  | ||||||
|         elif isinstance(response, RawContentBlockStartEvent): |         elif isinstance(response, RawContentBlockStartEvent): | ||||||
|             if isinstance(response.content_block, ToolUseBlock): |             if isinstance(response.content_block, ToolUseBlock): | ||||||
|                 current_tool_block = ToolUseBlockParam( |                 current_tool_block = ToolUseBlockParam( | ||||||
| @@ -382,37 +232,17 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have | |||||||
|                 ) |                 ) | ||||||
|                 current_tool_args = "" |                 current_tool_args = "" | ||||||
|             elif isinstance(response.content_block, TextBlock): |             elif isinstance(response.content_block, TextBlock): | ||||||
|                 if (  # Do not start a new assistant content just for citations, concatenate consecutive blocks with citations instead. |                 if has_content: | ||||||
|                     first_block |  | ||||||
|                     or ( |  | ||||||
|                         not content_details.has_citations() |  | ||||||
|                         and response.content_block.citations is None |  | ||||||
|                         and content_details.has_content() |  | ||||||
|                     ) |  | ||||||
|                 ): |  | ||||||
|                     if content_details.has_citations(): |  | ||||||
|                         content_details.delete_empty() |  | ||||||
|                         yield {"native": content_details} |  | ||||||
|                     content_details = ContentDetails() |  | ||||||
|                     yield {"role": "assistant"} |                     yield {"role": "assistant"} | ||||||
|                     has_native = False |                     has_native = False | ||||||
|                     first_block = False |                 has_content = True | ||||||
|                 content_details.add_citation_detail() |  | ||||||
|                 if response.content_block.text: |                 if response.content_block.text: | ||||||
|                     content_details.citation_details[-1].length += len( |  | ||||||
|                         response.content_block.text |  | ||||||
|                     ) |  | ||||||
|                     yield {"content": response.content_block.text} |                     yield {"content": response.content_block.text} | ||||||
|             elif isinstance(response.content_block, ThinkingBlock): |             elif isinstance(response.content_block, ThinkingBlock): | ||||||
|                 if first_block or has_native: |                 if has_native: | ||||||
|                     if content_details.has_citations(): |  | ||||||
|                         content_details.delete_empty() |  | ||||||
|                         yield {"native": content_details} |  | ||||||
|                     content_details = ContentDetails() |  | ||||||
|                     content_details.add_citation_detail() |  | ||||||
|                     yield {"role": "assistant"} |                     yield {"role": "assistant"} | ||||||
|                     has_native = False |                     has_native = False | ||||||
|                     first_block = False |                     has_content = False | ||||||
|             elif isinstance(response.content_block, RedactedThinkingBlock): |             elif isinstance(response.content_block, RedactedThinkingBlock): | ||||||
|                 LOGGER.debug( |                 LOGGER.debug( | ||||||
|                     "Some of Claude’s internal reasoning has been automatically " |                     "Some of Claude’s internal reasoning has been automatically " | ||||||
| @@ -420,60 +250,15 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have | |||||||
|                     "responses" |                     "responses" | ||||||
|                 ) |                 ) | ||||||
|                 if has_native: |                 if has_native: | ||||||
|                     if content_details.has_citations(): |  | ||||||
|                         content_details.delete_empty() |  | ||||||
|                         yield {"native": content_details} |  | ||||||
|                     content_details = ContentDetails() |  | ||||||
|                     content_details.add_citation_detail() |  | ||||||
|                     yield {"role": "assistant"} |                     yield {"role": "assistant"} | ||||||
|                     has_native = False |                     has_native = False | ||||||
|                     first_block = False |                     has_content = False | ||||||
|                 yield {"native": response.content_block} |                 yield {"native": response.content_block} | ||||||
|                 has_native = True |                 has_native = True | ||||||
|             elif isinstance(response.content_block, ServerToolUseBlock): |  | ||||||
|                 current_tool_block = ServerToolUseBlockParam( |  | ||||||
|                     type="server_tool_use", |  | ||||||
|                     id=response.content_block.id, |  | ||||||
|                     name=response.content_block.name, |  | ||||||
|                     input="", |  | ||||||
|                 ) |  | ||||||
|                 current_tool_args = "" |  | ||||||
|             elif isinstance(response.content_block, WebSearchToolResultBlock): |  | ||||||
|                 if content_details.has_citations(): |  | ||||||
|                     content_details.delete_empty() |  | ||||||
|                     yield {"native": content_details} |  | ||||||
|                 content_details = ContentDetails() |  | ||||||
|                 content_details.add_citation_detail() |  | ||||||
|                 yield { |  | ||||||
|                     "role": "tool_result", |  | ||||||
|                     "tool_call_id": response.content_block.tool_use_id, |  | ||||||
|                     "tool_name": "web_search", |  | ||||||
|                     "tool_result": { |  | ||||||
|                         "type": "web_search_tool_result_error", |  | ||||||
|                         "error_code": response.content_block.content.error_code, |  | ||||||
|                     } |  | ||||||
|                     if isinstance( |  | ||||||
|                         response.content_block.content, WebSearchToolResultError |  | ||||||
|                     ) |  | ||||||
|                     else { |  | ||||||
|                         "content": [ |  | ||||||
|                             { |  | ||||||
|                                 "type": "web_search_result", |  | ||||||
|                                 "encrypted_content": block.encrypted_content, |  | ||||||
|                                 "page_age": block.page_age, |  | ||||||
|                                 "title": block.title, |  | ||||||
|                                 "url": block.url, |  | ||||||
|                             } |  | ||||||
|                             for block in response.content_block.content |  | ||||||
|                         ] |  | ||||||
|                     }, |  | ||||||
|                 } |  | ||||||
|                 first_block = True |  | ||||||
|         elif isinstance(response, RawContentBlockDeltaEvent): |         elif isinstance(response, RawContentBlockDeltaEvent): | ||||||
|             if isinstance(response.delta, InputJSONDelta): |             if isinstance(response.delta, InputJSONDelta): | ||||||
|                 current_tool_args += response.delta.partial_json |                 current_tool_args += response.delta.partial_json | ||||||
|             elif isinstance(response.delta, TextDelta): |             elif isinstance(response.delta, TextDelta): | ||||||
|                 content_details.citation_details[-1].length += len(response.delta.text) |  | ||||||
|                 yield {"content": response.delta.text} |                 yield {"content": response.delta.text} | ||||||
|             elif isinstance(response.delta, ThinkingDelta): |             elif isinstance(response.delta, ThinkingDelta): | ||||||
|                 yield {"thinking_content": response.delta.thinking} |                 yield {"thinking_content": response.delta.thinking} | ||||||
| @@ -486,8 +271,6 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have | |||||||
|                     ) |                     ) | ||||||
|                 } |                 } | ||||||
|                 has_native = True |                 has_native = True | ||||||
|             elif isinstance(response.delta, CitationsDelta): |  | ||||||
|                 content_details.add_citation(response.delta.citation) |  | ||||||
|         elif isinstance(response, RawContentBlockStopEvent): |         elif isinstance(response, RawContentBlockStopEvent): | ||||||
|             if current_tool_block is not None: |             if current_tool_block is not None: | ||||||
|                 tool_args = json.loads(current_tool_args) if current_tool_args else {} |                 tool_args = json.loads(current_tool_args) if current_tool_args else {} | ||||||
| @@ -498,7 +281,6 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have | |||||||
|                             id=current_tool_block["id"], |                             id=current_tool_block["id"], | ||||||
|                             tool_name=current_tool_block["name"], |                             tool_name=current_tool_block["name"], | ||||||
|                             tool_args=tool_args, |                             tool_args=tool_args, | ||||||
|                             external=current_tool_block["type"] == "server_tool_use", |  | ||||||
|                         ) |                         ) | ||||||
|                     ] |                     ] | ||||||
|                 } |                 } | ||||||
| @@ -508,12 +290,6 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have | |||||||
|                 chat_log.async_trace(_create_token_stats(input_usage, usage)) |                 chat_log.async_trace(_create_token_stats(input_usage, usage)) | ||||||
|             if response.delta.stop_reason == "refusal": |             if response.delta.stop_reason == "refusal": | ||||||
|                 raise HomeAssistantError("Potential policy violation detected") |                 raise HomeAssistantError("Potential policy violation detected") | ||||||
|         elif isinstance(response, RawMessageStopEvent): |  | ||||||
|             if content_details.has_citations(): |  | ||||||
|                 content_details.delete_empty() |  | ||||||
|                 yield {"native": content_details} |  | ||||||
|             content_details = ContentDetails() |  | ||||||
|             content_details.add_citation_detail() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def _create_token_stats( | def _create_token_stats( | ||||||
| @@ -561,11 +337,21 @@ class AnthropicBaseLLMEntity(Entity): | |||||||
|         """Generate an answer for the chat log.""" |         """Generate an answer for the chat log.""" | ||||||
|         options = self.subentry.data |         options = self.subentry.data | ||||||
|  |  | ||||||
|  |         tools: list[ToolParam] | None = None | ||||||
|  |         if chat_log.llm_api: | ||||||
|  |             tools = [ | ||||||
|  |                 _format_tool(tool, chat_log.llm_api.custom_serializer) | ||||||
|  |                 for tool in chat_log.llm_api.tools | ||||||
|  |             ] | ||||||
|  |  | ||||||
|         system = chat_log.content[0] |         system = chat_log.content[0] | ||||||
|         if not isinstance(system, conversation.SystemContent): |         if not isinstance(system, conversation.SystemContent): | ||||||
|             raise TypeError("First message must be a system message") |             raise TypeError("First message must be a system message") | ||||||
|         messages = _convert_content(chat_log.content[1:]) |         messages = _convert_content(chat_log.content[1:]) | ||||||
|  |  | ||||||
|  |         client = self.entry.runtime_data | ||||||
|  |  | ||||||
|  |         thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET) | ||||||
|         model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL) |         model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL) | ||||||
|  |  | ||||||
|         model_args = MessageCreateParamsStreaming( |         model_args = MessageCreateParamsStreaming( | ||||||
| @@ -575,8 +361,8 @@ class AnthropicBaseLLMEntity(Entity): | |||||||
|             system=system.content, |             system=system.content, | ||||||
|             stream=True, |             stream=True, | ||||||
|         ) |         ) | ||||||
|  |         if tools: | ||||||
|         thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET) |             model_args["tools"] = tools | ||||||
|         if ( |         if ( | ||||||
|             not model.startswith(tuple(NON_THINKING_MODELS)) |             not model.startswith(tuple(NON_THINKING_MODELS)) | ||||||
|             and thinking_budget >= MIN_THINKING_BUDGET |             and thinking_budget >= MIN_THINKING_BUDGET | ||||||
| @@ -590,34 +376,6 @@ class AnthropicBaseLLMEntity(Entity): | |||||||
|                 CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE |                 CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         tools: list[ToolUnionParam] = [] |  | ||||||
|         if chat_log.llm_api: |  | ||||||
|             tools = [ |  | ||||||
|                 _format_tool(tool, chat_log.llm_api.custom_serializer) |  | ||||||
|                 for tool in chat_log.llm_api.tools |  | ||||||
|             ] |  | ||||||
|  |  | ||||||
|         if options.get(CONF_WEB_SEARCH): |  | ||||||
|             web_search = WebSearchTool20250305Param( |  | ||||||
|                 name="web_search", |  | ||||||
|                 type="web_search_20250305", |  | ||||||
|                 max_uses=options.get(CONF_WEB_SEARCH_MAX_USES), |  | ||||||
|             ) |  | ||||||
|             if options.get(CONF_WEB_SEARCH_USER_LOCATION): |  | ||||||
|                 web_search["user_location"] = { |  | ||||||
|                     "type": "approximate", |  | ||||||
|                     "city": options.get(CONF_WEB_SEARCH_CITY, ""), |  | ||||||
|                     "region": options.get(CONF_WEB_SEARCH_REGION, ""), |  | ||||||
|                     "country": options.get(CONF_WEB_SEARCH_COUNTRY, ""), |  | ||||||
|                     "timezone": options.get(CONF_WEB_SEARCH_TIMEZONE, ""), |  | ||||||
|                 } |  | ||||||
|             tools.append(web_search) |  | ||||||
|  |  | ||||||
|         if tools: |  | ||||||
|             model_args["tools"] = tools |  | ||||||
|  |  | ||||||
|         client = self.entry.runtime_data |  | ||||||
|  |  | ||||||
|         # To prevent infinite loops, we limit the number of iterations |         # To prevent infinite loops, we limit the number of iterations | ||||||
|         for _iteration in range(MAX_TOOL_ITERATIONS): |         for _iteration in range(MAX_TOOL_ITERATIONS): | ||||||
|             try: |             try: | ||||||
|   | |||||||
| @@ -35,17 +35,11 @@ | |||||||
|             "temperature": "Temperature", |             "temperature": "Temperature", | ||||||
|             "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]", |             "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]", | ||||||
|             "recommended": "Recommended model settings", |             "recommended": "Recommended model settings", | ||||||
|             "thinking_budget": "Thinking budget", |             "thinking_budget_tokens": "Thinking budget" | ||||||
|             "web_search": "Enable web search", |  | ||||||
|             "web_search_max_uses": "Maximum web searches", |  | ||||||
|             "user_location": "Include home location" |  | ||||||
|           }, |           }, | ||||||
|           "data_description": { |           "data_description": { | ||||||
|             "prompt": "Instruct how the LLM should respond. This can be a template.", |             "prompt": "Instruct how the LLM should respond. This can be a template.", | ||||||
|             "thinking_budget": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking.", |             "thinking_budget_tokens": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking." | ||||||
|             "web_search": "The web search tool gives Claude direct access to real-time web content, allowing it to answer questions with up-to-date information beyond its knowledge cutoff", |  | ||||||
|             "web_search_max_uses": "Limit the number of searches performed per response", |  | ||||||
|             "user_location": "Localize search results based on home location" |  | ||||||
|           } |           } | ||||||
|         } |         } | ||||||
|       }, |       }, | ||||||
| @@ -54,8 +48,7 @@ | |||||||
|         "entry_not_loaded": "Cannot add things while the configuration is disabled." |         "entry_not_loaded": "Cannot add things while the configuration is disabled." | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget.", |         "thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget." | ||||||
|         "web_search_unsupported_model": "Web search is not supported by the selected model. Please choose a compatible model or disable web search." |  | ||||||
|       } |       } | ||||||
|     } |     } | ||||||
|   } |   } | ||||||
|   | |||||||
| @@ -5,9 +5,14 @@ from __future__ import annotations | |||||||
| import asyncio | import asyncio | ||||||
| import logging | import logging | ||||||
| from random import randrange | from random import randrange | ||||||
| import sys |  | ||||||
| from typing import Any, cast | from typing import Any, cast | ||||||
|  |  | ||||||
|  | from pyatv import connect, exceptions, scan | ||||||
|  | from pyatv.conf import AppleTV | ||||||
|  | from pyatv.const import DeviceModel, Protocol | ||||||
|  | from pyatv.convert import model_str | ||||||
|  | from pyatv.interface import AppleTV as AppleTVInterface, DeviceListener | ||||||
|  |  | ||||||
| from homeassistant.components import zeroconf | from homeassistant.components import zeroconf | ||||||
| from homeassistant.config_entries import ConfigEntry | from homeassistant.config_entries import ConfigEntry | ||||||
| from homeassistant.const import ( | from homeassistant.const import ( | ||||||
| @@ -24,11 +29,7 @@ from homeassistant.const import ( | |||||||
|     Platform, |     Platform, | ||||||
| ) | ) | ||||||
| from homeassistant.core import Event, HomeAssistant, callback | from homeassistant.core import Event, HomeAssistant, callback | ||||||
| from homeassistant.exceptions import ( | from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady | ||||||
|     ConfigEntryAuthFailed, |  | ||||||
|     ConfigEntryNotReady, |  | ||||||
|     HomeAssistantError, |  | ||||||
| ) |  | ||||||
| from homeassistant.helpers import device_registry as dr | from homeassistant.helpers import device_registry as dr | ||||||
| from homeassistant.helpers.aiohttp_client import async_get_clientsession | from homeassistant.helpers.aiohttp_client import async_get_clientsession | ||||||
| from homeassistant.helpers.dispatcher import async_dispatcher_send | from homeassistant.helpers.dispatcher import async_dispatcher_send | ||||||
| @@ -42,18 +43,6 @@ from .const import ( | |||||||
|     SIGNAL_DISCONNECTED, |     SIGNAL_DISCONNECTED, | ||||||
| ) | ) | ||||||
|  |  | ||||||
| if sys.version_info < (3, 14): |  | ||||||
|     from pyatv import connect, exceptions, scan |  | ||||||
|     from pyatv.conf import AppleTV |  | ||||||
|     from pyatv.const import DeviceModel, Protocol |  | ||||||
|     from pyatv.convert import model_str |  | ||||||
|     from pyatv.interface import AppleTV as AppleTVInterface, DeviceListener |  | ||||||
| else: |  | ||||||
|  |  | ||||||
|     class DeviceListener: |  | ||||||
|         """Dummy class.""" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) | _LOGGER = logging.getLogger(__name__) | ||||||
|  |  | ||||||
| DEFAULT_NAME_TV = "Apple TV" | DEFAULT_NAME_TV = "Apple TV" | ||||||
| @@ -64,41 +53,31 @@ BACKOFF_TIME_UPPER_LIMIT = 300  # Five minutes | |||||||
|  |  | ||||||
| PLATFORMS = [Platform.MEDIA_PLAYER, Platform.REMOTE] | PLATFORMS = [Platform.MEDIA_PLAYER, Platform.REMOTE] | ||||||
|  |  | ||||||
| if sys.version_info < (3, 14): | AUTH_EXCEPTIONS = ( | ||||||
|     AUTH_EXCEPTIONS = ( |  | ||||||
|     exceptions.AuthenticationError, |     exceptions.AuthenticationError, | ||||||
|     exceptions.InvalidCredentialsError, |     exceptions.InvalidCredentialsError, | ||||||
|     exceptions.NoCredentialsError, |     exceptions.NoCredentialsError, | ||||||
|     ) | ) | ||||||
|     CONNECTION_TIMEOUT_EXCEPTIONS = ( | CONNECTION_TIMEOUT_EXCEPTIONS = ( | ||||||
|     OSError, |     OSError, | ||||||
|     asyncio.CancelledError, |     asyncio.CancelledError, | ||||||
|     TimeoutError, |     TimeoutError, | ||||||
|     exceptions.ConnectionLostError, |     exceptions.ConnectionLostError, | ||||||
|     exceptions.ConnectionFailedError, |     exceptions.ConnectionFailedError, | ||||||
|     ) | ) | ||||||
|     DEVICE_EXCEPTIONS = ( | DEVICE_EXCEPTIONS = ( | ||||||
|     exceptions.ProtocolError, |     exceptions.ProtocolError, | ||||||
|     exceptions.NoServiceError, |     exceptions.NoServiceError, | ||||||
|     exceptions.PairingError, |     exceptions.PairingError, | ||||||
|     exceptions.BackOffError, |     exceptions.BackOffError, | ||||||
|     exceptions.DeviceIdMissingError, |     exceptions.DeviceIdMissingError, | ||||||
|     ) | ) | ||||||
| else: |  | ||||||
|     AUTH_EXCEPTIONS = () |  | ||||||
|     CONNECTION_TIMEOUT_EXCEPTIONS = () |  | ||||||
|     DEVICE_EXCEPTIONS = () |  | ||||||
|  |  | ||||||
|  |  | ||||||
| type AppleTvConfigEntry = ConfigEntry[AppleTVManager] | type AppleTvConfigEntry = ConfigEntry[AppleTVManager] | ||||||
|  |  | ||||||
|  |  | ||||||
| async def async_setup_entry(hass: HomeAssistant, entry: AppleTvConfigEntry) -> bool: | async def async_setup_entry(hass: HomeAssistant, entry: AppleTvConfigEntry) -> bool: | ||||||
|     """Set up a config entry for Apple TV.""" |     """Set up a config entry for Apple TV.""" | ||||||
|     if sys.version_info >= (3, 14): |  | ||||||
|         raise HomeAssistantError( |  | ||||||
|             "Apple TV is not supported on Python 3.14. Please use Python 3.13." |  | ||||||
|         ) |  | ||||||
|     manager = AppleTVManager(hass, entry) |     manager = AppleTVManager(hass, entry) | ||||||
|  |  | ||||||
|     if manager.is_on: |     if manager.is_on: | ||||||
|   | |||||||
| @@ -7,7 +7,7 @@ | |||||||
|   "documentation": "https://www.home-assistant.io/integrations/apple_tv", |   "documentation": "https://www.home-assistant.io/integrations/apple_tv", | ||||||
|   "iot_class": "local_push", |   "iot_class": "local_push", | ||||||
|   "loggers": ["pyatv", "srptools"], |   "loggers": ["pyatv", "srptools"], | ||||||
|   "requirements": ["pyatv==0.16.1;python_version<'3.14'"], |   "requirements": ["pyatv==0.16.1"], | ||||||
|   "zeroconf": [ |   "zeroconf": [ | ||||||
|     "_mediaremotetv._tcp.local.", |     "_mediaremotetv._tcp.local.", | ||||||
|     "_companion-link._tcp.local.", |     "_companion-link._tcp.local.", | ||||||
|   | |||||||
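Note (not part of the diff): the requirement string on the left carries a PEP 508 environment marker, so the package is only installed on interpreters older than 3.14; the right side drops the marker and installs unconditionally. A quick sketch of how such a marker is evaluated, using the third-party packaging library purely for illustration:

    # Illustration only; pip evaluates the same marker syntax during installs.
    from packaging.requirements import Requirement

    req = Requirement("pyatv==0.16.1; python_version<'3.14'")
    print(req.name, req.specifier)   # pyatv ==0.16.1
    print(req.marker.evaluate())     # True on Python 3.13, False on 3.14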
| @@ -7,8 +7,6 @@ from typing import Any | |||||||
| from pyaprilaire.const import Attribute | from pyaprilaire.const import Attribute | ||||||
|  |  | ||||||
| from homeassistant.components.climate import ( | from homeassistant.components.climate import ( | ||||||
|     ATTR_TARGET_TEMP_HIGH, |  | ||||||
|     ATTR_TARGET_TEMP_LOW, |  | ||||||
|     FAN_AUTO, |     FAN_AUTO, | ||||||
|     FAN_ON, |     FAN_ON, | ||||||
|     PRESET_AWAY, |     PRESET_AWAY, | ||||||
| @@ -18,12 +16,7 @@ from homeassistant.components.climate import ( | |||||||
|     HVACAction, |     HVACAction, | ||||||
|     HVACMode, |     HVACMode, | ||||||
| ) | ) | ||||||
| from homeassistant.const import ( | from homeassistant.const import PRECISION_HALVES, PRECISION_WHOLE, UnitOfTemperature | ||||||
|     ATTR_TEMPERATURE, |  | ||||||
|     PRECISION_HALVES, |  | ||||||
|     PRECISION_WHOLE, |  | ||||||
|     UnitOfTemperature, |  | ||||||
| ) |  | ||||||
| from homeassistant.core import HomeAssistant | from homeassistant.core import HomeAssistant | ||||||
| from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback | from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback | ||||||
|  |  | ||||||
| @@ -239,15 +232,15 @@ class AprilaireClimate(BaseAprilaireEntity, ClimateEntity): | |||||||
|         cool_setpoint = 0 |         cool_setpoint = 0 | ||||||
|         heat_setpoint = 0 |         heat_setpoint = 0 | ||||||
|  |  | ||||||
|         if temperature := kwargs.get(ATTR_TEMPERATURE): |         if temperature := kwargs.get("temperature"): | ||||||
|             if self.coordinator.data.get(Attribute.MODE) == 3: |             if self.coordinator.data.get(Attribute.MODE) == 3: | ||||||
|                 cool_setpoint = temperature |                 cool_setpoint = temperature | ||||||
|             else: |             else: | ||||||
|                 heat_setpoint = temperature |                 heat_setpoint = temperature | ||||||
|         else: |         else: | ||||||
|             if target_temp_low := kwargs.get(ATTR_TARGET_TEMP_LOW): |             if target_temp_low := kwargs.get("target_temp_low"): | ||||||
|                 heat_setpoint = target_temp_low |                 heat_setpoint = target_temp_low | ||||||
|             if target_temp_high := kwargs.get(ATTR_TARGET_TEMP_HIGH): |             if target_temp_high := kwargs.get("target_temp_high"): | ||||||
|                 cool_setpoint = target_temp_high |                 cool_setpoint = target_temp_high | ||||||
|  |  | ||||||
|         if cool_setpoint == 0 and heat_setpoint == 0: |         if cool_setpoint == 0 and heat_setpoint == 0: | ||||||
|   | |||||||
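Note (not part of the diff): the two sides of the hunk above are behaviorally equivalent, because the climate ATTR_* constants are just the literal kwargs keys that async_set_temperature receives. A small sketch, assuming the constants keep their current string values:

    # Sketch: the constants removed on the left resolve to the literals used on the right.
    from homeassistant.components.climate import ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW
    from homeassistant.const import ATTR_TEMPERATURE

    assert ATTR_TEMPERATURE == "temperature"
    assert ATTR_TARGET_TEMP_LOW == "target_temp_low"
    assert ATTR_TARGET_TEMP_HIGH == "target_temp_high"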
| @@ -41,8 +41,6 @@ from .pipeline import ( | |||||||
|     async_setup_pipeline_store, |     async_setup_pipeline_store, | ||||||
|     async_update_pipeline, |     async_update_pipeline, | ||||||
| ) | ) | ||||||
| from .select import AssistPipelineSelect, VadSensitivitySelect |  | ||||||
| from .vad import VadSensitivity |  | ||||||
| from .websocket_api import async_register_websocket_api | from .websocket_api import async_register_websocket_api | ||||||
|  |  | ||||||
| __all__ = ( | __all__ = ( | ||||||
| @@ -53,18 +51,16 @@ __all__ = ( | |||||||
|     "SAMPLE_CHANNELS", |     "SAMPLE_CHANNELS", | ||||||
|     "SAMPLE_RATE", |     "SAMPLE_RATE", | ||||||
|     "SAMPLE_WIDTH", |     "SAMPLE_WIDTH", | ||||||
|     "AssistPipelineSelect", |  | ||||||
|     "AudioSettings", |     "AudioSettings", | ||||||
|     "Pipeline", |     "Pipeline", | ||||||
|     "PipelineEvent", |     "PipelineEvent", | ||||||
|     "PipelineEventType", |     "PipelineEventType", | ||||||
|     "PipelineNotFound", |     "PipelineNotFound", | ||||||
|     "VadSensitivity", |  | ||||||
|     "VadSensitivitySelect", |  | ||||||
|     "WakeWordSettings", |     "WakeWordSettings", | ||||||
|     "async_create_default_pipeline", |     "async_create_default_pipeline", | ||||||
|     "async_get_pipelines", |     "async_get_pipelines", | ||||||
|     "async_pipeline_from_audio_stream", |     "async_pipeline_from_audio_stream", | ||||||
|  |     "async_setup", | ||||||
|     "async_update_pipeline", |     "async_update_pipeline", | ||||||
| ) | ) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -19,14 +19,7 @@ import wave | |||||||
| import hass_nabucasa | import hass_nabucasa | ||||||
| import voluptuous as vol | import voluptuous as vol | ||||||
|  |  | ||||||
| from homeassistant.components import ( | from homeassistant.components import conversation, stt, tts, wake_word, websocket_api | ||||||
|     conversation, |  | ||||||
|     media_player, |  | ||||||
|     stt, |  | ||||||
|     tts, |  | ||||||
|     wake_word, |  | ||||||
|     websocket_api, |  | ||||||
| ) |  | ||||||
| from homeassistant.const import ATTR_SUPPORTED_FEATURES, MATCH_ALL | from homeassistant.const import ATTR_SUPPORTED_FEATURES, MATCH_ALL | ||||||
| from homeassistant.core import Context, HomeAssistant, callback | from homeassistant.core import Context, HomeAssistant, callback | ||||||
| from homeassistant.exceptions import HomeAssistantError | from homeassistant.exceptions import HomeAssistantError | ||||||
| @@ -137,10 +130,7 @@ SAVE_DELAY = 10 | |||||||
| @callback | @callback | ||||||
| def _async_local_fallback_intent_filter(result: RecognizeResult) -> bool: | def _async_local_fallback_intent_filter(result: RecognizeResult) -> bool: | ||||||
|     """Filter out intents that are not local fallback.""" |     """Filter out intents that are not local fallback.""" | ||||||
|     return result.intent.name in ( |     return result.intent.name in (intent.INTENT_GET_STATE,) | ||||||
|         intent.INTENT_GET_STATE, |  | ||||||
|         media_player.INTENT_MEDIA_SEARCH_AND_PLAY, |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @callback | @callback | ||||||
|   | |||||||
| @@ -3,17 +3,17 @@ | |||||||
| from __future__ import annotations | from __future__ import annotations | ||||||
|  |  | ||||||
| from abc import ABC, abstractmethod | from abc import ABC, abstractmethod | ||||||
|  | from collections import namedtuple | ||||||
| from collections.abc import Awaitable, Callable, Coroutine | from collections.abc import Awaitable, Callable, Coroutine | ||||||
| import functools | import functools | ||||||
| import logging | import logging | ||||||
| from typing import Any, NamedTuple | from typing import Any, cast | ||||||
|  |  | ||||||
| from aioasuswrt.asuswrt import AsusWrt as AsusWrtLegacy | from aioasuswrt.asuswrt import AsusWrt as AsusWrtLegacy | ||||||
| from aiohttp import ClientSession | from aiohttp import ClientSession | ||||||
| from asusrouter import AsusRouter, AsusRouterError | from asusrouter import AsusRouter, AsusRouterError | ||||||
| from asusrouter.config import ARConfigKey | from asusrouter.config import ARConfigKey | ||||||
| from asusrouter.modules.client import AsusClient | from asusrouter.modules.client import AsusClient | ||||||
| from asusrouter.modules.connection import ConnectionState |  | ||||||
| from asusrouter.modules.data import AsusData | from asusrouter.modules.data import AsusData | ||||||
| from asusrouter.modules.homeassistant import convert_to_ha_data, convert_to_ha_sensors | from asusrouter.modules.homeassistant import convert_to_ha_data, convert_to_ha_sensors | ||||||
| from asusrouter.tools.connection import get_cookie_jar | from asusrouter.tools.connection import get_cookie_jar | ||||||
| @@ -61,27 +61,11 @@ SENSORS_TYPE_RATES = "sensors_rates" | |||||||
| SENSORS_TYPE_TEMPERATURES = "sensors_temperatures" | SENSORS_TYPE_TEMPERATURES = "sensors_temperatures" | ||||||
| SENSORS_TYPE_UPTIME = "sensors_uptime" | SENSORS_TYPE_UPTIME = "sensors_uptime" | ||||||
|  |  | ||||||
|  | WrtDevice = namedtuple("WrtDevice", ["ip", "name", "connected_to"])  # noqa: PYI024 | ||||||
| class WrtDevice(NamedTuple): |  | ||||||
|     """WrtDevice structure.""" |  | ||||||
|  |  | ||||||
|     ip: str | None |  | ||||||
|     name: str | None |  | ||||||
|     connected_to: str | None |  | ||||||
|  |  | ||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) | _LOGGER = logging.getLogger(__name__) | ||||||
|  |  | ||||||
| type _FuncType[_T] = Callable[ | type _FuncType[_T] = Callable[[_T], Awaitable[list[Any] | tuple[Any] | dict[str, Any]]] | ||||||
|     [_T], |  | ||||||
|     Awaitable[ |  | ||||||
|         list[str] |  | ||||||
|         | tuple[float | None, float | None] |  | ||||||
|         | list[float] |  | ||||||
|         | dict[str, float | str | None] |  | ||||||
|         | dict[str, float] |  | ||||||
|     ], |  | ||||||
| ] |  | ||||||
| type _ReturnFuncType[_T] = Callable[[_T], Coroutine[Any, Any, dict[str, Any]]] | type _ReturnFuncType[_T] = Callable[[_T], Coroutine[Any, Any, dict[str, Any]]] | ||||||
|  |  | ||||||
|  |  | ||||||
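The WrtDevice hunk above shows both the untyped collections.namedtuple form (the one Ruff rule PYI024 flags) and the typed typing.NamedTuple class. Both produce the same runtime tuple; only the annotations differ. A small plain-Python sketch:

    from collections import namedtuple
    from typing import NamedTuple

    # Untyped factory form, flagged by Ruff rule PYI024:
    WrtDeviceLegacy = namedtuple("WrtDeviceLegacy", ["ip", "name", "connected_to"])  # noqa: PYI024

    # Typed class form: identical runtime behaviour, but each field carries an annotation.
    class WrtDevice(NamedTuple):
        """WrtDevice structure."""

        ip: str | None
        name: str | None
        connected_to: str | None

    legacy = WrtDeviceLegacy("192.168.1.2", "laptop", "RT-AX88U")
    typed = WrtDevice("192.168.1.2", "laptop", "RT-AX88U")
    assert tuple(legacy) == tuple(typed)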
| @@ -96,9 +80,7 @@ def handle_errors_and_zip[_AsusWrtBridgeT: AsusWrtBridge]( | |||||||
|         """Run library methods and zip results or manage exceptions.""" |         """Run library methods and zip results or manage exceptions.""" | ||||||
|  |  | ||||||
|         @functools.wraps(func) |         @functools.wraps(func) | ||||||
|         async def _wrapper( |         async def _wrapper(self: _AsusWrtBridgeT) -> dict[str, Any]: | ||||||
|             self: _AsusWrtBridgeT, |  | ||||||
|         ) -> dict[str, float | str | None] | dict[str, float]: |  | ||||||
|             try: |             try: | ||||||
|                 data = await func(self) |                 data = await func(self) | ||||||
|             except exceptions as exc: |             except exceptions as exc: | ||||||
| @@ -125,9 +107,7 @@ class AsusWrtBridge(ABC): | |||||||
|  |  | ||||||
|     @staticmethod |     @staticmethod | ||||||
|     def get_bridge( |     def get_bridge( | ||||||
|         hass: HomeAssistant, |         hass: HomeAssistant, conf: dict[str, Any], options: dict[str, Any] | None = None | ||||||
|         conf: dict[str, str | int], |  | ||||||
|         options: dict[str, str | bool | int] | None = None, |  | ||||||
|     ) -> AsusWrtBridge: |     ) -> AsusWrtBridge: | ||||||
|         """Get Bridge instance.""" |         """Get Bridge instance.""" | ||||||
|         if conf[CONF_PROTOCOL] in (PROTOCOL_HTTPS, PROTOCOL_HTTP): |         if conf[CONF_PROTOCOL] in (PROTOCOL_HTTPS, PROTOCOL_HTTP): | ||||||
| @@ -239,7 +219,7 @@ class AsusWrtLegacyBridge(AsusWrtBridge): | |||||||
|     @property |     @property | ||||||
|     def is_connected(self) -> bool: |     def is_connected(self) -> bool: | ||||||
|         """Get connected status.""" |         """Get connected status.""" | ||||||
|         return self._api.is_connected |         return cast(bool, self._api.is_connected) | ||||||
|  |  | ||||||
|     async def async_connect(self) -> None: |     async def async_connect(self) -> None: | ||||||
|         """Connect to the device.""" |         """Connect to the device.""" | ||||||
| @@ -255,7 +235,8 @@ class AsusWrtLegacyBridge(AsusWrtBridge): | |||||||
|  |  | ||||||
|     async def async_disconnect(self) -> None: |     async def async_disconnect(self) -> None: | ||||||
|         """Disconnect to the device.""" |         """Disconnect to the device.""" | ||||||
|         await self._api.async_disconnect() |         if self._api is not None and self._protocol == PROTOCOL_TELNET: | ||||||
|  |             self._api.connection.disconnect() | ||||||
|  |  | ||||||
|     async def async_get_connected_devices(self) -> dict[str, WrtDevice]: |     async def async_get_connected_devices(self) -> dict[str, WrtDevice]: | ||||||
|         """Get list of connected devices.""" |         """Get list of connected devices.""" | ||||||
| @@ -326,22 +307,22 @@ class AsusWrtLegacyBridge(AsusWrtBridge): | |||||||
|         return [SENSORS_TEMPERATURES_LEGACY[i] for i in range(3) if availability[i]] |         return [SENSORS_TEMPERATURES_LEGACY[i] for i in range(3) if availability[i]] | ||||||
|  |  | ||||||
|     @handle_errors_and_zip((IndexError, OSError, ValueError), SENSORS_BYTES) |     @handle_errors_and_zip((IndexError, OSError, ValueError), SENSORS_BYTES) | ||||||
|     async def _get_bytes(self) -> tuple[float | None, float | None]: |     async def _get_bytes(self) -> Any: | ||||||
|         """Fetch byte information from the router.""" |         """Fetch byte information from the router.""" | ||||||
|         return await self._api.async_get_bytes_total() |         return await self._api.async_get_bytes_total() | ||||||
|  |  | ||||||
|     @handle_errors_and_zip((IndexError, OSError, ValueError), SENSORS_RATES) |     @handle_errors_and_zip((IndexError, OSError, ValueError), SENSORS_RATES) | ||||||
|     async def _get_rates(self) -> tuple[float, float]: |     async def _get_rates(self) -> Any: | ||||||
|         """Fetch rates information from the router.""" |         """Fetch rates information from the router.""" | ||||||
|         return await self._api.async_get_current_transfer_rates() |         return await self._api.async_get_current_transfer_rates() | ||||||
|  |  | ||||||
|     @handle_errors_and_zip((IndexError, OSError, ValueError), SENSORS_LOAD_AVG) |     @handle_errors_and_zip((IndexError, OSError, ValueError), SENSORS_LOAD_AVG) | ||||||
|     async def _get_load_avg(self) -> list[float]: |     async def _get_load_avg(self) -> Any: | ||||||
|         """Fetch load average information from the router.""" |         """Fetch load average information from the router.""" | ||||||
|         return await self._api.async_get_loadavg() |         return await self._api.async_get_loadavg() | ||||||
|  |  | ||||||
|     @handle_errors_and_zip((OSError, ValueError), None) |     @handle_errors_and_zip((OSError, ValueError), None) | ||||||
|     async def _get_temperatures(self) -> dict[str, float]: |     async def _get_temperatures(self) -> Any: | ||||||
|         """Fetch temperatures information from the router.""" |         """Fetch temperatures information from the router.""" | ||||||
|         return await self._api.async_get_temperature() |         return await self._api.async_get_temperature() | ||||||
|  |  | ||||||
| @@ -456,7 +437,6 @@ class AsusWrtHttpBridge(AsusWrtBridge): | |||||||
|             if dev.connection is not None |             if dev.connection is not None | ||||||
|             and dev.description is not None |             and dev.description is not None | ||||||
|             and dev.connection.ip_address is not None |             and dev.connection.ip_address is not None | ||||||
|             and dev.state is ConnectionState.CONNECTED |  | ||||||
|         } |         } | ||||||
|  |  | ||||||
|     async def async_get_available_sensors(self) -> dict[str, dict[str, Any]]: |     async def async_get_available_sensors(self) -> dict[str, dict[str, Any]]: | ||||||
|   | |||||||
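handle_errors_and_zip, applied to the sensor getters above, follows the standard async-decorator pattern: functools.wraps keeps the wrapped coroutine's metadata while the wrapper catches the listed exceptions and zips the raw result with sensor keys. A simplified standalone sketch of that pattern (it returns an empty dict on failure instead of raising, and the keys are illustrative):

    import asyncio
    import functools
    from typing import Any

    def handle_errors_and_zip(exceptions: tuple[type[Exception], ...], keys: list[str]):
        """Simplified: catch the given errors, zip the awaited result with keys."""

        def _decorator(func):
            @functools.wraps(func)
            async def _wrapper(*args) -> dict[str, Any]:
                try:
                    data = await func(*args)
                except exceptions:
                    return {}
                return dict(zip(keys, data, strict=False))

            return _wrapper

        return _decorator

    @handle_errors_and_zip((OSError, ValueError), ["rx_rate", "tx_rate"])
    async def get_rates() -> tuple[float, float]:
        return (1024.0, 2048.0)

    print(asyncio.run(get_rates()))  # {'rx_rate': 1024.0, 'tx_rate': 2048.0}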
| @@ -175,12 +175,12 @@ class AsusWrtFlowHandler(ConfigFlow, domain=DOMAIN): | |||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     async def _async_check_connection( |     async def _async_check_connection( | ||||||
|         self, user_input: dict[str, str | int] |         self, user_input: dict[str, Any] | ||||||
|     ) -> tuple[str, str | None]: |     ) -> tuple[str, str | None]: | ||||||
|         """Attempt to connect the AsusWrt router.""" |         """Attempt to connect the AsusWrt router.""" | ||||||
|  |  | ||||||
|         api: AsusWrtBridge |         api: AsusWrtBridge | ||||||
|         host = user_input[CONF_HOST] |         host: str = user_input[CONF_HOST] | ||||||
|         protocol = user_input[CONF_PROTOCOL] |         protocol = user_input[CONF_PROTOCOL] | ||||||
|         error: str | None = None |         error: str | None = None | ||||||
|  |  | ||||||
|   | |||||||
| @@ -10,6 +10,8 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback | |||||||
| from . import AsusWrtConfigEntry | from . import AsusWrtConfigEntry | ||||||
| from .router import AsusWrtDevInfo, AsusWrtRouter | from .router import AsusWrtDevInfo, AsusWrtRouter | ||||||
|  |  | ||||||
|  | ATTR_LAST_TIME_REACHABLE = "last_time_reachable" | ||||||
|  |  | ||||||
| DEFAULT_DEVICE_NAME = "Unknown device" | DEFAULT_DEVICE_NAME = "Unknown device" | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -56,6 +58,8 @@ def add_entities( | |||||||
| class AsusWrtDevice(ScannerEntity): | class AsusWrtDevice(ScannerEntity): | ||||||
|     """Representation of a AsusWrt device.""" |     """Representation of a AsusWrt device.""" | ||||||
|  |  | ||||||
|  |     _unrecorded_attributes = frozenset({ATTR_LAST_TIME_REACHABLE}) | ||||||
|  |  | ||||||
|     _attr_should_poll = False |     _attr_should_poll = False | ||||||
|  |  | ||||||
|     def __init__(self, router: AsusWrtRouter, device: AsusWrtDevInfo) -> None: |     def __init__(self, router: AsusWrtRouter, device: AsusWrtDevInfo) -> None: | ||||||
| @@ -93,6 +97,11 @@ class AsusWrtDevice(ScannerEntity): | |||||||
|     def async_on_demand_update(self) -> None: |     def async_on_demand_update(self) -> None: | ||||||
|         """Update state.""" |         """Update state.""" | ||||||
|         self._device = self._router.devices[self._device.mac] |         self._device = self._router.devices[self._device.mac] | ||||||
|  |         self._attr_extra_state_attributes = {} | ||||||
|  |         if self._device.last_activity: | ||||||
|  |             self._attr_extra_state_attributes[ATTR_LAST_TIME_REACHABLE] = ( | ||||||
|  |                 self._device.last_activity.isoformat(timespec="seconds") | ||||||
|  |             ) | ||||||
|         self.async_write_ha_state() |         self.async_write_ha_state() | ||||||
|  |  | ||||||
|     async def async_added_to_hass(self) -> None: |     async def async_added_to_hass(self) -> None: | ||||||
|   | |||||||
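The right-hand column of the device_tracker hunk above exposes a last_time_reachable timestamp and lists it in _unrecorded_attributes, so the attribute is visible on the state object but kept out of recorder history (it changes on every scan). A hedged sketch of that pattern, assuming the Home Assistant entity APIs shown above; the _refresh helper is hypothetical:

    from datetime import datetime

    from homeassistant.components.device_tracker import ScannerEntity

    ATTR_LAST_TIME_REACHABLE = "last_time_reachable"

    class ExampleTrackedDevice(ScannerEntity):
        """Sketch: attribute shown in the UI but excluded from recorder history."""

        _unrecorded_attributes = frozenset({ATTR_LAST_TIME_REACHABLE})
        _attr_should_poll = False

        def _refresh(self, last_activity: datetime | None) -> None:
            # Hypothetical helper mirroring the on-demand update in the hunk above.
            self._attr_extra_state_attributes = {}
            if last_activity:
                self._attr_extra_state_attributes[ATTR_LAST_TIME_REACHABLE] = (
                    last_activity.isoformat(timespec="seconds")
                )
            self.async_write_ha_state()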
| @@ -7,5 +7,5 @@ | |||||||
|   "integration_type": "hub", |   "integration_type": "hub", | ||||||
|   "iot_class": "local_polling", |   "iot_class": "local_polling", | ||||||
|   "loggers": ["aioasuswrt", "asusrouter", "asyncssh"], |   "loggers": ["aioasuswrt", "asusrouter", "asyncssh"], | ||||||
|   "requirements": ["aioasuswrt==1.5.1", "asusrouter==1.21.0"] |   "requirements": ["aioasuswrt==1.4.0", "asusrouter==1.21.0"] | ||||||
| } | } | ||||||
|   | |||||||
| @@ -176,7 +176,7 @@ class AsusWrtRouter: | |||||||
|  |  | ||||||
|         self._on_close: list[Callable] = [] |         self._on_close: list[Callable] = [] | ||||||
|  |  | ||||||
|         self._options: dict[str, str | bool | int] = { |         self._options: dict[str, Any] = { | ||||||
|             CONF_DNSMASQ: DEFAULT_DNSMASQ, |             CONF_DNSMASQ: DEFAULT_DNSMASQ, | ||||||
|             CONF_INTERFACE: DEFAULT_INTERFACE, |             CONF_INTERFACE: DEFAULT_INTERFACE, | ||||||
|             CONF_REQUIRE_IP: True, |             CONF_REQUIRE_IP: True, | ||||||
| @@ -299,10 +299,12 @@ class AsusWrtRouter: | |||||||
|             _LOGGER.warning("Reconnected to ASUS router %s", self.host) |             _LOGGER.warning("Reconnected to ASUS router %s", self.host) | ||||||
|  |  | ||||||
|         self._connected_devices = len(wrt_devices) |         self._connected_devices = len(wrt_devices) | ||||||
|         consider_home = int( |         consider_home: int = self._options.get( | ||||||
|             self._options.get(CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME.total_seconds()) |             CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME.total_seconds() | ||||||
|  |         ) | ||||||
|  |         track_unknown: bool = self._options.get( | ||||||
|  |             CONF_TRACK_UNKNOWN, DEFAULT_TRACK_UNKNOWN | ||||||
|         ) |         ) | ||||||
|         track_unknown = self._options.get(CONF_TRACK_UNKNOWN, DEFAULT_TRACK_UNKNOWN) |  | ||||||
|  |  | ||||||
|         for device_mac, device in self._devices.items(): |         for device_mac, device in self._devices.items(): | ||||||
|             dev_info = wrt_devices.pop(device_mac, None) |             dev_info = wrt_devices.pop(device_mac, None) | ||||||
|   | |||||||
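One column of the router.py hunk above wraps the consider_home option in int(), the other only annotates the variable as int. Since timedelta.total_seconds() returns a float, the annotation alone does not change the runtime value; only the explicit conversion does. A minimal plain-Python sketch:

    from datetime import timedelta

    DEFAULT_CONSIDER_HOME = timedelta(seconds=180)
    options: dict[str, float] = {}

    # The fallback is a float (180.0); int() is what actually yields an integer.
    consider_home = int(options.get("consider_home", DEFAULT_CONSIDER_HOME.total_seconds()))
    print(consider_home, type(consider_home))  # 180 <class 'int'>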
| @@ -36,14 +36,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> bo | |||||||
|         raise ConfigEntryAuthFailed("Migration to OAuth required") |         raise ConfigEntryAuthFailed("Migration to OAuth required") | ||||||
|  |  | ||||||
|     session = async_create_august_clientsession(hass) |     session = async_create_august_clientsession(hass) | ||||||
|     try: |  | ||||||
|     implementation = ( |     implementation = ( | ||||||
|         await config_entry_oauth2_flow.async_get_config_entry_implementation( |         await config_entry_oauth2_flow.async_get_config_entry_implementation( | ||||||
|             hass, entry |             hass, entry | ||||||
|         ) |         ) | ||||||
|     ) |     ) | ||||||
|     except ValueError as err: |  | ||||||
|         raise ConfigEntryNotReady("OAuth implementation not available") from err |  | ||||||
|     oauth_session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation) |     oauth_session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation) | ||||||
|     august_gateway = AugustGateway(Path(hass.config.config_dir), session, oauth_session) |     august_gateway = AugustGateway(Path(hass.config.config_dir), session, oauth_session) | ||||||
|     try: |     try: | ||||||
|   | |||||||
| @@ -5,5 +5,5 @@ | |||||||
|   "config_flow": true, |   "config_flow": true, | ||||||
|   "documentation": "https://www.home-assistant.io/integrations/autarco", |   "documentation": "https://www.home-assistant.io/integrations/autarco", | ||||||
|   "iot_class": "cloud_polling", |   "iot_class": "cloud_polling", | ||||||
|   "requirements": ["autarco==3.2.0"] |   "requirements": ["autarco==3.1.0"] | ||||||
| } | } | ||||||
|   | |||||||
| @@ -136,8 +136,8 @@ class WellKnownOAuthInfoView(HomeAssistantView): | |||||||
|             url_prefix = get_url(hass, require_current_request=True) |             url_prefix = get_url(hass, require_current_request=True) | ||||||
|         except NoURLAvailableError: |         except NoURLAvailableError: | ||||||
|             url_prefix = "" |             url_prefix = "" | ||||||
|  |         return self.json( | ||||||
|         metadata = { |             { | ||||||
|                 "authorization_endpoint": f"{url_prefix}/auth/authorize", |                 "authorization_endpoint": f"{url_prefix}/auth/authorize", | ||||||
|                 "token_endpoint": f"{url_prefix}/auth/token", |                 "token_endpoint": f"{url_prefix}/auth/token", | ||||||
|                 "revocation_endpoint": f"{url_prefix}/auth/revoke", |                 "revocation_endpoint": f"{url_prefix}/auth/revoke", | ||||||
| @@ -146,12 +146,7 @@ class WellKnownOAuthInfoView(HomeAssistantView): | |||||||
|                     "https://developers.home-assistant.io/docs/auth_api" |                     "https://developers.home-assistant.io/docs/auth_api" | ||||||
|                 ), |                 ), | ||||||
|             } |             } | ||||||
|  |         ) | ||||||
|         # Add issuer only when we have a valid base URL (RFC 8414 compliance) |  | ||||||
|         if url_prefix: |  | ||||||
|             metadata["issuer"] = url_prefix |  | ||||||
|  |  | ||||||
|         return self.json(metadata) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class AuthProvidersView(HomeAssistantView): | class AuthProvidersView(HomeAssistantView): | ||||||
|   | |||||||
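In the auth hunk above, one side builds the OAuth 2.0 server metadata as a separate dict so an issuer entry can be added only when an external URL is actually available, which RFC 8414 requires to be a valid URL. A plain-Python sketch of that shape, limited to the endpoints visible in the hunk:

    def build_oauth_metadata(url_prefix: str) -> dict[str, str]:
        """Assemble well-known OAuth metadata; include issuer only with a real base URL."""
        metadata = {
            "authorization_endpoint": f"{url_prefix}/auth/authorize",
            "token_endpoint": f"{url_prefix}/auth/token",
            "revocation_endpoint": f"{url_prefix}/auth/revoke",
        }
        if url_prefix:
            metadata["issuer"] = url_prefix
        return metadata

    print(build_oauth_metadata(""))  # no issuer when no base URL could be determined
    print(build_oauth_metadata("https://example.duckdns.org"))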
| @@ -5,7 +5,7 @@ | |||||||
|       "step": { |       "step": { | ||||||
|         "init": { |         "init": { | ||||||
|           "title": "Set up two-factor authentication using TOTP", |           "title": "Set up two-factor authentication using TOTP", | ||||||
|           "description": "To activate two-factor authentication using time-based one-time passwords, scan the QR code with your authentication app. If you don't have one, we recommend either [Google Authenticator]({google_authenticator_url}) or [Authy]({authy_url}).\n\n{qr_code}\n\nAfter scanning the code, enter the six-digit code from your app to verify the setup. If you have problems scanning the QR code, do a manual setup with code **`{code}`**." |           "description": "To activate two-factor authentication using time-based one-time passwords, scan the QR code with your authentication app. If you don't have one, we recommend either [Google Authenticator](https://support.google.com/accounts/answer/1066447) or [Authy](https://authy.com/).\n\n{qr_code}\n\nAfter scanning the code, enter the six-digit code from your app to verify the setup. If you have problems scanning the QR code, do a manual setup with code **`{code}`**." | ||||||
|         } |         } | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|   | |||||||
| @@ -146,7 +146,7 @@ | |||||||
|         }, |         }, | ||||||
|         "state": { |         "state": { | ||||||
|           "title": "Add a Bayesian sensor", |           "title": "Add a Bayesian sensor", | ||||||
|           "description": "Add an observation which evaluates to `True` when the value of the sensor exactly matches *'To state'*. When `False`, it will update the prior with probabilities that are the inverse of those set below. This behavior can be overridden by adding observations for the same entity's other states.", |           "description": "Add an observation which evaluates to `True` when the value of the sensor exactly matches *'To state'*. When `False`, it will update the prior with probabilities that are the inverse of those set below. This behaviour can be overridden by adding observations for the same entity's other states.", | ||||||
|  |  | ||||||
|           "data": { |           "data": { | ||||||
|             "name": "[%key:common::config_flow::data::name%]", |             "name": "[%key:common::config_flow::data::name%]", | ||||||
|   | |||||||
| @@ -57,7 +57,6 @@ from .api import ( | |||||||
|     _get_manager, |     _get_manager, | ||||||
|     async_address_present, |     async_address_present, | ||||||
|     async_ble_device_from_address, |     async_ble_device_from_address, | ||||||
|     async_clear_address_from_match_history, |  | ||||||
|     async_current_scanners, |     async_current_scanners, | ||||||
|     async_discovered_service_info, |     async_discovered_service_info, | ||||||
|     async_get_advertisement_callback, |     async_get_advertisement_callback, | ||||||
| @@ -113,9 +112,9 @@ __all__ = [ | |||||||
|     "BluetoothServiceInfo", |     "BluetoothServiceInfo", | ||||||
|     "BluetoothServiceInfoBleak", |     "BluetoothServiceInfoBleak", | ||||||
|     "HaBluetoothConnector", |     "HaBluetoothConnector", | ||||||
|  |     "HomeAssistantRemoteScanner", | ||||||
|     "async_address_present", |     "async_address_present", | ||||||
|     "async_ble_device_from_address", |     "async_ble_device_from_address", | ||||||
|     "async_clear_address_from_match_history", |  | ||||||
|     "async_current_scanners", |     "async_current_scanners", | ||||||
|     "async_discovered_service_info", |     "async_discovered_service_info", | ||||||
|     "async_get_advertisement_callback", |     "async_get_advertisement_callback", | ||||||
|   | |||||||
| @@ -193,20 +193,6 @@ def async_rediscover_address(hass: HomeAssistant, address: str) -> None: | |||||||
|     _get_manager(hass).async_rediscover_address(address) |     _get_manager(hass).async_rediscover_address(address) | ||||||
|  |  | ||||||
|  |  | ||||||
| @hass_callback |  | ||||||
| def async_clear_address_from_match_history(hass: HomeAssistant, address: str) -> None: |  | ||||||
|     """Clear an address from the integration matcher history. |  | ||||||
|  |  | ||||||
|     This allows future advertisements from this address to trigger discovery |  | ||||||
|     even if the advertisement content has changed but the service data UUIDs |  | ||||||
|     remain the same. |  | ||||||
|  |  | ||||||
|     Unlike async_rediscover_address, this does not immediately re-trigger |  | ||||||
|     discovery with the current advertisement in history. |  | ||||||
|     """ |  | ||||||
|     _get_manager(hass).async_clear_address_from_match_history(address) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @hass_callback | @hass_callback | ||||||
| def async_register_scanner( | def async_register_scanner( | ||||||
|     hass: HomeAssistant, |     hass: HomeAssistant, | ||||||
|   | |||||||
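The docstring in the hunk above spells out the difference between the two cache operations: async_rediscover_address immediately re-runs discovery with the cached advertisement, while async_clear_address_from_match_history only forgets the cached match so future advertisements can trigger discovery again. A hedged usage sketch, assuming the side of this compare that exports the helper from homeassistant.components.bluetooth:

    from homeassistant.components import bluetooth
    from homeassistant.core import HomeAssistant, callback

    @callback
    def forget_ble_match(hass: HomeAssistant, address: str) -> None:
        """Allow future advertisements from this address to match again (sketch)."""
        # Unlike bluetooth.async_rediscover_address, this does not replay the
        # advertisement that is already in history.
        bluetooth.async_clear_address_from_match_history(hass, address)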
| @@ -120,19 +120,6 @@ class HomeAssistantBluetoothManager(BluetoothManager): | |||||||
|         if service_info := self._all_history.get(address): |         if service_info := self._all_history.get(address): | ||||||
|             self._async_trigger_matching_discovery(service_info) |             self._async_trigger_matching_discovery(service_info) | ||||||
|  |  | ||||||
|     @hass_callback |  | ||||||
|     def async_clear_address_from_match_history(self, address: str) -> None: |  | ||||||
|         """Clear an address from the integration matcher history. |  | ||||||
|  |  | ||||||
|         This allows future advertisements from this address to trigger discovery |  | ||||||
|         even if the advertisement content has changed but the service data UUIDs |  | ||||||
|         remain the same. |  | ||||||
|  |  | ||||||
|         Unlike async_rediscover_address, this does not immediately re-trigger |  | ||||||
|         discovery with the current advertisement in history. |  | ||||||
|         """ |  | ||||||
|         self._integration_matcher.async_clear_address(address) |  | ||||||
|  |  | ||||||
|     def _discover_service_info(self, service_info: BluetoothServiceInfoBleak) -> None: |     def _discover_service_info(self, service_info: BluetoothServiceInfoBleak) -> None: | ||||||
|         matched_domains = self._integration_matcher.match_domains(service_info) |         matched_domains = self._integration_matcher.match_domains(service_info) | ||||||
|         if self._debug: |         if self._debug: | ||||||
|   | |||||||
| @@ -68,17 +68,12 @@ class IntegrationMatchHistory: | |||||||
|     manufacturer_data: bool |     manufacturer_data: bool | ||||||
|     service_data: set[str] |     service_data: set[str] | ||||||
|     service_uuids: set[str] |     service_uuids: set[str] | ||||||
|     name: str |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def seen_all_fields( | def seen_all_fields( | ||||||
|     previous_match: IntegrationMatchHistory, |     previous_match: IntegrationMatchHistory, advertisement_data: AdvertisementData | ||||||
|     advertisement_data: AdvertisementData, |  | ||||||
|     name: str, |  | ||||||
| ) -> bool: | ) -> bool: | ||||||
|     """Return if we have seen all fields.""" |     """Return if we have seen all fields.""" | ||||||
|     if previous_match.name != name: |  | ||||||
|         return False |  | ||||||
|     if not previous_match.manufacturer_data and advertisement_data.manufacturer_data: |     if not previous_match.manufacturer_data and advertisement_data.manufacturer_data: | ||||||
|         return False |         return False | ||||||
|     if advertisement_data.service_data and ( |     if advertisement_data.service_data and ( | ||||||
| @@ -127,11 +122,10 @@ class IntegrationMatcher: | |||||||
|         device = service_info.device |         device = service_info.device | ||||||
|         advertisement_data = service_info.advertisement |         advertisement_data = service_info.advertisement | ||||||
|         connectable = service_info.connectable |         connectable = service_info.connectable | ||||||
|         name = service_info.name |  | ||||||
|         matched = self._matched_connectable if connectable else self._matched |         matched = self._matched_connectable if connectable else self._matched | ||||||
|         matched_domains: set[str] = set() |         matched_domains: set[str] = set() | ||||||
|         if (previous_match := matched.get(device.address)) and seen_all_fields( |         if (previous_match := matched.get(device.address)) and seen_all_fields( | ||||||
|             previous_match, advertisement_data, name |             previous_match, advertisement_data | ||||||
|         ): |         ): | ||||||
|             # We have seen all fields so we can skip the rest of the matchers |             # We have seen all fields so we can skip the rest of the matchers | ||||||
|             return matched_domains |             return matched_domains | ||||||
| @@ -146,13 +140,11 @@ class IntegrationMatcher: | |||||||
|             ) |             ) | ||||||
|             previous_match.service_data |= set(advertisement_data.service_data) |             previous_match.service_data |= set(advertisement_data.service_data) | ||||||
|             previous_match.service_uuids |= set(advertisement_data.service_uuids) |             previous_match.service_uuids |= set(advertisement_data.service_uuids) | ||||||
|             previous_match.name = name |  | ||||||
|         else: |         else: | ||||||
|             matched[device.address] = IntegrationMatchHistory( |             matched[device.address] = IntegrationMatchHistory( | ||||||
|                 manufacturer_data=bool(advertisement_data.manufacturer_data), |                 manufacturer_data=bool(advertisement_data.manufacturer_data), | ||||||
|                 service_data=set(advertisement_data.service_data), |                 service_data=set(advertisement_data.service_data), | ||||||
|                 service_uuids=set(advertisement_data.service_uuids), |                 service_uuids=set(advertisement_data.service_uuids), | ||||||
|                 name=name, |  | ||||||
|             ) |             ) | ||||||
|         return matched_domains |         return matched_domains | ||||||
|  |  | ||||||
|   | |||||||
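The match.py hunk above adds, on one side, a name field to the per-address match history and makes seen_all_fields bail out when the advertised local name changes, so a renamed device is matched again instead of being skipped. A simplified plain-Python sketch of just that extra check:

    from dataclasses import dataclass

    @dataclass
    class MatchHistory:
        """Simplified stand-in for IntegrationMatchHistory."""

        manufacturer_data: bool
        service_data: set[str]
        service_uuids: set[str]
        name: str

    def seen_all_fields(previous: MatchHistory, name: str) -> bool:
        # Simplified: the real helper also compares manufacturer and service data.
        return previous.name == name

    history = MatchHistory(True, {"0000fe95"}, {"0000180f"}, name="Sensor A1")
    print(seen_all_fields(history, "Sensor A1"))  # True: cached match can be reused
    print(seen_all_fields(history, "Sensor B2"))  # False: run the matchers again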
| @@ -8,7 +8,7 @@ | |||||||
|   "integration_type": "device", |   "integration_type": "device", | ||||||
|   "iot_class": "local_polling", |   "iot_class": "local_polling", | ||||||
|   "loggers": ["brother", "pyasn1", "pysmi", "pysnmp"], |   "loggers": ["brother", "pyasn1", "pysmi", "pysnmp"], | ||||||
|   "requirements": ["brother==5.1.1"], |   "requirements": ["brother==5.1.0"], | ||||||
|   "zeroconf": [ |   "zeroconf": [ | ||||||
|     { |     { | ||||||
|       "type": "_printer._tcp.local.", |       "type": "_printer._tcp.local.", | ||||||
|   | |||||||
| @@ -7,14 +7,12 @@ from typing import Any | |||||||
| from evolutionhttp import BryantEvolutionLocalClient | from evolutionhttp import BryantEvolutionLocalClient | ||||||
|  |  | ||||||
| from homeassistant.components.climate import ( | from homeassistant.components.climate import ( | ||||||
|     ATTR_TARGET_TEMP_HIGH, |  | ||||||
|     ATTR_TARGET_TEMP_LOW, |  | ||||||
|     ClimateEntity, |     ClimateEntity, | ||||||
|     ClimateEntityFeature, |     ClimateEntityFeature, | ||||||
|     HVACAction, |     HVACAction, | ||||||
|     HVACMode, |     HVACMode, | ||||||
| ) | ) | ||||||
| from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature | from homeassistant.const import UnitOfTemperature | ||||||
| from homeassistant.core import HomeAssistant | from homeassistant.core import HomeAssistant | ||||||
| from homeassistant.exceptions import HomeAssistantError | from homeassistant.exceptions import HomeAssistantError | ||||||
| from homeassistant.helpers.device_registry import DeviceInfo | from homeassistant.helpers.device_registry import DeviceInfo | ||||||
| @@ -210,24 +208,24 @@ class BryantEvolutionClimate(ClimateEntity): | |||||||
|  |  | ||||||
|     async def async_set_temperature(self, **kwargs: Any) -> None: |     async def async_set_temperature(self, **kwargs: Any) -> None: | ||||||
|         """Set new target temperature.""" |         """Set new target temperature.""" | ||||||
|         if value := kwargs.get(ATTR_TARGET_TEMP_HIGH): |         if kwargs.get("target_temp_high"): | ||||||
|             temp = int(value) |             temp = int(kwargs["target_temp_high"]) | ||||||
|             if not await self._client.set_cooling_setpoint(temp): |             if not await self._client.set_cooling_setpoint(temp): | ||||||
|                 raise HomeAssistantError( |                 raise HomeAssistantError( | ||||||
|                     translation_domain=DOMAIN, translation_key="failed_to_set_clsp" |                     translation_domain=DOMAIN, translation_key="failed_to_set_clsp" | ||||||
|                 ) |                 ) | ||||||
|             self._attr_target_temperature_high = temp |             self._attr_target_temperature_high = temp | ||||||
|  |  | ||||||
|         if value := kwargs.get(ATTR_TARGET_TEMP_LOW): |         if kwargs.get("target_temp_low"): | ||||||
|             temp = int(value) |             temp = int(kwargs["target_temp_low"]) | ||||||
|             if not await self._client.set_heating_setpoint(temp): |             if not await self._client.set_heating_setpoint(temp): | ||||||
|                 raise HomeAssistantError( |                 raise HomeAssistantError( | ||||||
|                     translation_domain=DOMAIN, translation_key="failed_to_set_htsp" |                     translation_domain=DOMAIN, translation_key="failed_to_set_htsp" | ||||||
|                 ) |                 ) | ||||||
|             self._attr_target_temperature_low = temp |             self._attr_target_temperature_low = temp | ||||||
|  |  | ||||||
|         if value := kwargs.get(ATTR_TEMPERATURE): |         if kwargs.get("temperature"): | ||||||
|             temp = int(value) |             temp = int(kwargs["temperature"]) | ||||||
|             fn = ( |             fn = ( | ||||||
|                 self._client.set_heating_setpoint |                 self._client.set_heating_setpoint | ||||||
|                 if self.hvac_mode == HVACMode.HEAT |                 if self.hvac_mode == HVACMode.HEAT | ||||||
|   | |||||||
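The climate hunk above differs only in whether the setpoint keys are read through the shared ATTR_* constants or raw string literals; the control flow is the same. A plain-Python sketch of that kwargs pattern (the constants mirror the hunk, and the truthiness check matches its behaviour):

    ATTR_TARGET_TEMP_HIGH = "target_temp_high"
    ATTR_TARGET_TEMP_LOW = "target_temp_low"
    ATTR_TEMPERATURE = "temperature"

    def apply_setpoints(**kwargs) -> dict[str, int]:
        """Collect whichever setpoints were passed, coerced to int."""
        applied: dict[str, int] = {}
        if value := kwargs.get(ATTR_TARGET_TEMP_HIGH):
            applied["cooling"] = int(value)
        if value := kwargs.get(ATTR_TARGET_TEMP_LOW):
            applied["heating"] = int(value)
        if value := kwargs.get(ATTR_TEMPERATURE):
            applied["single"] = int(value)
        return applied

    print(apply_setpoints(target_temp_high=75, target_temp_low=68))
    # {'cooling': 75, 'heating': 68}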
| @@ -3,20 +3,15 @@ | |||||||
| from __future__ import annotations | from __future__ import annotations | ||||||
|  |  | ||||||
| from datetime import datetime | from datetime import datetime | ||||||
| from functools import partial |  | ||||||
| import logging | import logging | ||||||
| from typing import Any |  | ||||||
|  |  | ||||||
| import caldav | import caldav | ||||||
| from caldav.lib.error import DAVError |  | ||||||
| import requests |  | ||||||
| import voluptuous as vol | import voluptuous as vol | ||||||
|  |  | ||||||
| from homeassistant.components.calendar import ( | from homeassistant.components.calendar import ( | ||||||
|     ENTITY_ID_FORMAT, |     ENTITY_ID_FORMAT, | ||||||
|     PLATFORM_SCHEMA as CALENDAR_PLATFORM_SCHEMA, |     PLATFORM_SCHEMA as CALENDAR_PLATFORM_SCHEMA, | ||||||
|     CalendarEntity, |     CalendarEntity, | ||||||
|     CalendarEntityFeature, |  | ||||||
|     CalendarEvent, |     CalendarEvent, | ||||||
|     is_offset_reached, |     is_offset_reached, | ||||||
| ) | ) | ||||||
| @@ -28,7 +23,6 @@ from homeassistant.const import ( | |||||||
|     CONF_VERIFY_SSL, |     CONF_VERIFY_SSL, | ||||||
| ) | ) | ||||||
| from homeassistant.core import HomeAssistant, callback | from homeassistant.core import HomeAssistant, callback | ||||||
| from homeassistant.exceptions import HomeAssistantError |  | ||||||
| from homeassistant.helpers import config_validation as cv | from homeassistant.helpers import config_validation as cv | ||||||
| from homeassistant.helpers.entity import async_generate_entity_id | from homeassistant.helpers.entity import async_generate_entity_id | ||||||
| from homeassistant.helpers.entity_platform import ( | from homeassistant.helpers.entity_platform import ( | ||||||
| @@ -181,8 +175,6 @@ async def async_setup_entry( | |||||||
| class WebDavCalendarEntity(CoordinatorEntity[CalDavUpdateCoordinator], CalendarEntity): | class WebDavCalendarEntity(CoordinatorEntity[CalDavUpdateCoordinator], CalendarEntity): | ||||||
|     """A device for getting the next Task from a WebDav Calendar.""" |     """A device for getting the next Task from a WebDav Calendar.""" | ||||||
|  |  | ||||||
|     _attr_supported_features = CalendarEntityFeature.CREATE_EVENT |  | ||||||
|  |  | ||||||
|     def __init__( |     def __init__( | ||||||
|         self, |         self, | ||||||
|         name: str | None, |         name: str | None, | ||||||
| @@ -211,31 +203,6 @@ class WebDavCalendarEntity(CoordinatorEntity[CalDavUpdateCoordinator], CalendarE | |||||||
|         """Get all events in a specific time frame.""" |         """Get all events in a specific time frame.""" | ||||||
|         return await self.coordinator.async_get_events(hass, start_date, end_date) |         return await self.coordinator.async_get_events(hass, start_date, end_date) | ||||||
|  |  | ||||||
|     async def async_create_event(self, **kwargs: Any) -> None: |  | ||||||
|         """Create a new event in the calendar.""" |  | ||||||
|         _LOGGER.debug("Event: %s", kwargs) |  | ||||||
|  |  | ||||||
|         item_data: dict[str, Any] = { |  | ||||||
|             "summary": kwargs["summary"], |  | ||||||
|             "dtstart": kwargs["dtstart"], |  | ||||||
|             "dtend": kwargs["dtend"], |  | ||||||
|         } |  | ||||||
|         if description := kwargs.get("description"): |  | ||||||
|             item_data["description"] = description |  | ||||||
|         if location := kwargs.get("location"): |  | ||||||
|             item_data["location"] = location |  | ||||||
|         if rrule := kwargs.get("rrule"): |  | ||||||
|             item_data["rrule"] = rrule |  | ||||||
|  |  | ||||||
|         _LOGGER.debug("ICS data %s", item_data) |  | ||||||
|  |  | ||||||
|         try: |  | ||||||
|             await self.hass.async_add_executor_job( |  | ||||||
|                 partial(self.coordinator.calendar.add_event, **item_data), |  | ||||||
|             ) |  | ||||||
|         except (requests.ConnectionError, DAVError) as err: |  | ||||||
|             raise HomeAssistantError(f"CalDAV save error: {err}") from err |  | ||||||
|  |  | ||||||
|     @callback |     @callback | ||||||
|     def _handle_coordinator_update(self) -> None: |     def _handle_coordinator_update(self) -> None: | ||||||
|         """Update event data.""" |         """Update event data.""" | ||||||
|   | |||||||
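The async_create_event shown on one side of the hunk above hands the blocking caldav call to the executor via functools.partial, since run-in-executor style APIs only forward positional arguments. A plain-Python sketch of that pattern with a stand-in blocking function:

    import asyncio
    from functools import partial

    def add_event(summary: str, dtstart: str, dtend: str, **extra: str) -> dict[str, str]:
        """Stand-in for a blocking calendar-library call."""
        return {"summary": summary, "dtstart": dtstart, "dtend": dtend, **extra}

    async def main() -> None:
        loop = asyncio.get_running_loop()
        # partial() binds the keyword arguments so the executor only sees a no-arg callable.
        created = await loop.run_in_executor(
            None,
            partial(add_event, summary="Dentist", dtstart="2025-01-01T09:00", dtend="2025-01-01T10:00"),
        )
        print(created)

    asyncio.run(main())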
| @@ -169,7 +169,7 @@ class CalendarEventListener: | |||||||
|     def __init__( |     def __init__( | ||||||
|         self, |         self, | ||||||
|         hass: HomeAssistant, |         hass: HomeAssistant, | ||||||
|         job: HassJob[..., Coroutine[Any, Any, None] | Any], |         job: HassJob[..., Coroutine[Any, Any, None]], | ||||||
|         trigger_data: dict[str, Any], |         trigger_data: dict[str, Any], | ||||||
|         fetcher: QueuedEventFetcher, |         fetcher: QueuedEventFetcher, | ||||||
|     ) -> None: |     ) -> None: | ||||||
|   | |||||||
| @@ -74,10 +74,7 @@ from .const import ( | |||||||
|     StreamType, |     StreamType, | ||||||
| ) | ) | ||||||
| from .helper import get_camera_from_entity_id | from .helper import get_camera_from_entity_id | ||||||
| from .img_util import ( | from .img_util import scale_jpeg_camera_image | ||||||
|     TurboJPEGSingleton,  # noqa: F401 |  | ||||||
|     scale_jpeg_camera_image, |  | ||||||
| ) |  | ||||||
| from .prefs import ( | from .prefs import ( | ||||||
|     CameraPreferences, |     CameraPreferences, | ||||||
|     DynamicStreamSettings,  # noqa: F401 |     DynamicStreamSettings,  # noqa: F401 | ||||||
|   | |||||||
| @@ -31,7 +31,7 @@ async def async_setup_entry( | |||||||
|         for location_id, location in coordinator.data["locations"].items() |         for location_id, location in coordinator.data["locations"].items() | ||||||
|     ] |     ] | ||||||
|  |  | ||||||
|     async_add_entities(alarms) |     async_add_entities(alarms, True) | ||||||
|  |  | ||||||
|  |  | ||||||
| class CanaryAlarm( | class CanaryAlarm( | ||||||
|   | |||||||
| @@ -68,7 +68,8 @@ async def async_setup_entry( | |||||||
|             for location_id, location in coordinator.data["locations"].items() |             for location_id, location in coordinator.data["locations"].items() | ||||||
|             for device in location.devices |             for device in location.devices | ||||||
|             if device.is_online |             if device.is_online | ||||||
|         ) |         ), | ||||||
|  |         True, | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
|   | |||||||
| @@ -80,7 +80,7 @@ async def async_setup_entry( | |||||||
|                     if device_type.get("name") in sensor_type[4] |                     if device_type.get("name") in sensor_type[4] | ||||||
|                 ) |                 ) | ||||||
|  |  | ||||||
|     async_add_entities(sensors) |     async_add_entities(sensors, True) | ||||||
|  |  | ||||||
|  |  | ||||||
| class CanarySensor(CoordinatorEntity[CanaryDataUpdateCoordinator], SensorEntity): | class CanarySensor(CoordinatorEntity[CanaryDataUpdateCoordinator], SensorEntity): | ||||||
|   | |||||||
| @@ -4,6 +4,5 @@ | |||||||
|   "codeowners": [], |   "codeowners": [], | ||||||
|   "documentation": "https://www.home-assistant.io/integrations/citybikes", |   "documentation": "https://www.home-assistant.io/integrations/citybikes", | ||||||
|   "iot_class": "cloud_polling", |   "iot_class": "cloud_polling", | ||||||
|   "quality_scale": "legacy", |   "quality_scale": "legacy" | ||||||
|   "requirements": ["python-citybikes==0.3.3"] |  | ||||||
| } | } | ||||||
|   | |||||||
| @@ -5,11 +5,8 @@ from __future__ import annotations | |||||||
| import asyncio | import asyncio | ||||||
| from datetime import timedelta | from datetime import timedelta | ||||||
| import logging | import logging | ||||||
| import sys |  | ||||||
|  |  | ||||||
| import aiohttp | import aiohttp | ||||||
| from citybikes import __version__ as CITYBIKES_CLIENT_VERSION |  | ||||||
| from citybikes.asyncio import Client as CitybikesClient |  | ||||||
| import voluptuous as vol | import voluptuous as vol | ||||||
|  |  | ||||||
| from homeassistant.components.sensor import ( | from homeassistant.components.sensor import ( | ||||||
| @@ -18,18 +15,21 @@ from homeassistant.components.sensor import ( | |||||||
|     SensorEntity, |     SensorEntity, | ||||||
| ) | ) | ||||||
| from homeassistant.const import ( | from homeassistant.const import ( | ||||||
|     APPLICATION_NAME, |     ATTR_ID, | ||||||
|  |     ATTR_LATITUDE, | ||||||
|  |     ATTR_LOCATION, | ||||||
|  |     ATTR_LONGITUDE, | ||||||
|  |     ATTR_NAME, | ||||||
|     CONF_LATITUDE, |     CONF_LATITUDE, | ||||||
|     CONF_LONGITUDE, |     CONF_LONGITUDE, | ||||||
|     CONF_NAME, |     CONF_NAME, | ||||||
|     CONF_RADIUS, |     CONF_RADIUS, | ||||||
|     EVENT_HOMEASSISTANT_CLOSE, |  | ||||||
|     UnitOfLength, |     UnitOfLength, | ||||||
|     __version__, |  | ||||||
| ) | ) | ||||||
| from homeassistant.core import HomeAssistant | from homeassistant.core import HomeAssistant | ||||||
| from homeassistant.exceptions import PlatformNotReady | from homeassistant.exceptions import PlatformNotReady | ||||||
| from homeassistant.helpers import config_validation as cv | from homeassistant.helpers import config_validation as cv | ||||||
|  | from homeassistant.helpers.aiohttp_client import async_get_clientsession | ||||||
| from homeassistant.helpers.entity import async_generate_entity_id | from homeassistant.helpers.entity import async_generate_entity_id | ||||||
| from homeassistant.helpers.entity_platform import AddEntitiesCallback | from homeassistant.helpers.entity_platform import AddEntitiesCallback | ||||||
| from homeassistant.helpers.event import async_track_time_interval | from homeassistant.helpers.event import async_track_time_interval | ||||||
| @@ -40,33 +40,31 @@ from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM | |||||||
|  |  | ||||||
| _LOGGER = logging.getLogger(__name__) | _LOGGER = logging.getLogger(__name__) | ||||||
|  |  | ||||||
| HA_USER_AGENT = ( |  | ||||||
|     f"{APPLICATION_NAME}/{__version__} " |  | ||||||
|     f"python-citybikes/{CITYBIKES_CLIENT_VERSION} " |  | ||||||
|     f"Python/{sys.version_info[0]}.{sys.version_info[1]}" |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| ATTR_UID = "uid" |  | ||||||
| ATTR_LATITUDE = "latitude" |  | ||||||
| ATTR_LONGITUDE = "longitude" |  | ||||||
| ATTR_EMPTY_SLOTS = "empty_slots" | ATTR_EMPTY_SLOTS = "empty_slots" | ||||||
|  | ATTR_EXTRA = "extra" | ||||||
|  | ATTR_FREE_BIKES = "free_bikes" | ||||||
|  | ATTR_NETWORK = "network" | ||||||
|  | ATTR_NETWORKS_LIST = "networks" | ||||||
|  | ATTR_STATIONS_LIST = "stations" | ||||||
| ATTR_TIMESTAMP = "timestamp" | ATTR_TIMESTAMP = "timestamp" | ||||||
|  | ATTR_UID = "uid" | ||||||
|  |  | ||||||
| CONF_NETWORK = "network" | CONF_NETWORK = "network" | ||||||
| CONF_STATIONS_LIST = "stations" | CONF_STATIONS_LIST = "stations" | ||||||
|  |  | ||||||
|  | DEFAULT_ENDPOINT = "https://api.citybik.es/{uri}" | ||||||
| PLATFORM = "citybikes" | PLATFORM = "citybikes" | ||||||
|  |  | ||||||
| MONITORED_NETWORKS = "monitored-networks" | MONITORED_NETWORKS = "monitored-networks" | ||||||
|  |  | ||||||
| DATA_CLIENT = "client" |  | ||||||
|  |  | ||||||
| NETWORKS_URI = "v2/networks" | NETWORKS_URI = "v2/networks" | ||||||
|  |  | ||||||
| REQUEST_TIMEOUT = aiohttp.ClientTimeout(total=5) | REQUEST_TIMEOUT = 5  # In seconds; argument to asyncio.timeout | ||||||
|  |  | ||||||
| SCAN_INTERVAL = timedelta(minutes=5)  # Timely, and doesn't suffocate the API | SCAN_INTERVAL = timedelta(minutes=5)  # Timely, and doesn't suffocate the API | ||||||
|  |  | ||||||
|  | STATIONS_URI = "v2/networks/{uid}?fields=network.stations" | ||||||
|  |  | ||||||
| CITYBIKES_ATTRIBUTION = ( | CITYBIKES_ATTRIBUTION = ( | ||||||
|     "Information provided by the CityBikes Project (https://citybik.es/#about)" |     "Information provided by the CityBikes Project (https://citybik.es/#about)" | ||||||
| ) | ) | ||||||
| @@ -89,6 +87,72 @@ PLATFORM_SCHEMA = vol.All( | |||||||
|     ), |     ), | ||||||
| ) | ) | ||||||
|  |  | ||||||
|  | NETWORK_SCHEMA = vol.Schema( | ||||||
|  |     { | ||||||
|  |         vol.Required(ATTR_ID): cv.string, | ||||||
|  |         vol.Required(ATTR_NAME): cv.string, | ||||||
|  |         vol.Required(ATTR_LOCATION): vol.Schema( | ||||||
|  |             { | ||||||
|  |                 vol.Required(ATTR_LATITUDE): cv.latitude, | ||||||
|  |                 vol.Required(ATTR_LONGITUDE): cv.longitude, | ||||||
|  |             }, | ||||||
|  |             extra=vol.REMOVE_EXTRA, | ||||||
|  |         ), | ||||||
|  |     }, | ||||||
|  |     extra=vol.REMOVE_EXTRA, | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | NETWORKS_RESPONSE_SCHEMA = vol.Schema( | ||||||
|  |     {vol.Required(ATTR_NETWORKS_LIST): [NETWORK_SCHEMA]} | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | STATION_SCHEMA = vol.Schema( | ||||||
|  |     { | ||||||
|  |         vol.Required(ATTR_FREE_BIKES): cv.positive_int, | ||||||
|  |         vol.Required(ATTR_EMPTY_SLOTS): vol.Any(cv.positive_int, None), | ||||||
|  |         vol.Required(ATTR_LATITUDE): cv.latitude, | ||||||
|  |         vol.Required(ATTR_LONGITUDE): cv.longitude, | ||||||
|  |         vol.Required(ATTR_ID): cv.string, | ||||||
|  |         vol.Required(ATTR_NAME): cv.string, | ||||||
|  |         vol.Required(ATTR_TIMESTAMP): cv.string, | ||||||
|  |         vol.Optional(ATTR_EXTRA): vol.Schema( | ||||||
|  |             {vol.Optional(ATTR_UID): cv.string}, extra=vol.REMOVE_EXTRA | ||||||
|  |         ), | ||||||
|  |     }, | ||||||
|  |     extra=vol.REMOVE_EXTRA, | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | STATIONS_RESPONSE_SCHEMA = vol.Schema( | ||||||
|  |     { | ||||||
|  |         vol.Required(ATTR_NETWORK): vol.Schema( | ||||||
|  |             {vol.Required(ATTR_STATIONS_LIST): [STATION_SCHEMA]}, extra=vol.REMOVE_EXTRA | ||||||
|  |         ) | ||||||
|  |     } | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class CityBikesRequestError(Exception): | ||||||
|  |     """Error to indicate a CityBikes API request has failed.""" | ||||||
|  |  | ||||||
|  |  | ||||||
|  | async def async_citybikes_request(hass, uri, schema): | ||||||
|  |     """Perform a request to CityBikes API endpoint, and parse the response.""" | ||||||
|  |     try: | ||||||
|  |         session = async_get_clientsession(hass) | ||||||
|  |  | ||||||
|  |         async with asyncio.timeout(REQUEST_TIMEOUT): | ||||||
|  |             req = await session.get(DEFAULT_ENDPOINT.format(uri=uri)) | ||||||
|  |  | ||||||
|  |         json_response = await req.json() | ||||||
|  |         return schema(json_response) | ||||||
|  |     except (TimeoutError, aiohttp.ClientError): | ||||||
|  |         _LOGGER.error("Could not connect to CityBikes API endpoint") | ||||||
|  |     except ValueError: | ||||||
|  |         _LOGGER.error("Received non-JSON data from CityBikes API endpoint") | ||||||
|  |     except vol.Invalid as err: | ||||||
|  |         _LOGGER.error("Received unexpected JSON from CityBikes API endpoint: %s", err) | ||||||
|  |     raise CityBikesRequestError | ||||||
|  |  | ||||||
|  |  | ||||||
| async def async_setup_platform( | async def async_setup_platform( | ||||||
|     hass: HomeAssistant, |     hass: HomeAssistant, | ||||||
| @@ -111,14 +175,6 @@ async def async_setup_platform( | |||||||
|             radius, UnitOfLength.FEET, UnitOfLength.METERS |             radius, UnitOfLength.FEET, UnitOfLength.METERS | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|     client = CitybikesClient(user_agent=HA_USER_AGENT, timeout=REQUEST_TIMEOUT) |  | ||||||
|     hass.data[PLATFORM][DATA_CLIENT] = client |  | ||||||
|  |  | ||||||
|     async def _async_close_client(event): |  | ||||||
|         await client.close() |  | ||||||
|  |  | ||||||
|     hass.bus.async_listen_once(EVENT_HOMEASSISTANT_CLOSE, _async_close_client) |  | ||||||
|  |  | ||||||
|     # Create a single instance of CityBikesNetworks. |     # Create a single instance of CityBikesNetworks. | ||||||
|     networks = hass.data.setdefault(CITYBIKES_NETWORKS, CityBikesNetworks(hass)) |     networks = hass.data.setdefault(CITYBIKES_NETWORKS, CityBikesNetworks(hass)) | ||||||
|  |  | ||||||
| @@ -138,10 +194,10 @@ async def async_setup_platform( | |||||||
|     devices = [] |     devices = [] | ||||||
|     for station in network.stations: |     for station in network.stations: | ||||||
|         dist = location_util.distance( |         dist = location_util.distance( | ||||||
|             latitude, longitude, station.latitude, station.longitude |             latitude, longitude, station[ATTR_LATITUDE], station[ATTR_LONGITUDE] | ||||||
|         ) |         ) | ||||||
|         station_id = station.id |         station_id = station[ATTR_ID] | ||||||
|         station_uid = str(station.extra.get(ATTR_UID, "")) |         station_uid = str(station.get(ATTR_EXTRA, {}).get(ATTR_UID, "")) | ||||||
|  |  | ||||||
|         if radius > dist or stations_list.intersection((station_id, station_uid)): |         if radius > dist or stations_list.intersection((station_id, station_uid)): | ||||||
|             if name: |             if name: | ||||||
| @@ -160,7 +216,6 @@ class CityBikesNetworks: | |||||||
|     def __init__(self, hass): |     def __init__(self, hass): | ||||||
|         """Initialize the networks instance.""" |         """Initialize the networks instance.""" | ||||||
|         self.hass = hass |         self.hass = hass | ||||||
|         self.client = hass.data[PLATFORM][DATA_CLIENT] |  | ||||||
|         self.networks = None |         self.networks = None | ||||||
|         self.networks_loading = asyncio.Condition() |         self.networks_loading = asyncio.Condition() | ||||||
|  |  | ||||||
| @@ -169,21 +224,24 @@ class CityBikesNetworks: | |||||||
|         try: |         try: | ||||||
|             await self.networks_loading.acquire() |             await self.networks_loading.acquire() | ||||||
|             if self.networks is None: |             if self.networks is None: | ||||||
|                 self.networks = await self.client.networks.fetch() |                 networks = await async_citybikes_request( | ||||||
|         except aiohttp.ClientError as err: |                     self.hass, NETWORKS_URI, NETWORKS_RESPONSE_SCHEMA | ||||||
|  |                 ) | ||||||
|  |                 self.networks = networks[ATTR_NETWORKS_LIST] | ||||||
|  |         except CityBikesRequestError as err: | ||||||
|             raise PlatformNotReady from err |             raise PlatformNotReady from err | ||||||
|         else: |         else: | ||||||
|             result = None |             result = None | ||||||
|             minimum_dist = None |             minimum_dist = None | ||||||
|             for network in self.networks: |             for network in self.networks: | ||||||
|                 network_latitude = network.location.latitude |                 network_latitude = network[ATTR_LOCATION][ATTR_LATITUDE] | ||||||
|                 network_longitude = network.location.longitude |                 network_longitude = network[ATTR_LOCATION][ATTR_LONGITUDE] | ||||||
|                 dist = location_util.distance( |                 dist = location_util.distance( | ||||||
|                     latitude, longitude, network_latitude, network_longitude |                     latitude, longitude, network_latitude, network_longitude | ||||||
|                 ) |                 ) | ||||||
|                 if minimum_dist is None or dist < minimum_dist: |                 if minimum_dist is None or dist < minimum_dist: | ||||||
|                     minimum_dist = dist |                     minimum_dist = dist | ||||||
|                     result = network.id |                     result = network[ATTR_ID] | ||||||
|  |  | ||||||
|             return result |             return result | ||||||
|         finally: |         finally: | ||||||
| @@ -199,20 +257,22 @@ class CityBikesNetwork: | |||||||
|         self.network_id = network_id |         self.network_id = network_id | ||||||
|         self.stations = [] |         self.stations = [] | ||||||
|         self.ready = asyncio.Event() |         self.ready = asyncio.Event() | ||||||
|         self.client = hass.data[PLATFORM][DATA_CLIENT] |  | ||||||
|  |  | ||||||
|     async def async_refresh(self, now=None): |     async def async_refresh(self, now=None): | ||||||
|         """Refresh the state of the network.""" |         """Refresh the state of the network.""" | ||||||
|         try: |         try: | ||||||
|             network = await self.client.network(uid=self.network_id).fetch() |             network = await async_citybikes_request( | ||||||
|         except aiohttp.ClientError as err: |                 self.hass, | ||||||
|             if now is None: |                 STATIONS_URI.format(uid=self.network_id), | ||||||
|                 raise PlatformNotReady from err |                 STATIONS_RESPONSE_SCHEMA, | ||||||
|             self.ready.clear() |             ) | ||||||
|             return |             self.stations = network[ATTR_NETWORK][ATTR_STATIONS_LIST] | ||||||
|  |  | ||||||
|         self.stations = network.stations |  | ||||||
|             self.ready.set() |             self.ready.set() | ||||||
|  |         except CityBikesRequestError as err: | ||||||
|  |             if now is not None: | ||||||
|  |                 self.ready.clear() | ||||||
|  |             else: | ||||||
|  |                 raise PlatformNotReady from err | ||||||
|  |  | ||||||
|  |  | ||||||
| class CityBikesStation(SensorEntity): | class CityBikesStation(SensorEntity): | ||||||
| @@ -230,13 +290,16 @@ class CityBikesStation(SensorEntity): | |||||||
|  |  | ||||||
|     async def async_update(self) -> None: |     async def async_update(self) -> None: | ||||||
|         """Update station state.""" |         """Update station state.""" | ||||||
|         station = next(s for s in self._network.stations if s.id == self._station_id) |         for station in self._network.stations: | ||||||
|         self._attr_name = station.name |             if station[ATTR_ID] == self._station_id: | ||||||
|         self._attr_native_value = station.free_bikes |                 station_data = station | ||||||
|  |                 break | ||||||
|  |         self._attr_name = station_data.get(ATTR_NAME) | ||||||
|  |         self._attr_native_value = station_data.get(ATTR_FREE_BIKES) | ||||||
|         self._attr_extra_state_attributes = { |         self._attr_extra_state_attributes = { | ||||||
|             ATTR_UID: station.extra.get(ATTR_UID), |             ATTR_UID: station_data.get(ATTR_EXTRA, {}).get(ATTR_UID), | ||||||
|             ATTR_LATITUDE: station.latitude, |             ATTR_LATITUDE: station_data.get(ATTR_LATITUDE), | ||||||
|             ATTR_LONGITUDE: station.longitude, |             ATTR_LONGITUDE: station_data.get(ATTR_LONGITUDE), | ||||||
|             ATTR_EMPTY_SLOTS: station.empty_slots, |             ATTR_EMPTY_SLOTS: station_data.get(ATTR_EMPTY_SLOTS), | ||||||
|             ATTR_TIMESTAMP: station.timestamp, |             ATTR_TIMESTAMP: station_data.get(ATTR_TIMESTAMP), | ||||||
|         } |         } | ||||||
|   | |||||||
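The citybikes hunk above validates the raw API payload with voluptuous schemas declared with extra=vol.REMOVE_EXTRA, so unknown keys are silently dropped before the sensor code reads the result. A small sketch of that behaviour (requires the voluptuous package; the keys are a subset of the station schema above):

    import voluptuous as vol

    STATION_SCHEMA = vol.Schema(
        {
            vol.Required("id"): str,
            vol.Required("name"): str,
            vol.Required("free_bikes"): int,
            vol.Optional("extra"): vol.Schema(
                {vol.Optional("uid"): str}, extra=vol.REMOVE_EXTRA
            ),
        },
        extra=vol.REMOVE_EXTRA,
    )

    raw = {
        "id": "abc",
        "name": "Main St",
        "free_bikes": 4,
        "latitude": 52.52,                    # not declared above, so it is dropped
        "extra": {"uid": "17", "slots": 20},  # "slots" is dropped by the nested schema
    }
    print(STATION_SCHEMA(raw))  # latitude and extra["slots"] are gone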
| @@ -19,7 +19,7 @@ from homeassistant.components.alexa import ( | |||||||
|     errors as alexa_errors, |     errors as alexa_errors, | ||||||
|     smart_home as alexa_smart_home, |     smart_home as alexa_smart_home, | ||||||
| ) | ) | ||||||
| from homeassistant.components.camera import async_register_ice_servers | from homeassistant.components.camera.webrtc import async_register_ice_servers | ||||||
| from homeassistant.components.google_assistant import smart_home as ga | from homeassistant.components.google_assistant import smart_home as ga | ||||||
| from homeassistant.const import __version__ as HA_VERSION | from homeassistant.const import __version__ as HA_VERSION | ||||||
| from homeassistant.core import Context, HassJob, HomeAssistant, callback | from homeassistant.core import Context, HassJob, HomeAssistant, callback | ||||||
|   | |||||||
| @@ -12,9 +12,7 @@ from hass_nabucasa.google_report_state import ErrorResponse | |||||||
|  |  | ||||||
| from homeassistant.components.binary_sensor import BinarySensorDeviceClass | from homeassistant.components.binary_sensor import BinarySensorDeviceClass | ||||||
| from homeassistant.components.google_assistant import DOMAIN as GOOGLE_DOMAIN | from homeassistant.components.google_assistant import DOMAIN as GOOGLE_DOMAIN | ||||||
| from homeassistant.components.google_assistant.helpers import (  # pylint: disable=hass-component-root-import | from homeassistant.components.google_assistant.helpers import AbstractConfig | ||||||
|     AbstractConfig, |  | ||||||
| ) |  | ||||||
| from homeassistant.components.homeassistant.exposed_entities import ( | from homeassistant.components.homeassistant.exposed_entities import ( | ||||||
|     async_expose_entity, |     async_expose_entity, | ||||||
|     async_get_assistant_settings, |     async_get_assistant_settings, | ||||||
|   | |||||||
| @@ -13,6 +13,6 @@ | |||||||
|   "integration_type": "system", |   "integration_type": "system", | ||||||
|   "iot_class": "cloud_push", |   "iot_class": "cloud_push", | ||||||
|   "loggers": ["acme", "hass_nabucasa", "snitun"], |   "loggers": ["acme", "hass_nabucasa", "snitun"], | ||||||
|   "requirements": ["hass-nabucasa==1.4.0"], |   "requirements": ["hass-nabucasa==1.2.0"], | ||||||
|   "single_config_entry": true |   "single_config_entry": true | ||||||
| } | } | ||||||
|   | |||||||
| @@ -11,7 +11,7 @@ from hass_nabucasa.voice import MAP_VOICE, Gender | |||||||
| from homeassistant.auth.const import GROUP_ID_ADMIN | from homeassistant.auth.const import GROUP_ID_ADMIN | ||||||
| from homeassistant.auth.models import User | from homeassistant.auth.models import User | ||||||
| from homeassistant.components import webhook | from homeassistant.components import webhook | ||||||
| from homeassistant.components.google_assistant.http import (  # pylint: disable=hass-component-root-import | from homeassistant.components.google_assistant.http import ( | ||||||
|     async_get_users as async_get_google_assistant_users, |     async_get_users as async_get_google_assistant_users, | ||||||
| ) | ) | ||||||
| from homeassistant.core import HomeAssistant, callback | from homeassistant.core import HomeAssistant, callback | ||||||
|   | |||||||
@@ -38,10 +38,6 @@ TYPE_SPECIFY_COUNTRY = "specify_country_code"
 
 _LOGGER = logging.getLogger(__name__)
 
-DESCRIPTION_PLACEHOLDER = {
-    "register_link": "https://electricitymaps.com/free-tier",
-}
-
 
 class ElectricityMapsConfigFlow(ConfigFlow, domain=DOMAIN):
     """Handle a config flow for Co2signal."""
@@ -74,7 +70,6 @@ class ElectricityMapsConfigFlow(ConfigFlow, domain=DOMAIN):
             return self.async_show_form(
                 step_id="user",
                 data_schema=data_schema,
-                description_placeholders=DESCRIPTION_PLACEHOLDER,
             )
 
         data = {CONF_API_KEY: user_input[CONF_API_KEY]}
@@ -184,5 +179,4 @@ class ElectricityMapsConfigFlow(ConfigFlow, domain=DOMAIN):
             step_id=step_id,
             data_schema=data_schema,
             errors=errors,
-            description_placeholders=DESCRIPTION_PLACEHOLDER,
         )

@@ -18,6 +18,7 @@ rules:
     status: todo
     comment: |
       The config flow misses data descriptions.
+      Remove URLs from data descriptions, they should be replaced with placeholders.
       Make use of Electricity Maps zone keys in country code as dropdown.
       Make use of location selector for coordinates.
   dependency-transparency: done

@@ -6,7 +6,7 @@
           "location": "[%key:common::config_flow::data::location%]",
           "api_key": "[%key:common::config_flow::data::access_token%]"
         },
-        "description": "Visit the [Electricity Maps page]({register_link}) to request a token."
+        "description": "Visit https://electricitymaps.com/free-tier to request a token."
       },
       "coordinates": {
         "data": {

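The co2signal hunks above toggle one mechanism: async_show_form can pass description_placeholders, and the translation string then refers to the value as {register_link} instead of hard-coding the URL. A minimal sketch of that pattern, assuming a hypothetical "example" domain and no data schema; only the placeholder key and the async_show_form call are taken from the hunks above:

from homeassistant.config_entries import ConfigFlow

DESCRIPTION_PLACEHOLDER = {
    # Referenced as {register_link} in the integration's strings.json
    "register_link": "https://electricitymaps.com/free-tier",
}


class ExampleConfigFlow(ConfigFlow, domain="example"):
    """Hypothetical flow illustrating description_placeholders."""

    async def async_step_user(self, user_input=None):
        """Show the user step with the URL injected as a placeholder."""
        if user_input is None:
            return self.async_show_form(
                step_id="user",
                description_placeholders=DESCRIPTION_PLACEHOLDER,
            )
        return self.async_create_entry(title="Example", data=user_input)

The matching translation entry would then read "Visit the [Electricity Maps page]({register_link}) to request a token.", which is exactly the string on the removed side of the strings hunk.
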
@@ -166,7 +166,6 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN):
                 data_schema=STEP_USER_DATA_SCHEMA,
                 description_placeholders={
                     "account_name": self.reauth_entry.title,
-                    "developer_url": "https://www.coinbase.com/developer-platform",
                 },
                 errors=errors,
             )
@@ -196,7 +195,6 @@
             data_schema=STEP_USER_DATA_SCHEMA,
             description_placeholders={
                 "account_name": self.reauth_entry.title,
-                "developer_url": "https://www.coinbase.com/developer-platform",
             },
             errors=errors,
         )

@@ -11,7 +11,7 @@
       },
       "reauth_confirm": {
         "title": "Update Coinbase API credentials",
-        "description": "Your current Coinbase API key appears to be for the deprecated v2 API. Please reconfigure with a new API key created for the v3 API. Visit the [Developer Platform]({developer_url}) to create new credentials for {account_name}.",
+        "description": "Your current Coinbase API key appears to be for the deprecated v2 API. Please reconfigure with a new API key created for the v3 API. Visit https://www.coinbase.com/developer-platform  to create new credentials for {account_name}.",
         "data": {
           "api_key": "[%key:common::config_flow::data::api_key%]",
           "api_token": "API secret"

@@ -15,7 +15,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.update_coordinator import CoordinatorEntity
 
 from .coordinator import ComelitConfigEntry, ComelitVedoSystem
-from .utils import DeviceType, new_device_listener
 
 # Coordinator is used to centralize the data updates
 PARALLEL_UPDATES = 0
@@ -30,19 +29,23 @@ async def async_setup_entry(
 
     coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
 
-    def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
-        """Add entities for new monitors."""
-        entities = [
-            ComelitVedoBinarySensorEntity(coordinator, device, config_entry.entry_id)
-            for device in coordinator.data["alarm_zones"].values()
-            if device in new_devices
-        ]
-        if entities:
-            async_add_entities(entities)
-
-    config_entry.async_on_unload(
-        new_device_listener(coordinator, _add_new_entities, "alarm_zones")
-    )
+    known_devices: set[int] = set()
+
+    def _check_device() -> None:
+        current_devices = set(coordinator.data["alarm_zones"])
+        new_devices = current_devices - known_devices
+        if new_devices:
+            known_devices.update(new_devices)
+            async_add_entities(
+                ComelitVedoBinarySensorEntity(
+                    coordinator, device, config_entry.entry_id
+                )
+                for device in coordinator.data["alarm_zones"].values()
+                if device.index in new_devices
+            )
+
+    _check_device()
+    config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
 
 
 class ComelitVedoBinarySensorEntity(

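The binary sensor hunk above, and the cover, light, sensor, and switch hunks that follow, swap between two spellings of the same idea: remember which device indices already have entities and create entities only for indices that appear in later coordinator refreshes. A stripped-down sketch of that pattern, assuming a generic coordinator whose data maps a device type to a dict keyed by device index; the names here are illustrative, not the integration's API:

def track_new_devices(coordinator, async_add_entities, make_entity, data_type):
    """Add entities for devices present now and for any discovered later."""
    known: set[int] = set()

    def _check_device() -> None:
        current = set(coordinator.data[data_type])
        new = current - known
        if new:
            known.update(new)
            async_add_entities(
                make_entity(coordinator.data[data_type][idx]) for idx in new
            )

    _check_device()  # create entities for devices already known at setup
    # async_add_listener returns an unsubscribe callable, which the hunks
    # hand to config_entry.async_on_unload
    return coordinator.async_add_listener(_check_device)
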
@@ -7,14 +7,14 @@ from typing import Any, cast
 from aiocomelit import ComelitSerialBridgeObject
 from aiocomelit.const import COVER, STATE_COVER, STATE_OFF, STATE_ON
 
-from homeassistant.components.cover import CoverDeviceClass, CoverEntity, CoverState
+from homeassistant.components.cover import CoverDeviceClass, CoverEntity
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.restore_state import RestoreEntity
 
 from .coordinator import ComelitConfigEntry, ComelitSerialBridge
 from .entity import ComelitBridgeBaseEntity
-from .utils import DeviceType, bridge_api_call, new_device_listener
+from .utils import bridge_api_call
 
 # Coordinator is used to centralize the data updates
 PARALLEL_UPDATES = 0
@@ -29,20 +29,22 @@ async def async_setup_entry(
 
     coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
 
-    def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
-        """Add entities for new monitors."""
-        entities = [
-            ComelitCoverEntity(coordinator, device, config_entry.entry_id)
-            for device in coordinator.data[dev_type].values()
-            if device in new_devices
-        ]
-        if entities:
-            async_add_entities(entities)
-
-    config_entry.async_on_unload(
-        new_device_listener(coordinator, _add_new_entities, COVER)
-    )
+    known_devices: set[int] = set()
+
+    def _check_device() -> None:
+        current_devices = set(coordinator.data[COVER])
+        new_devices = current_devices - known_devices
+        if new_devices:
+            known_devices.update(new_devices)
+            async_add_entities(
+                ComelitCoverEntity(coordinator, device, config_entry.entry_id)
+                for device in coordinator.data[COVER].values()
+                if device.index in new_devices
+            )
+
+    _check_device()
+    config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
 
 
 class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
     """Cover device."""
@@ -60,6 +62,7 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
         super().__init__(coordinator, device, config_entry_entry_id)
         # Device doesn't provide a status so we assume UNKNOWN at first startup
         self._last_action: int | None = None
+        self._last_state: str | None = None
 
     def _current_action(self, action: str) -> bool:
         """Return the current cover action."""
@@ -95,6 +98,7 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
     @bridge_api_call
     async def _cover_set_state(self, action: int, state: int) -> None:
         """Set desired cover state."""
+        self._last_state = self.state
         await self.coordinator.api.set_device_status(COVER, self._device.index, action)
         self.coordinator.data[COVER][self._device.index].status = state
         self.async_write_ha_state()
@@ -120,10 +124,5 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
 
         await super().async_added_to_hass()
 
-        if (state := await self.async_get_last_state()) is not None:
-            if state.state == CoverState.CLOSED:
-                self._last_action = STATE_COVER.index(CoverState.CLOSING)
-            if state.state == CoverState.OPEN:
-                self._last_action = STATE_COVER.index(CoverState.OPENING)
-
-            self._attr_is_closed = state.state == CoverState.CLOSED
+        if last_state := await self.async_get_last_state():
+            self._last_state = last_state.state

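Both sides of the last cover hunk lean on RestoreEntity to recover state after a restart, since the cover hardware reports no position of its own; they differ only in how much of the restored state they keep. A minimal sketch of the restore step, assuming a hypothetical RestoreEntity-based cover; the CoverState comparison mirrors the removed lines:

from homeassistant.components.cover import CoverEntity, CoverState
from homeassistant.helpers.restore_state import RestoreEntity


class ExampleRestoredCover(CoverEntity, RestoreEntity):
    """Hypothetical cover that rebuilds is_closed from the last recorded state."""

    async def async_added_to_hass(self) -> None:
        """Restore the previous state once the entity is registered."""
        await super().async_added_to_hass()
        if (state := await self.async_get_last_state()) is not None:
            self._attr_is_closed = state.state == CoverState.CLOSED
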
@@ -12,7 +12,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 
 from .coordinator import ComelitConfigEntry, ComelitSerialBridge
 from .entity import ComelitBridgeBaseEntity
-from .utils import DeviceType, bridge_api_call, new_device_listener
+from .utils import bridge_api_call
 
 # Coordinator is used to centralize the data updates
 PARALLEL_UPDATES = 0
@@ -27,20 +27,22 @@ async def async_setup_entry(
 
     coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
 
-    def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
-        """Add entities for new monitors."""
-        entities = [
-            ComelitLightEntity(coordinator, device, config_entry.entry_id)
-            for device in coordinator.data[dev_type].values()
-            if device in new_devices
-        ]
-        if entities:
-            async_add_entities(entities)
-
-    config_entry.async_on_unload(
-        new_device_listener(coordinator, _add_new_entities, LIGHT)
-    )
+    known_devices: set[int] = set()
+
+    def _check_device() -> None:
+        current_devices = set(coordinator.data[LIGHT])
+        new_devices = current_devices - known_devices
+        if new_devices:
+            known_devices.update(new_devices)
+            async_add_entities(
+                ComelitLightEntity(coordinator, device, config_entry.entry_id)
+                for device in coordinator.data[LIGHT].values()
+                if device.index in new_devices
+            )
+
+    _check_device()
+    config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
 
 
 class ComelitLightEntity(ComelitBridgeBaseEntity, LightEntity):
     """Light device."""

@@ -8,5 +8,5 @@
   "iot_class": "local_polling",
   "loggers": ["aiocomelit"],
   "quality_scale": "platinum",
-  "requirements": ["aiocomelit==1.1.2"]
+  "requirements": ["aiocomelit==1.1.1"]
 }

@@ -20,7 +20,6 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
 
 from .coordinator import ComelitConfigEntry, ComelitSerialBridge, ComelitVedoSystem
 from .entity import ComelitBridgeBaseEntity
-from .utils import DeviceType, new_device_listener
 
 # Coordinator is used to centralize the data updates
 PARALLEL_UPDATES = 0
@@ -66,23 +65,25 @@ async def async_setup_bridge_entry(
 
     coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
 
-    def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
-        """Add entities for new monitors."""
-        entities = [
-            ComelitBridgeSensorEntity(
-                coordinator, device, config_entry.entry_id, sensor_desc
-            )
-            for sensor_desc in SENSOR_BRIDGE_TYPES
-            for device in coordinator.data[dev_type].values()
-            if device in new_devices
-        ]
-        if entities:
-            async_add_entities(entities)
-
-    config_entry.async_on_unload(
-        new_device_listener(coordinator, _add_new_entities, OTHER)
-    )
+    known_devices: set[int] = set()
+
+    def _check_device() -> None:
+        current_devices = set(coordinator.data[OTHER])
+        new_devices = current_devices - known_devices
+        if new_devices:
+            known_devices.update(new_devices)
+            async_add_entities(
+                ComelitBridgeSensorEntity(
+                    coordinator, device, config_entry.entry_id, sensor_desc
+                )
+                for sensor_desc in SENSOR_BRIDGE_TYPES
+                for device in coordinator.data[OTHER].values()
+                if device.index in new_devices
+            )
+
+    _check_device()
+    config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
 
 
 async def async_setup_vedo_entry(
     hass: HomeAssistant,
@@ -93,23 +94,25 @@ async def async_setup_vedo_entry(
 
     coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
 
-    def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
-        """Add entities for new monitors."""
-        entities = [
-            ComelitVedoSensorEntity(
-                coordinator, device, config_entry.entry_id, sensor_desc
-            )
-            for sensor_desc in SENSOR_VEDO_TYPES
-            for device in coordinator.data["alarm_zones"].values()
-            if device in new_devices
-        ]
-        if entities:
-            async_add_entities(entities)
-
-    config_entry.async_on_unload(
-        new_device_listener(coordinator, _add_new_entities, "alarm_zones")
-    )
+    known_devices: set[int] = set()
+
+    def _check_device() -> None:
+        current_devices = set(coordinator.data["alarm_zones"])
+        new_devices = current_devices - known_devices
+        if new_devices:
+            known_devices.update(new_devices)
+            async_add_entities(
+                ComelitVedoSensorEntity(
+                    coordinator, device, config_entry.entry_id, sensor_desc
+                )
+                for sensor_desc in SENSOR_VEDO_TYPES
+                for device in coordinator.data["alarm_zones"].values()
+                if device.index in new_devices
+            )
+
+    _check_device()
+    config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
 
 
 class ComelitBridgeSensorEntity(ComelitBridgeBaseEntity, SensorEntity):
     """Sensor device."""

@@ -13,7 +13,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 
 from .coordinator import ComelitConfigEntry, ComelitSerialBridge
 from .entity import ComelitBridgeBaseEntity
-from .utils import DeviceType, bridge_api_call, new_device_listener
+from .utils import bridge_api_call
 
 # Coordinator is used to centralize the data updates
 PARALLEL_UPDATES = 0
@@ -28,21 +28,36 @@ async def async_setup_entry(
 
     coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
 
-    def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
-        """Add entities for new monitors."""
-        entities = [
-            ComelitSwitchEntity(coordinator, device, config_entry.entry_id)
-            for device in coordinator.data[dev_type].values()
-            if device in new_devices
-        ]
-        if entities:
-            async_add_entities(entities)
-
-    for dev_type in (IRRIGATION, OTHER):
-        config_entry.async_on_unload(
-            new_device_listener(coordinator, _add_new_entities, dev_type)
-        )
+    entities: list[ComelitSwitchEntity] = []
+    entities.extend(
+        ComelitSwitchEntity(coordinator, device, config_entry.entry_id)
+        for device in coordinator.data[IRRIGATION].values()
+    )
+    entities.extend(
+        ComelitSwitchEntity(coordinator, device, config_entry.entry_id)
+        for device in coordinator.data[OTHER].values()
+    )
+    async_add_entities(entities)
+
+    known_devices: dict[str, set[int]] = {
+        dev_type: set() for dev_type in (IRRIGATION, OTHER)
+    }
+
+    def _check_device() -> None:
+        for dev_type in (IRRIGATION, OTHER):
+            current_devices = set(coordinator.data[dev_type])
+            new_devices = current_devices - known_devices[dev_type]
+            if new_devices:
+                known_devices[dev_type].update(new_devices)
+                async_add_entities(
+                    ComelitSwitchEntity(coordinator, device, config_entry.entry_id)
+                    for device in coordinator.data[dev_type].values()
+                    if device.index in new_devices
+                )
+
+    _check_device()
+    config_entry.async_on_unload(coordinator.async_add_listener(_check_device))
 
 
 class ComelitSwitchEntity(ComelitBridgeBaseEntity, SwitchEntity):
     """Switch device."""

@@ -4,11 +4,7 @@ from collections.abc import Awaitable, Callable, Coroutine
 from functools import wraps
 from typing import Any, Concatenate
 
-from aiocomelit.api import (
-    ComelitSerialBridgeObject,
-    ComelitVedoAreaObject,
-    ComelitVedoZoneObject,
-)
+from aiocomelit import ComelitSerialBridgeObject
 from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData
 from aiohttp import ClientSession, CookieJar
 
@@ -23,11 +19,8 @@ from homeassistant.helpers import (
 )
 
 from .const import _LOGGER, DOMAIN
-from .coordinator import ComelitBaseCoordinator
 from .entity import ComelitBridgeBaseEntity
 
-DeviceType = ComelitSerialBridgeObject | ComelitVedoAreaObject | ComelitVedoZoneObject
-
 
 async def async_client_session(hass: HomeAssistant) -> ClientSession:
     """Return a new aiohttp session."""
@@ -120,41 +113,3 @@ def bridge_api_call[_T: ComelitBridgeBaseEntity, **_P](
             self.coordinator.config_entry.async_start_reauth(self.hass)
 
     return cmd_wrapper
-
-
-def new_device_listener(
-    coordinator: ComelitBaseCoordinator,
-    new_devices_callback: Callable[
-        [
-            list[
-                ComelitSerialBridgeObject
-                | ComelitVedoAreaObject
-                | ComelitVedoZoneObject
-            ],
-            str,
-        ],
-        None,
-    ],
-    data_type: str,
-) -> Callable[[], None]:
-    """Subscribe to coordinator updates to check for new devices."""
-    known_devices: dict[str, list[int]] = {}
-
-    def _check_devices() -> None:
-        """Check for new devices and call callback with any new monitors."""
-        if not coordinator.data:
-            return
-
-        new_devices: list[DeviceType] = []
-        for _id in coordinator.data[data_type]:
-            if _id not in (id_list := known_devices.get(data_type, [])):
-                known_devices.update({data_type: [*id_list, _id]})
-                new_devices.append(coordinator.data[data_type][_id])
-
-        if new_devices:
-            new_devices_callback(new_devices, data_type)
-
-    # Check for devices immediately
-    _check_devices()
-
-    return coordinator.async_add_listener(_check_devices)

@@ -78,10 +78,7 @@ class CompitConfigFlow(ConfigFlow, domain=DOMAIN):
                     )
 
         return self.async_show_form(
-            step_id="user",
-            data_schema=STEP_USER_DATA_SCHEMA,
-            errors=errors,
-            description_placeholders={"compit_url": "https://inext.compit.pl/"},
+            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
         )
 
     async def async_step_reauth(self, data: Mapping[str, Any]) -> ConfigFlowResult:

@@ -2,7 +2,7 @@
   "config": {
     "step": {
       "user": {
-        "description": "Please enter your {compit_url} credentials.",
+        "description": "Please enter your https://inext.compit.pl/ credentials.",
         "title": "Connect to Compit iNext",
         "data": {
           "email": "[%key:common::config_flow::data::email%]",

@@ -6,9 +6,7 @@ from typing import Any
 import uuid
 
 from homeassistant.components.automation import DOMAIN as AUTOMATION_DOMAIN
-from homeassistant.components.automation.config import (  # pylint: disable=hass-component-root-import
-    async_validate_config_item,
-)
+from homeassistant.components.automation.config import async_validate_config_item
 from homeassistant.config import AUTOMATION_CONFIG_PATH
 from homeassistant.const import CONF_ID, SERVICE_RELOAD
 from homeassistant.core import HomeAssistant, callback

@@ -5,9 +5,7 @@ from __future__ import annotations
 from typing import Any
 
 from homeassistant.components.script import DOMAIN as SCRIPT_DOMAIN
-from homeassistant.components.script.config import (  # pylint: disable=hass-component-root-import
-    async_validate_config_item,
-)
+from homeassistant.components.script.config import async_validate_config_item
 from homeassistant.config import SCRIPT_CONFIG_PATH
 from homeassistant.const import SERVICE_RELOAD
 from homeassistant.core import HomeAssistant, callback

Some files were not shown because too many files have changed in this diff.