Mirror of https://github.com/home-assistant/core.git, synced 2025-10-31 06:29:31 +00:00

Compare commits

2 Commits

copilot/mo ... adjust_sen
| Author | SHA1 | Date |
|---|---|---|
|  | 01aa70e249 |  |
|  | 9f6a0c0c77 |  |
| @@ -58,7 +58,6 @@ base_platforms: &base_platforms | ||||
| # Extra components that trigger the full suite | ||||
| components: &components | ||||
|   - homeassistant/components/alexa/** | ||||
|   - homeassistant/components/analytics/** | ||||
|   - homeassistant/components/application_credentials/** | ||||
|   - homeassistant/components/assist_pipeline/** | ||||
|   - homeassistant/components/auth/** | ||||
|   | ||||
| @@ -33,7 +33,7 @@ | ||||
|         "GitHub.vscode-pull-request-github", | ||||
|         "GitHub.copilot" | ||||
|       ], | ||||
|       // Please keep this file in sync with settings in home-assistant/.vscode/settings.default.jsonc | ||||
|       // Please keep this file in sync with settings in home-assistant/.vscode/settings.default.json | ||||
|       "settings": { | ||||
|         "python.experiments.optOutFrom": ["pythonTestAdapter"], | ||||
|         "python.defaultInterpreterPath": "/home/vscode/.local/ha-venv/bin/python", | ||||
| @@ -41,7 +41,6 @@ | ||||
|         "python.terminal.activateEnvInCurrentTerminal": true, | ||||
|         "python.testing.pytestArgs": ["--no-cov"], | ||||
|         "pylint.importStrategy": "fromEnvironment", | ||||
|         "python.analysis.typeCheckingMode": "basic", | ||||
|         "editor.formatOnPaste": false, | ||||
|         "editor.formatOnSave": true, | ||||
|         "editor.formatOnType": true, | ||||
| @@ -63,9 +62,6 @@ | ||||
|         "[python]": { | ||||
|           "editor.defaultFormatter": "charliermarsh.ruff" | ||||
|         }, | ||||
|         "[json][jsonc][yaml]": { | ||||
|           "editor.defaultFormatter": "esbenp.prettier-vscode" | ||||
|         }, | ||||
|         "json.schemas": [ | ||||
|           { | ||||
|             "fileMatch": ["homeassistant/components/*/manifest.json"], | ||||
|   | ||||

1  .github/copilot-instructions.md  (vendored)
							| @@ -74,7 +74,6 @@ rules: | ||||
| - **Formatting**: Ruff | ||||
| - **Linting**: PyLint and Ruff | ||||
| - **Type Checking**: MyPy | ||||
| - **Lint/Type/Format Fixes**: Always prefer addressing the underlying issue (e.g., import the typed source, update shared stubs, align with Ruff expectations, or correct formatting at the source) before disabling a rule, adding `# type: ignore`, or skipping a formatter. Treat suppressions and `noqa` comments as a last resort once no compliant fix exists | ||||
| - **Testing**: pytest with plain functions and fixtures | ||||
| - **Language**: American English for all code, comments, and documentation (use sentence case, including titles) | ||||
|  | ||||
|   | ||||
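
The "Lint/Type/Format Fixes" rule above prefers fixing the root cause over silencing tools. A minimal, hypothetical Python sketch of the difference (the `fetch_state` helper and its annotations are purely illustrative, not Home Assistant code):

```python
# Hypothetical illustration of the rule above; not Home Assistant code.
# Last-resort suppression at the call site:
#     state = fetch_state("sensor.kitchen")  # type: ignore[no-untyped-call]
#
# Preferred fix: annotate the helper itself so every caller type-checks
# without suppressions.
from __future__ import annotations


def fetch_state(entity_id: str) -> dict[str, str]:
    """Return a state payload for entity_id (stub used only for illustration)."""
    return {"entity_id": entity_id, "state": "on"}


state: dict[str, str] = fetch_state("sensor.kitchen")
```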

12  .github/workflows/builder.yml  (vendored)
							| @@ -190,7 +190,7 @@ jobs: | ||||
|           echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE | ||||
|  | ||||
|       - name: Login to GitHub Container Registry | ||||
|         uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 | ||||
|         uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
| @@ -257,7 +257,7 @@ jobs: | ||||
|           fi | ||||
|  | ||||
|       - name: Login to GitHub Container Registry | ||||
|         uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 | ||||
|         uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
| @@ -326,20 +326,20 @@ jobs: | ||||
|         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 | ||||
|  | ||||
|       - name: Install Cosign | ||||
|         uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0 | ||||
|         uses: sigstore/cosign-installer@d7543c93d881b35a8faa02e8e3605f69b7a1ce62 # v3.10.0 | ||||
|         with: | ||||
|           cosign-release: "v2.2.3" | ||||
|  | ||||
|       - name: Login to DockerHub | ||||
|         if: matrix.registry == 'docker.io/homeassistant' | ||||
|         uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 | ||||
|         uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 | ||||
|         with: | ||||
|           username: ${{ secrets.DOCKERHUB_USERNAME }} | ||||
|           password: ${{ secrets.DOCKERHUB_TOKEN }} | ||||
|  | ||||
|       - name: Login to GitHub Container Registry | ||||
|         if: matrix.registry == 'ghcr.io/home-assistant' | ||||
|         uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 | ||||
|         uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
| @@ -504,7 +504,7 @@ jobs: | ||||
|         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 | ||||
|  | ||||
|       - name: Login to GitHub Container Registry | ||||
|         uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 | ||||
|         uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
|   | ||||

738  .github/workflows/ci.yaml  (vendored)
File diff suppressed because it is too large

4  .github/workflows/codeql.yml  (vendored)
							| @@ -24,11 +24,11 @@ jobs: | ||||
|         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 | ||||
|  | ||||
|       - name: Initialize CodeQL | ||||
|         uses: github/codeql-action/init@16140ae1a102900babc80a33c44059580f687047 # v4.30.9 | ||||
|         uses: github/codeql-action/init@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3 | ||||
|         with: | ||||
|           languages: python | ||||
|  | ||||
|       - name: Perform CodeQL Analysis | ||||
|         uses: github/codeql-action/analyze@16140ae1a102900babc80a33c44059580f687047 # v4.30.9 | ||||
|         uses: github/codeql-action/analyze@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3 | ||||
|         with: | ||||
|           category: "/language:python" | ||||
|   | ||||

6  .github/workflows/stale.yml  (vendored)
							| @@ -17,7 +17,7 @@ jobs: | ||||
|       # - No PRs marked as no-stale | ||||
|       # - No issues (-1) | ||||
|       - name: 60 days stale PRs policy | ||||
|         uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0 | ||||
|         uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0 | ||||
|         with: | ||||
|           repo-token: ${{ secrets.GITHUB_TOKEN }} | ||||
|           days-before-stale: 60 | ||||
| @@ -57,7 +57,7 @@ jobs: | ||||
|       # - No issues marked as no-stale or help-wanted | ||||
|       # - No PRs (-1) | ||||
|       - name: 90 days stale issues | ||||
|         uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0 | ||||
|         uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0 | ||||
|         with: | ||||
|           repo-token: ${{ steps.token.outputs.token }} | ||||
|           days-before-stale: 90 | ||||
| @@ -87,7 +87,7 @@ jobs: | ||||
|       # - No Issues marked as no-stale or help-wanted | ||||
|       # - No PRs (-1) | ||||
|       - name: Needs more information stale issues policy | ||||
|         uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0 | ||||
|         uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0 | ||||
|         with: | ||||
|           repo-token: ${{ steps.token.outputs.token }} | ||||
|           only-labels: "needs-more-information" | ||||
|   | ||||

78  .github/workflows/wheels.yml  (vendored)
							| @@ -31,8 +31,7 @@ jobs: | ||||
|     outputs: | ||||
|       architectures: ${{ steps.info.outputs.architectures }} | ||||
|     steps: | ||||
|       - &checkout | ||||
|         name: Checkout the repository | ||||
|       - name: Checkout the repository | ||||
|         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 | ||||
|  | ||||
|       - name: Set up Python ${{ env.DEFAULT_PYTHON }} | ||||
| @@ -92,7 +91,7 @@ jobs: | ||||
|           ) > build_constraints.txt | ||||
|  | ||||
|       - name: Upload env_file | ||||
|         uses: &actions-upload-artifact actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 | ||||
|         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 | ||||
|         with: | ||||
|           name: env_file | ||||
|           path: ./.env_file | ||||
| @@ -100,14 +99,14 @@ jobs: | ||||
|           overwrite: true | ||||
|  | ||||
|       - name: Upload build_constraints | ||||
|         uses: *actions-upload-artifact | ||||
|         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 | ||||
|         with: | ||||
|           name: build_constraints | ||||
|           path: ./build_constraints.txt | ||||
|           overwrite: true | ||||
|  | ||||
|       - name: Upload requirements_diff | ||||
|         uses: *actions-upload-artifact | ||||
|         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 | ||||
|         with: | ||||
|           name: requirements_diff | ||||
|           path: ./requirements_diff.txt | ||||
| @@ -119,7 +118,7 @@ jobs: | ||||
|           python -m script.gen_requirements_all ci | ||||
|  | ||||
|       - name: Upload requirements_all_wheels | ||||
|         uses: *actions-upload-artifact | ||||
|         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 | ||||
|         with: | ||||
|           name: requirements_all_wheels | ||||
|           path: ./requirements_all_wheels_*.txt | ||||
| @@ -128,41 +127,28 @@ jobs: | ||||
|     name: Build Core wheels ${{ matrix.abi }} for ${{ matrix.arch }} (musllinux_1_2) | ||||
|     if: github.repository_owner == 'home-assistant' | ||||
|     needs: init | ||||
|     runs-on: ${{ matrix.os }} | ||||
|     runs-on: ubuntu-latest | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: &matrix-build | ||||
|         abi: ["cp313", "cp314"] | ||||
|       matrix: | ||||
|         abi: ["cp313"] | ||||
|         arch: ${{ fromJson(needs.init.outputs.architectures) }} | ||||
|         include: | ||||
|           - os: ubuntu-latest | ||||
|           - arch: aarch64 | ||||
|             os: ubuntu-24.04-arm | ||||
|         exclude: | ||||
|           - abi: cp314 | ||||
|             arch: armv7 | ||||
|           - abi: cp314 | ||||
|             arch: armhf | ||||
|           - abi: cp314 | ||||
|             arch: i386 | ||||
|     steps: | ||||
|       - *checkout | ||||
|       - name: Checkout the repository | ||||
|         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 | ||||
|  | ||||
|       - &download-env-file | ||||
|         name: Download env_file | ||||
|         uses: &actions-download-artifact actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 | ||||
|       - name: Download env_file | ||||
|         uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 | ||||
|         with: | ||||
|           name: env_file | ||||
|  | ||||
|       - &download-build-constraints | ||||
|         name: Download build_constraints | ||||
|         uses: *actions-download-artifact | ||||
|       - name: Download build_constraints | ||||
|         uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 | ||||
|         with: | ||||
|           name: build_constraints | ||||
|  | ||||
|       - &download-requirements-diff | ||||
|         name: Download requirements_diff | ||||
|         uses: *actions-download-artifact | ||||
|       - name: Download requirements_diff | ||||
|         uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 | ||||
|         with: | ||||
|           name: requirements_diff | ||||
|  | ||||
| @@ -174,7 +160,7 @@ jobs: | ||||
|  | ||||
|       # home-assistant/wheels doesn't support sha pinning | ||||
|       - name: Build wheels | ||||
|         uses: &home-assistant-wheels home-assistant/wheels@2025.10.0 | ||||
|         uses: home-assistant/wheels@2025.07.0 | ||||
|         with: | ||||
|           abi: ${{ matrix.abi }} | ||||
|           tag: musllinux_1_2 | ||||
| @@ -191,19 +177,33 @@ jobs: | ||||
|     name: Build wheels ${{ matrix.abi }} for ${{ matrix.arch }} | ||||
|     if: github.repository_owner == 'home-assistant' | ||||
|     needs: init | ||||
|     runs-on: ${{ matrix.os }} | ||||
|     runs-on: ubuntu-latest | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: *matrix-build | ||||
|       matrix: | ||||
|         abi: ["cp313"] | ||||
|         arch: ${{ fromJson(needs.init.outputs.architectures) }} | ||||
|     steps: | ||||
|       - *checkout | ||||
|       - name: Checkout the repository | ||||
|         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 | ||||
|  | ||||
|       - *download-env-file | ||||
|       - *download-build-constraints | ||||
|       - *download-requirements-diff | ||||
|       - name: Download env_file | ||||
|         uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 | ||||
|         with: | ||||
|           name: env_file | ||||
|  | ||||
|       - name: Download build_constraints | ||||
|         uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 | ||||
|         with: | ||||
|           name: build_constraints | ||||
|  | ||||
|       - name: Download requirements_diff | ||||
|         uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 | ||||
|         with: | ||||
|           name: requirements_diff | ||||
|  | ||||
|       - name: Download requirements_all_wheels | ||||
|         uses: *actions-download-artifact | ||||
|         uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 | ||||
|         with: | ||||
|           name: requirements_all_wheels | ||||
|  | ||||
| @@ -221,7 +221,7 @@ jobs: | ||||
|  | ||||
|       # home-assistant/wheels doesn't support sha pinning | ||||
|       - name: Build wheels | ||||
|         uses: *home-assistant-wheels | ||||
|         uses: home-assistant/wheels@2025.07.0 | ||||
|         with: | ||||
|           abi: ${{ matrix.abi }} | ||||
|           tag: musllinux_1_2 | ||||
|   | ||||
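
The wheels.yml hunks above differ mainly in whether repeated checkout/upload/download steps are spelled out or shared through YAML anchors and aliases (`&checkout`, `*actions-download-artifact`, `&matrix-build`). A small sketch, assuming PyYAML is installed, shows that an alias resolves to exactly the mapping its anchor was attached to:

```python
# Minimal demo of the YAML anchor/alias mechanism used in the wheels.yml
# hunks above; assumes PyYAML is available (pip install pyyaml).
import yaml

DOC = """
steps:
  - &checkout
    name: Checkout the repository
    uses: actions/checkout@v5.0.0
other_steps:
  - *checkout
"""

data = yaml.safe_load(DOC)
# The alias expands to the same mapping as the anchored step.
assert data["other_steps"][0] == data["steps"][0]
print(data["other_steps"][0]["uses"])  # actions/checkout@v5.0.0
```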

3  .gitignore  (vendored)
							| @@ -79,6 +79,7 @@ junit.xml | ||||
| .project | ||||
| .pydevproject | ||||
|  | ||||
| .python-version | ||||
| .tool-versions | ||||
|  | ||||
| # emacs auto backups | ||||
| @@ -111,7 +112,6 @@ virtualization/vagrant/config | ||||
| !.vscode/cSpell.json | ||||
| !.vscode/extensions.json | ||||
| !.vscode/tasks.json | ||||
| !.vscode/settings.default.jsonc | ||||
| .env | ||||
|  | ||||
| # Windows Explorer | ||||
| @@ -141,5 +141,4 @@ pytest_buckets.txt | ||||
|  | ||||
| # AI tooling | ||||
| .claude/settings.local.json | ||||
| .serena/ | ||||
|  | ||||
|   | ||||
| @@ -1 +0,0 @@ | ||||
| 3.13 | ||||
| @@ -142,7 +142,6 @@ homeassistant.components.cloud.* | ||||
| homeassistant.components.co2signal.* | ||||
| homeassistant.components.comelit.* | ||||
| homeassistant.components.command_line.* | ||||
| homeassistant.components.compit.* | ||||
| homeassistant.components.config.* | ||||
| homeassistant.components.configurator.* | ||||
| homeassistant.components.cookidoo.* | ||||
| @@ -182,6 +181,7 @@ homeassistant.components.efergy.* | ||||
| homeassistant.components.eheimdigital.* | ||||
| homeassistant.components.electrasmart.* | ||||
| homeassistant.components.electric_kiwi.* | ||||
| homeassistant.components.elevenlabs.* | ||||
| homeassistant.components.elgato.* | ||||
| homeassistant.components.elkm1.* | ||||
| homeassistant.components.emulated_hue.* | ||||
| @@ -202,7 +202,6 @@ homeassistant.components.feedreader.* | ||||
| homeassistant.components.file_upload.* | ||||
| homeassistant.components.filesize.* | ||||
| homeassistant.components.filter.* | ||||
| homeassistant.components.firefly_iii.* | ||||
| homeassistant.components.fitbit.* | ||||
| homeassistant.components.flexit_bacnet.* | ||||
| homeassistant.components.flux_led.* | ||||
| @@ -220,7 +219,6 @@ homeassistant.components.generic_thermostat.* | ||||
| homeassistant.components.geo_location.* | ||||
| homeassistant.components.geocaching.* | ||||
| homeassistant.components.gios.* | ||||
| homeassistant.components.github.* | ||||
| homeassistant.components.glances.* | ||||
| homeassistant.components.go2rtc.* | ||||
| homeassistant.components.goalzero.* | ||||
| @@ -278,7 +276,6 @@ homeassistant.components.imap.* | ||||
| homeassistant.components.imgw_pib.* | ||||
| homeassistant.components.immich.* | ||||
| homeassistant.components.incomfort.* | ||||
| homeassistant.components.inels.* | ||||
| homeassistant.components.input_button.* | ||||
| homeassistant.components.input_select.* | ||||
| homeassistant.components.input_text.* | ||||
| @@ -327,7 +324,6 @@ homeassistant.components.london_underground.* | ||||
| homeassistant.components.lookin.* | ||||
| homeassistant.components.lovelace.* | ||||
| homeassistant.components.luftdaten.* | ||||
| homeassistant.components.lunatone.* | ||||
| homeassistant.components.madvr.* | ||||
| homeassistant.components.manual.* | ||||
| homeassistant.components.mastodon.* | ||||
| @@ -446,7 +442,6 @@ homeassistant.components.rituals_perfume_genie.* | ||||
| homeassistant.components.roborock.* | ||||
| homeassistant.components.roku.* | ||||
| homeassistant.components.romy.* | ||||
| homeassistant.components.route_b_smart_meter.* | ||||
| homeassistant.components.rpi_power.* | ||||
| homeassistant.components.rss_feed_template.* | ||||
| homeassistant.components.russound_rio.* | ||||
| @@ -478,7 +473,6 @@ homeassistant.components.skybell.* | ||||
| homeassistant.components.slack.* | ||||
| homeassistant.components.sleep_as_android.* | ||||
| homeassistant.components.sleepiq.* | ||||
| homeassistant.components.sma.* | ||||
| homeassistant.components.smhi.* | ||||
| homeassistant.components.smlight.* | ||||
| homeassistant.components.smtp.* | ||||
| @@ -557,7 +551,6 @@ homeassistant.components.vacuum.* | ||||
| homeassistant.components.vallox.* | ||||
| homeassistant.components.valve.* | ||||
| homeassistant.components.velbus.* | ||||
| homeassistant.components.vivotek.* | ||||
| homeassistant.components.vlc_telnet.* | ||||
| homeassistant.components.vodafone_station.* | ||||
| homeassistant.components.volvo.* | ||||
|   | ||||
| @@ -7,19 +7,13 @@ | ||||
|   "python.testing.pytestEnabled": false, | ||||
|   // https://code.visualstudio.com/docs/python/linting#_general-settings | ||||
|   "pylint.importStrategy": "fromEnvironment", | ||||
|   // Pyright is too pedantic for Home Assistant | ||||
|   "python.analysis.typeCheckingMode": "basic", | ||||
|   "[python]": { | ||||
|     "editor.defaultFormatter": "charliermarsh.ruff" | ||||
|   }, | ||||
|   "[json][jsonc][yaml]": { | ||||
|     "editor.defaultFormatter": "esbenp.prettier-vscode" | ||||
|   }, | ||||
|   "json.schemas": [ | ||||
|     { | ||||
|       "fileMatch": ["homeassistant/components/*/manifest.json"], | ||||
|       // This value differs between working with devcontainer and locally, therefore this value should NOT be in sync! | ||||
|       "url": "./script/json_schemas/manifest_schema.json" | ||||
|     } | ||||
|   ] | ||||
|         { | ||||
|             "fileMatch": [ | ||||
|                 "homeassistant/components/*/manifest.json" | ||||
|             ], | ||||
|             // This value differs between working with devcontainer and locally, therefor this value should NOT be in sync! | ||||
|             "url": "./script/json_schemas/manifest_schema.json" | ||||
|         } | ||||
|     ] | ||||
| } | ||||

52  CODEOWNERS  (generated)
							| @@ -46,8 +46,6 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/accuweather/ @bieniu | ||||
| /homeassistant/components/acmeda/ @atmurray | ||||
| /tests/components/acmeda/ @atmurray | ||||
| /homeassistant/components/actron_air/ @kclif9 @JagadishDhanamjayam | ||||
| /tests/components/actron_air/ @kclif9 @JagadishDhanamjayam | ||||
| /homeassistant/components/adax/ @danielhiversen @lazytarget | ||||
| /tests/components/adax/ @danielhiversen @lazytarget | ||||
| /homeassistant/components/adguard/ @frenck | ||||
| @@ -109,8 +107,8 @@ build.json @home-assistant/supervisor | ||||
| /homeassistant/components/ambient_station/ @bachya | ||||
| /tests/components/ambient_station/ @bachya | ||||
| /homeassistant/components/amcrest/ @flacjacket | ||||
| /homeassistant/components/analytics/ @home-assistant/core | ||||
| /tests/components/analytics/ @home-assistant/core | ||||
| /homeassistant/components/analytics/ @home-assistant/core @ludeeus | ||||
| /tests/components/analytics/ @home-assistant/core @ludeeus | ||||
| /homeassistant/components/analytics_insights/ @joostlek | ||||
| /tests/components/analytics_insights/ @joostlek | ||||
| /homeassistant/components/android_ip_webcam/ @engrbm87 | ||||
| @@ -294,8 +292,6 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/command_line/ @gjohansson-ST | ||||
| /homeassistant/components/compensation/ @Petro31 | ||||
| /tests/components/compensation/ @Petro31 | ||||
| /homeassistant/components/compit/ @Przemko92 | ||||
| /tests/components/compit/ @Przemko92 | ||||
| /homeassistant/components/config/ @home-assistant/core | ||||
| /tests/components/config/ @home-assistant/core | ||||
| /homeassistant/components/configurator/ @home-assistant/core | ||||
| @@ -318,8 +314,6 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/crownstone/ @Crownstone @RicArch97 | ||||
| /homeassistant/components/cups/ @fabaff | ||||
| /tests/components/cups/ @fabaff | ||||
| /homeassistant/components/cync/ @Kinachi249 | ||||
| /tests/components/cync/ @Kinachi249 | ||||
| /homeassistant/components/daikin/ @fredrike | ||||
| /tests/components/daikin/ @fredrike | ||||
| /homeassistant/components/date/ @home-assistant/core | ||||
| @@ -414,8 +408,6 @@ build.json @home-assistant/supervisor | ||||
| /homeassistant/components/egardia/ @jeroenterheerdt | ||||
| /homeassistant/components/eheimdigital/ @autinerd | ||||
| /tests/components/eheimdigital/ @autinerd | ||||
| /homeassistant/components/ekeybionyx/ @richardpolzer | ||||
| /tests/components/ekeybionyx/ @richardpolzer | ||||
| /homeassistant/components/electrasmart/ @jafar-atili | ||||
| /tests/components/electrasmart/ @jafar-atili | ||||
| /homeassistant/components/electric_kiwi/ @mikey0000 | ||||
| @@ -494,10 +486,6 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/filesize/ @gjohansson-ST | ||||
| /homeassistant/components/filter/ @dgomes | ||||
| /tests/components/filter/ @dgomes | ||||
| /homeassistant/components/fing/ @Lorenzo-Gasparini | ||||
| /tests/components/fing/ @Lorenzo-Gasparini | ||||
| /homeassistant/components/firefly_iii/ @erwindouna | ||||
| /tests/components/firefly_iii/ @erwindouna | ||||
| /homeassistant/components/fireservicerota/ @cyberjunky | ||||
| /tests/components/fireservicerota/ @cyberjunky | ||||
| /homeassistant/components/firmata/ @DaAwesomeP | ||||
| @@ -621,8 +609,6 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/greeneye_monitor/ @jkeljo | ||||
| /homeassistant/components/group/ @home-assistant/core | ||||
| /tests/components/group/ @home-assistant/core | ||||
| /homeassistant/components/growatt_server/ @johanzander | ||||
| /tests/components/growatt_server/ @johanzander | ||||
| /homeassistant/components/guardian/ @bachya | ||||
| /tests/components/guardian/ @bachya | ||||
| /homeassistant/components/habitica/ @tr4nt0r | ||||
| @@ -743,8 +729,6 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/improv_ble/ @emontnemery | ||||
| /homeassistant/components/incomfort/ @jbouwh | ||||
| /tests/components/incomfort/ @jbouwh | ||||
| /homeassistant/components/inels/ @epdevlab | ||||
| /tests/components/inels/ @epdevlab | ||||
| /homeassistant/components/influxdb/ @mdegat01 | ||||
| /tests/components/influxdb/ @mdegat01 | ||||
| /homeassistant/components/inkbird/ @bdraco | ||||
| @@ -770,8 +754,8 @@ build.json @home-assistant/supervisor | ||||
| /homeassistant/components/intent/ @home-assistant/core @synesthesiam @arturpragacz | ||||
| /tests/components/intent/ @home-assistant/core @synesthesiam @arturpragacz | ||||
| /homeassistant/components/intesishome/ @jnimmo | ||||
| /homeassistant/components/iometer/ @jukrebs | ||||
| /tests/components/iometer/ @jukrebs | ||||
| /homeassistant/components/iometer/ @MaestroOnICe | ||||
| /tests/components/iometer/ @MaestroOnICe | ||||
| /homeassistant/components/ios/ @robbiet480 | ||||
| /tests/components/ios/ @robbiet480 | ||||
| /homeassistant/components/iotawatt/ @gtdiehl @jyavenard | ||||
| @@ -786,8 +770,6 @@ build.json @home-assistant/supervisor | ||||
| /homeassistant/components/iqvia/ @bachya | ||||
| /tests/components/iqvia/ @bachya | ||||
| /homeassistant/components/irish_rail_transport/ @ttroy50 | ||||
| /homeassistant/components/irm_kmi/ @jdejaegh | ||||
| /tests/components/irm_kmi/ @jdejaegh | ||||
| /homeassistant/components/iron_os/ @tr4nt0r | ||||
| /tests/components/iron_os/ @tr4nt0r | ||||
| /homeassistant/components/isal/ @bdraco | ||||
| @@ -918,8 +900,6 @@ build.json @home-assistant/supervisor | ||||
| /homeassistant/components/luci/ @mzdrale | ||||
| /homeassistant/components/luftdaten/ @fabaff @frenck | ||||
| /tests/components/luftdaten/ @fabaff @frenck | ||||
| /homeassistant/components/lunatone/ @MoonDevLT | ||||
| /tests/components/lunatone/ @MoonDevLT | ||||
| /homeassistant/components/lupusec/ @majuss @suaveolent | ||||
| /tests/components/lupusec/ @majuss @suaveolent | ||||
| /homeassistant/components/lutron/ @cdheiser @wilburCForce | ||||
| @@ -965,8 +945,6 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/met_eireann/ @DylanGore | ||||
| /homeassistant/components/meteo_france/ @hacf-fr @oncleben31 @Quentame | ||||
| /tests/components/meteo_france/ @hacf-fr @oncleben31 @Quentame | ||||
| /homeassistant/components/meteo_lt/ @xE1H | ||||
| /tests/components/meteo_lt/ @xE1H | ||||
| /homeassistant/components/meteoalarm/ @rolfberkenbosch | ||||
| /homeassistant/components/meteoclimatic/ @adrianmo | ||||
| /tests/components/meteoclimatic/ @adrianmo | ||||
| @@ -990,6 +968,8 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/moat/ @bdraco | ||||
| /homeassistant/components/mobile_app/ @home-assistant/core | ||||
| /tests/components/mobile_app/ @home-assistant/core | ||||
| /homeassistant/components/modbus/ @janiversen | ||||
| /tests/components/modbus/ @janiversen | ||||
| /homeassistant/components/modem_callerid/ @tkdrob | ||||
| /tests/components/modem_callerid/ @tkdrob | ||||
| /homeassistant/components/modern_forms/ @wonderslug | ||||
| @@ -1073,8 +1053,6 @@ build.json @home-assistant/supervisor | ||||
| /homeassistant/components/nilu/ @hfurubotten | ||||
| /homeassistant/components/nina/ @DeerMaximum | ||||
| /tests/components/nina/ @DeerMaximum | ||||
| /homeassistant/components/nintendo_parental_controls/ @pantherale0 | ||||
| /tests/components/nintendo_parental_controls/ @pantherale0 | ||||
| /homeassistant/components/nissan_leaf/ @filcole | ||||
| /homeassistant/components/noaa_tides/ @jdelaney72 | ||||
| /homeassistant/components/nobo_hub/ @echoromeo @oyvindwe | ||||
| @@ -1143,8 +1121,6 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/opengarage/ @danielhiversen | ||||
| /homeassistant/components/openhome/ @bazwilliams | ||||
| /tests/components/openhome/ @bazwilliams | ||||
| /homeassistant/components/openrgb/ @felipecrs | ||||
| /tests/components/openrgb/ @felipecrs | ||||
| /homeassistant/components/opensky/ @joostlek | ||||
| /tests/components/opensky/ @joostlek | ||||
| /homeassistant/components/opentherm_gw/ @mvn23 | ||||
| @@ -1208,6 +1184,8 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/plex/ @jjlawren | ||||
| /homeassistant/components/plugwise/ @CoMPaTech @bouwew | ||||
| /tests/components/plugwise/ @CoMPaTech @bouwew | ||||
| /homeassistant/components/plum_lightpad/ @ColinHarrington @prystupa | ||||
| /tests/components/plum_lightpad/ @ColinHarrington @prystupa | ||||
| /homeassistant/components/point/ @fredrike | ||||
| /tests/components/point/ @fredrike | ||||
| /homeassistant/components/pooldose/ @lmaertin | ||||
| @@ -1350,8 +1328,6 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/roomba/ @pschmitt @cyr-ius @shenxn @Orhideous | ||||
| /homeassistant/components/roon/ @pavoni | ||||
| /tests/components/roon/ @pavoni | ||||
| /homeassistant/components/route_b_smart_meter/ @SeraphicRav | ||||
| /tests/components/route_b_smart_meter/ @SeraphicRav | ||||
| /homeassistant/components/rpi_power/ @shenxn @swetoast | ||||
| /tests/components/rpi_power/ @shenxn @swetoast | ||||
| /homeassistant/components/rss_feed_template/ @home-assistant/core | ||||
| @@ -1423,8 +1399,8 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/sfr_box/ @epenet | ||||
| /homeassistant/components/sftp_storage/ @maretodoric | ||||
| /tests/components/sftp_storage/ @maretodoric | ||||
| /homeassistant/components/sharkiq/ @JeffResc @funkybunch @TheOneOgre | ||||
| /tests/components/sharkiq/ @JeffResc @funkybunch @TheOneOgre | ||||
| /homeassistant/components/sharkiq/ @JeffResc @funkybunch | ||||
| /tests/components/sharkiq/ @JeffResc @funkybunch | ||||
| /homeassistant/components/shell_command/ @home-assistant/core | ||||
| /tests/components/shell_command/ @home-assistant/core | ||||
| /homeassistant/components/shelly/ @bieniu @thecode @chemelli74 @bdraco | ||||
| @@ -1489,8 +1465,8 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/snoo/ @Lash-L | ||||
| /homeassistant/components/snooz/ @AustinBrunkhorst | ||||
| /tests/components/snooz/ @AustinBrunkhorst | ||||
| /homeassistant/components/solaredge/ @frenck @bdraco @tronikos | ||||
| /tests/components/solaredge/ @frenck @bdraco @tronikos | ||||
| /homeassistant/components/solaredge/ @frenck @bdraco | ||||
| /tests/components/solaredge/ @frenck @bdraco | ||||
| /homeassistant/components/solaredge_local/ @drobtravels @scheric | ||||
| /homeassistant/components/solarlog/ @Ernst79 @dontinelli | ||||
| /tests/components/solarlog/ @Ernst79 @dontinelli | ||||
| @@ -1751,8 +1727,8 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/volumio/ @OnFreund | ||||
| /homeassistant/components/volvo/ @thomasddn | ||||
| /tests/components/volvo/ @thomasddn | ||||
| /homeassistant/components/volvooncall/ @molobrakos @svrooij | ||||
| /tests/components/volvooncall/ @molobrakos @svrooij | ||||
| /homeassistant/components/volvooncall/ @molobrakos | ||||
| /tests/components/volvooncall/ @molobrakos | ||||
| /homeassistant/components/wake_on_lan/ @ntilley905 | ||||
| /tests/components/wake_on_lan/ @ntilley905 | ||||
| /homeassistant/components/wake_word/ @home-assistant/core @synesthesiam | ||||
|   | ||||

4  Dockerfile  (generated)
							| @@ -25,13 +25,13 @@ RUN \ | ||||
|         "armv7") go2rtc_suffix='arm' ;; \ | ||||
|         *) go2rtc_suffix=${BUILD_ARCH} ;; \ | ||||
|     esac \ | ||||
|     && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.11/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \ | ||||
|     && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.9/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \ | ||||
|     && chmod +x /bin/go2rtc \ | ||||
|     # Verify go2rtc can be executed | ||||
|     && go2rtc --version | ||||
|  | ||||
| # Install uv | ||||
| RUN pip3 install uv==0.9.5 | ||||
| RUN pip3 install uv==0.8.9 | ||||
|  | ||||
| WORKDIR /usr/src | ||||
|  | ||||
|   | ||||
| @@ -34,11 +34,9 @@ WORKDIR /usr/src | ||||
|  | ||||
| COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv | ||||
|  | ||||
| RUN uv python install 3.13.2 | ||||
|  | ||||
| USER vscode | ||||
|  | ||||
| COPY .python-version ./ | ||||
| RUN uv python install | ||||
|  | ||||
| ENV VIRTUAL_ENV="/home/vscode/.local/ha-venv" | ||||
| RUN uv venv $VIRTUAL_ENV | ||||
| ENV PATH="$VIRTUAL_ENV/bin:$PATH" | ||||
|   | ||||

13  build.yaml
							| @@ -1,10 +1,13 @@ | ||||
| image: ghcr.io/home-assistant/{arch}-homeassistant | ||||
| build_from: | ||||
|   aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.10.1 | ||||
|   armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.10.1 | ||||
|   armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.10.1 | ||||
|   amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.1 | ||||
|   i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.10.1 | ||||
|   aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.1 | ||||
|   armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.1 | ||||
|   armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.1 | ||||
|   amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.1 | ||||
|   i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.1 | ||||
| codenotary: | ||||
|   signer: notary@home-assistant.io | ||||
|   base_image: notary@home-assistant.io | ||||
| cosign: | ||||
|   base_identity: https://github.com/home-assistant/docker/.* | ||||
|   identity: https://github.com/home-assistant/core/.* | ||||
|   | ||||
| @@ -34,9 +34,6 @@ INPUT_FIELD_CODE = "code" | ||||
|  | ||||
| DUMMY_SECRET = "FPPTH34D4E3MI2HG" | ||||
|  | ||||
| GOOGLE_AUTHENTICATOR_URL = "https://support.google.com/accounts/answer/1066447" | ||||
| AUTHY_URL = "https://authy.com/" | ||||
|  | ||||
|  | ||||
| def _generate_qr_code(data: str) -> str: | ||||
|     """Generate a base64 PNG string represent QR Code image of data.""" | ||||
| @@ -232,8 +229,6 @@ class TotpSetupFlow(SetupFlow[TotpAuthModule]): | ||||
|                 "code": self._ota_secret, | ||||
|                 "url": self._url, | ||||
|                 "qr_code": self._image, | ||||
|                 "google_authenticator_url": GOOGLE_AUTHENTICATOR_URL, | ||||
|                 "authy_url": AUTHY_URL, | ||||
|             }, | ||||
|             errors=errors, | ||||
|         ) | ||||
|   | ||||
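
`_generate_qr_code` above is documented as returning a base64 PNG representation of the otpauth data. A rough sketch of that general approach using the `pyotp` and `qrcode` packages (the library choice and parameters are assumptions for illustration; the hunk only shows the docstring and the dummy secret):

```python
# Rough sketch, not Home Assistant's implementation: build an otpauth URI
# from a TOTP secret and render it as a base64-encoded PNG QR code.
# Assumes `pip install pyotp qrcode[pil]`.
import base64
from io import BytesIO

import pyotp
import qrcode

secret = "FPPTH34D4E3MI2HG"  # DUMMY_SECRET from the hunk above
uri = pyotp.TOTP(secret).provisioning_uri(
    name="user@example.com", issuer_name="Home Assistant"
)

buffer = BytesIO()
qrcode.make(uri).save(buffer)  # the PIL image factory writes PNG by default
qr_b64 = base64.b64encode(buffer.getvalue()).decode()
print(qr_b64[:32], "...")
```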
| @@ -616,34 +616,34 @@ async def async_enable_logging( | ||||
|         ), | ||||
|     ) | ||||
|  | ||||
|     logger = logging.getLogger() | ||||
|     logger.setLevel(logging.INFO if verbose else logging.WARNING) | ||||
|  | ||||
|     # Log errors to a file if we have write access to file or config dir | ||||
|     if log_file is None: | ||||
|         default_log_path = hass.config.path(ERROR_LOG_FILENAME) | ||||
|         if "SUPERVISOR" in os.environ: | ||||
|             _LOGGER.info("Running in Supervisor, not logging to file") | ||||
|             # Rename the default log file if it exists, since previous versions created | ||||
|             # it even on Supervisor | ||||
|             if os.path.isfile(default_log_path): | ||||
|                 with contextlib.suppress(OSError): | ||||
|                     os.rename(default_log_path, f"{default_log_path}.old") | ||||
|             err_log_path = None | ||||
|         else: | ||||
|             err_log_path = default_log_path | ||||
|         err_log_path = hass.config.path(ERROR_LOG_FILENAME) | ||||
|     else: | ||||
|         err_log_path = os.path.abspath(log_file) | ||||
|  | ||||
|     if err_log_path: | ||||
|     err_path_exists = os.path.isfile(err_log_path) | ||||
|     err_dir = os.path.dirname(err_log_path) | ||||
|  | ||||
|     # Check if we can write to the error log if it exists or that | ||||
|     # we can create files in the containing directory if not. | ||||
|     if (err_path_exists and os.access(err_log_path, os.W_OK)) or ( | ||||
|         not err_path_exists and os.access(err_dir, os.W_OK) | ||||
|     ): | ||||
|         err_handler = await hass.async_add_executor_job( | ||||
|             _create_log_file, err_log_path, log_rotate_days | ||||
|         ) | ||||
|  | ||||
|         err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME)) | ||||
|  | ||||
|         logger = logging.getLogger() | ||||
|         logger.addHandler(err_handler) | ||||
|         logger.setLevel(logging.INFO if verbose else logging.WARNING) | ||||
|  | ||||
|         # Save the log file location for access by other components. | ||||
|         hass.data[DATA_LOGGING] = err_log_path | ||||
|     else: | ||||
|         _LOGGER.error("Unable to set up error log %s (access denied)", err_log_path) | ||||
|  | ||||
|     async_activate_log_queue_handler(hass) | ||||
|  | ||||
|   | ||||
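
One side of the hunk above only attaches the error-log handler after checking that the log file (or its directory, if the file does not exist yet) is writable. A simplified, stdlib-only sketch of that pattern (not Home Assistant's actual `async_enable_logging`):

```python
# Simplified sketch of the write-access check in the hunk above; not
# Home Assistant's actual helper.
from __future__ import annotations

import logging
import os
from logging.handlers import TimedRotatingFileHandler


def attach_error_log(err_log_path: str, log_rotate_days: int | None = None) -> bool:
    """Attach a file handler to the root logger if the path is writable."""
    err_path_exists = os.path.isfile(err_log_path)
    err_dir = os.path.dirname(err_log_path) or "."

    # Writable existing file, or creatable inside a writable directory.
    if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
        not err_path_exists and os.access(err_dir, os.W_OK)
    ):
        if log_rotate_days:
            handler: logging.Handler = TimedRotatingFileHandler(
                err_log_path, when="midnight", backupCount=log_rotate_days
            )
        else:
            handler = logging.FileHandler(err_log_path, mode="w", delay=True)
        logging.getLogger().addHandler(handler)
        return True

    logging.getLogger(__name__).error(
        "Unable to set up error log %s (access denied)", err_log_path
    )
    return False
```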
| @@ -1,5 +0,0 @@ | ||||
| { | ||||
|   "domain": "eltako", | ||||
|   "name": "Eltako", | ||||
|   "iot_standards": ["matter"] | ||||
| } | ||||

5  homeassistant/brands/ibm.json  (normal file)
							| @@ -0,0 +1,5 @@ | ||||
| { | ||||
|   "domain": "ibm", | ||||
|   "name": "IBM", | ||||
|   "integrations": ["watson_iot", "watson_tts"] | ||||
| } | ||||
| @@ -1,5 +0,0 @@ | ||||
| { | ||||
|   "domain": "konnected", | ||||
|   "name": "Konnected", | ||||
|   "integrations": ["konnected", "konnected_esphome"] | ||||
| } | ||||
| @@ -1,5 +0,0 @@ | ||||
| { | ||||
|   "domain": "level", | ||||
|   "name": "Level", | ||||
|   "iot_standards": ["matter"] | ||||
| } | ||||
| @@ -8,17 +8,14 @@ import logging | ||||
| from aioacaia.acaiascale import AcaiaScale | ||||
| from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError | ||||
|  | ||||
| from homeassistant.components.bluetooth import async_get_scanner | ||||
| from homeassistant.config_entries import ConfigEntry | ||||
| from homeassistant.const import CONF_ADDRESS | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.helpers.debounce import Debouncer | ||||
| from homeassistant.helpers.update_coordinator import DataUpdateCoordinator | ||||
|  | ||||
| from .const import CONF_IS_NEW_STYLE_SCALE | ||||
|  | ||||
| SCAN_INTERVAL = timedelta(seconds=15) | ||||
| UPDATE_DEBOUNCE_TIME = 0.2 | ||||
|  | ||||
| _LOGGER = logging.getLogger(__name__) | ||||
|  | ||||
| @@ -40,20 +37,11 @@ class AcaiaCoordinator(DataUpdateCoordinator[None]): | ||||
|             config_entry=entry, | ||||
|         ) | ||||
|  | ||||
|         debouncer = Debouncer( | ||||
|             hass=hass, | ||||
|             logger=_LOGGER, | ||||
|             cooldown=UPDATE_DEBOUNCE_TIME, | ||||
|             immediate=True, | ||||
|             function=self.async_update_listeners, | ||||
|         ) | ||||
|  | ||||
|         self._scale = AcaiaScale( | ||||
|             address_or_ble_device=entry.data[CONF_ADDRESS], | ||||
|             name=entry.title, | ||||
|             is_new_style_scale=entry.data[CONF_IS_NEW_STYLE_SCALE], | ||||
|             notify_callback=debouncer.async_schedule_call, | ||||
|             scanner=async_get_scanner(hass), | ||||
|             notify_callback=self.async_update_listeners, | ||||
|         ) | ||||
|  | ||||
|     @property | ||||
|   | ||||
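
One side of the coordinator hunk above routes the scale's `notify_callback` through a `Debouncer` with a 0.2 s cooldown so bursts of notifications collapse into a single `async_update_listeners` call. A generic, trailing-edge asyncio sketch of the same idea (illustrative only; Home Assistant's helper is configured with `immediate=True`, so it also fires on the leading edge):

```python
# Generic debounce sketch (trailing edge only); illustrative, not the
# Home Assistant Debouncer helper used in the hunk above.
from __future__ import annotations

import asyncio
from collections.abc import Callable


class SimpleDebouncer:
    def __init__(self, cooldown: float, function: Callable[[], None]) -> None:
        self._cooldown = cooldown
        self._function = function
        self._task: asyncio.Task[None] | None = None

    def schedule(self) -> None:
        # Restart the timer on every call; only the last call in a burst fires.
        if self._task is not None and not self._task.done():
            self._task.cancel()
        self._task = asyncio.get_running_loop().create_task(self._delayed())

    async def _delayed(self) -> None:
        await asyncio.sleep(self._cooldown)
        self._function()


async def main() -> None:
    debouncer = SimpleDebouncer(0.2, lambda: print("listeners updated"))
    for _ in range(5):  # burst of notifications
        debouncer.schedule()
        await asyncio.sleep(0.01)
    await asyncio.sleep(0.3)  # prints "listeners updated" exactly once


asyncio.run(main())
```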
| @@ -26,5 +26,5 @@ | ||||
|   "iot_class": "local_push", | ||||
|   "loggers": ["aioacaia"], | ||||
|   "quality_scale": "platinum", | ||||
|   "requirements": ["aioacaia==0.1.17"] | ||||
|   "requirements": ["aioacaia==0.1.14"] | ||||
| } | ||||
|   | ||||
| @@ -3,7 +3,6 @@ | ||||
| from __future__ import annotations | ||||
|  | ||||
| from asyncio import timeout | ||||
| from collections.abc import Mapping | ||||
| from typing import Any | ||||
|  | ||||
| from accuweather import AccuWeather, ApiError, InvalidApiKeyError, RequestsExceededError | ||||
| @@ -23,8 +22,6 @@ class AccuWeatherFlowHandler(ConfigFlow, domain=DOMAIN): | ||||
|     """Config flow for AccuWeather.""" | ||||
|  | ||||
|     VERSION = 1 | ||||
|     _latitude: float | None = None | ||||
|     _longitude: float | None = None | ||||
|  | ||||
|     async def async_step_user( | ||||
|         self, user_input: dict[str, Any] | None = None | ||||
| @@ -77,46 +74,3 @@ class AccuWeatherFlowHandler(ConfigFlow, domain=DOMAIN): | ||||
|             ), | ||||
|             errors=errors, | ||||
|         ) | ||||
|  | ||||
|     async def async_step_reauth( | ||||
|         self, entry_data: Mapping[str, Any] | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Handle configuration by re-auth.""" | ||||
|         self._latitude = entry_data[CONF_LATITUDE] | ||||
|         self._longitude = entry_data[CONF_LONGITUDE] | ||||
|  | ||||
|         return await self.async_step_reauth_confirm() | ||||
|  | ||||
|     async def async_step_reauth_confirm( | ||||
|         self, user_input: dict[str, Any] | None = None | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Dialog that informs the user that reauth is required.""" | ||||
|         errors: dict[str, str] = {} | ||||
|  | ||||
|         if user_input is not None: | ||||
|             websession = async_get_clientsession(self.hass) | ||||
|             try: | ||||
|                 async with timeout(10): | ||||
|                     accuweather = AccuWeather( | ||||
|                         user_input[CONF_API_KEY], | ||||
|                         websession, | ||||
|                         latitude=self._latitude, | ||||
|                         longitude=self._longitude, | ||||
|                     ) | ||||
|                     await accuweather.async_get_location() | ||||
|             except (ApiError, ClientConnectorError, TimeoutError, ClientError): | ||||
|                 errors["base"] = "cannot_connect" | ||||
|             except InvalidApiKeyError: | ||||
|                 errors["base"] = "invalid_api_key" | ||||
|             except RequestsExceededError: | ||||
|                 errors["base"] = "requests_exceeded" | ||||
|             else: | ||||
|                 return self.async_update_reload_and_abort( | ||||
|                     self._get_reauth_entry(), data_updates=user_input | ||||
|                 ) | ||||
|  | ||||
|         return self.async_show_form( | ||||
|             step_id="reauth_confirm", | ||||
|             data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}), | ||||
|             errors=errors, | ||||
|         ) | ||||
|   | ||||
| @@ -71,4 +71,4 @@ POLLEN_CATEGORY_MAP = { | ||||
| } | ||||
| UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=10) | ||||
| UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6) | ||||
| UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(minutes=30) | ||||
| UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(hours=30) | ||||
|   | ||||
| @@ -15,7 +15,6 @@ from aiohttp.client_exceptions import ClientConnectorError | ||||
| from homeassistant.config_entries import ConfigEntry | ||||
| from homeassistant.const import CONF_NAME | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.exceptions import ConfigEntryAuthFailed | ||||
| from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo | ||||
| from homeassistant.helpers.update_coordinator import ( | ||||
|     DataUpdateCoordinator, | ||||
| @@ -31,7 +30,7 @@ from .const import ( | ||||
|     UPDATE_INTERVAL_OBSERVATION, | ||||
| ) | ||||
|  | ||||
| EXCEPTIONS = (ApiError, ClientConnectorError, RequestsExceededError) | ||||
| EXCEPTIONS = (ApiError, ClientConnectorError, InvalidApiKeyError, RequestsExceededError) | ||||
|  | ||||
| _LOGGER = logging.getLogger(__name__) | ||||
|  | ||||
| @@ -53,8 +52,6 @@ class AccuWeatherObservationDataUpdateCoordinator( | ||||
| ): | ||||
|     """Class to manage fetching AccuWeather data API.""" | ||||
|  | ||||
|     config_entry: AccuWeatherConfigEntry | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         hass: HomeAssistant, | ||||
| @@ -90,12 +87,6 @@ class AccuWeatherObservationDataUpdateCoordinator( | ||||
|                 translation_key="current_conditions_update_error", | ||||
|                 translation_placeholders={"error": repr(error)}, | ||||
|             ) from error | ||||
|         except InvalidApiKeyError as err: | ||||
|             raise ConfigEntryAuthFailed( | ||||
|                 translation_domain=DOMAIN, | ||||
|                 translation_key="auth_error", | ||||
|                 translation_placeholders={"entry": self.config_entry.title}, | ||||
|             ) from err | ||||
|  | ||||
|         _LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining) | ||||
|  | ||||
| @@ -107,8 +98,6 @@ class AccuWeatherForecastDataUpdateCoordinator( | ||||
| ): | ||||
|     """Base class for AccuWeather forecast.""" | ||||
|  | ||||
|     config_entry: AccuWeatherConfigEntry | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         hass: HomeAssistant, | ||||
| @@ -148,12 +137,6 @@ class AccuWeatherForecastDataUpdateCoordinator( | ||||
|                 translation_key="forecast_update_error", | ||||
|                 translation_placeholders={"error": repr(error)}, | ||||
|             ) from error | ||||
|         except InvalidApiKeyError as err: | ||||
|             raise ConfigEntryAuthFailed( | ||||
|                 translation_domain=DOMAIN, | ||||
|                 translation_key="auth_error", | ||||
|                 translation_placeholders={"entry": self.config_entry.title}, | ||||
|             ) from err | ||||
|  | ||||
|         _LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining) | ||||
|         return result | ||||
|   | ||||
| @@ -1,9 +1,6 @@ | ||||
| { | ||||
|   "entity": { | ||||
|     "sensor": { | ||||
|       "air_quality": { | ||||
|         "default": "mdi:air-filter" | ||||
|       }, | ||||
|       "cloud_ceiling": { | ||||
|         "default": "mdi:weather-fog" | ||||
|       }, | ||||
| @@ -37,6 +34,9 @@ | ||||
|       "thunderstorm_probability_night": { | ||||
|         "default": "mdi:weather-lightning" | ||||
|       }, | ||||
|       "translation_key": { | ||||
|         "default": "mdi:air-filter" | ||||
|       }, | ||||
|       "tree_pollen": { | ||||
|         "default": "mdi:tree-outline" | ||||
|       }, | ||||
|   | ||||
| @@ -7,5 +7,5 @@ | ||||
|   "integration_type": "service", | ||||
|   "iot_class": "cloud_polling", | ||||
|   "loggers": ["accuweather"], | ||||
|   "requirements": ["accuweather==4.2.2"] | ||||
|   "requirements": ["accuweather==4.2.1"] | ||||
| } | ||||
|   | ||||
| @@ -7,17 +7,6 @@ | ||||
|           "api_key": "[%key:common::config_flow::data::api_key%]", | ||||
|           "latitude": "[%key:common::config_flow::data::latitude%]", | ||||
|           "longitude": "[%key:common::config_flow::data::longitude%]" | ||||
|         }, | ||||
|         "data_description": { | ||||
|           "api_key": "API key generated in the AccuWeather APIs portal." | ||||
|         } | ||||
|       }, | ||||
|       "reauth_confirm": { | ||||
|         "data": { | ||||
|           "api_key": "[%key:common::config_flow::data::api_key%]" | ||||
|         }, | ||||
|         "data_description": { | ||||
|           "api_key": "[%key:component::accuweather::config::step::user::data_description::api_key%]" | ||||
|         } | ||||
|       } | ||||
|     }, | ||||
| @@ -30,8 +19,7 @@ | ||||
|       "requests_exceeded": "The allowed number of requests to the AccuWeather API has been exceeded. You have to wait or change the API key." | ||||
|     }, | ||||
|     "abort": { | ||||
|       "already_configured": "[%key:common::config_flow::abort::already_configured_location%]", | ||||
|       "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" | ||||
|       "already_configured": "[%key:common::config_flow::abort::already_configured_location%]" | ||||
|     } | ||||
|   }, | ||||
|   "entity": { | ||||
| @@ -251,9 +239,6 @@ | ||||
|     } | ||||
|   }, | ||||
|   "exceptions": { | ||||
|     "auth_error": { | ||||
|       "message": "Authentication failed for {entry}, please update your API key" | ||||
|     }, | ||||
|     "current_conditions_update_error": { | ||||
|       "message": "An error occurred while retrieving weather current conditions data from the AccuWeather API: {error}" | ||||
|     }, | ||||
|   | ||||
| @@ -1,57 +0,0 @@ | ||||
| """The Actron Air integration.""" | ||||
|  | ||||
| from actron_neo_api import ( | ||||
|     ActronAirNeoACSystem, | ||||
|     ActronNeoAPI, | ||||
|     ActronNeoAPIError, | ||||
|     ActronNeoAuthError, | ||||
| ) | ||||
|  | ||||
| from homeassistant.const import CONF_API_TOKEN, Platform | ||||
| from homeassistant.core import HomeAssistant | ||||
|  | ||||
| from .const import _LOGGER | ||||
| from .coordinator import ( | ||||
|     ActronAirConfigEntry, | ||||
|     ActronAirRuntimeData, | ||||
|     ActronAirSystemCoordinator, | ||||
| ) | ||||
|  | ||||
| PLATFORM = [Platform.CLIMATE] | ||||
|  | ||||
|  | ||||
| async def async_setup_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) -> bool: | ||||
|     """Set up Actron Air integration from a config entry.""" | ||||
|  | ||||
|     api = ActronNeoAPI(refresh_token=entry.data[CONF_API_TOKEN]) | ||||
|     systems: list[ActronAirNeoACSystem] = [] | ||||
|  | ||||
|     try: | ||||
|         systems = await api.get_ac_systems() | ||||
|         await api.update_status() | ||||
|     except ActronNeoAuthError: | ||||
|         _LOGGER.error("Authentication error while setting up Actron Air integration") | ||||
|         raise | ||||
|     except ActronNeoAPIError as err: | ||||
|         _LOGGER.error("API error while setting up Actron Air integration: %s", err) | ||||
|         raise | ||||
|  | ||||
|     system_coordinators: dict[str, ActronAirSystemCoordinator] = {} | ||||
|     for system in systems: | ||||
|         coordinator = ActronAirSystemCoordinator(hass, entry, api, system) | ||||
|         _LOGGER.debug("Setting up coordinator for system: %s", system["serial"]) | ||||
|         await coordinator.async_config_entry_first_refresh() | ||||
|         system_coordinators[system["serial"]] = coordinator | ||||
|  | ||||
|     entry.runtime_data = ActronAirRuntimeData( | ||||
|         api=api, | ||||
|         system_coordinators=system_coordinators, | ||||
|     ) | ||||
|  | ||||
|     await hass.config_entries.async_forward_entry_setups(entry, PLATFORM) | ||||
|     return True | ||||
|  | ||||
|  | ||||
| async def async_unload_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) -> bool: | ||||
|     """Unload a config entry.""" | ||||
|     return await hass.config_entries.async_unload_platforms(entry, PLATFORM) | ||||
| @@ -1,259 +0,0 @@ | ||||
| """Climate platform for Actron Air integration.""" | ||||
|  | ||||
| from typing import Any | ||||
|  | ||||
| from actron_neo_api import ActronAirNeoStatus, ActronAirNeoZone | ||||
|  | ||||
| from homeassistant.components.climate import ( | ||||
|     FAN_AUTO, | ||||
|     FAN_HIGH, | ||||
|     FAN_LOW, | ||||
|     FAN_MEDIUM, | ||||
|     ClimateEntity, | ||||
|     ClimateEntityFeature, | ||||
|     HVACMode, | ||||
| ) | ||||
| from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.helpers.device_registry import DeviceInfo | ||||
| from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback | ||||
| from homeassistant.helpers.update_coordinator import CoordinatorEntity | ||||
|  | ||||
| from .const import DOMAIN | ||||
| from .coordinator import ActronAirConfigEntry, ActronAirSystemCoordinator | ||||
|  | ||||
| PARALLEL_UPDATES = 0 | ||||
|  | ||||
| FAN_MODE_MAPPING_ACTRONAIR_TO_HA = { | ||||
|     "AUTO": FAN_AUTO, | ||||
|     "LOW": FAN_LOW, | ||||
|     "MED": FAN_MEDIUM, | ||||
|     "HIGH": FAN_HIGH, | ||||
| } | ||||
| FAN_MODE_MAPPING_HA_TO_ACTRONAIR = { | ||||
|     v: k for k, v in FAN_MODE_MAPPING_ACTRONAIR_TO_HA.items() | ||||
| } | ||||
| HVAC_MODE_MAPPING_ACTRONAIR_TO_HA = { | ||||
|     "COOL": HVACMode.COOL, | ||||
|     "HEAT": HVACMode.HEAT, | ||||
|     "FAN": HVACMode.FAN_ONLY, | ||||
|     "AUTO": HVACMode.AUTO, | ||||
|     "OFF": HVACMode.OFF, | ||||
| } | ||||
| HVAC_MODE_MAPPING_HA_TO_ACTRONAIR = { | ||||
|     v: k for k, v in HVAC_MODE_MAPPING_ACTRONAIR_TO_HA.items() | ||||
| } | ||||
|  | ||||
|  | ||||
| async def async_setup_entry( | ||||
|     hass: HomeAssistant, | ||||
|     entry: ActronAirConfigEntry, | ||||
|     async_add_entities: AddConfigEntryEntitiesCallback, | ||||
| ) -> None: | ||||
|     """Set up Actron Air climate entities.""" | ||||
|     system_coordinators = entry.runtime_data.system_coordinators | ||||
|     entities: list[ClimateEntity] = [] | ||||
|  | ||||
|     for coordinator in system_coordinators.values(): | ||||
|         status = coordinator.data | ||||
|         name = status.ac_system.system_name | ||||
|         entities.append(ActronSystemClimate(coordinator, name)) | ||||
|  | ||||
|         entities.extend( | ||||
|             ActronZoneClimate(coordinator, zone) | ||||
|             for zone in status.remote_zone_info | ||||
|             if zone.exists | ||||
|         ) | ||||
|  | ||||
|     async_add_entities(entities) | ||||
|  | ||||
|  | ||||
| class BaseClimateEntity(CoordinatorEntity[ActronAirSystemCoordinator], ClimateEntity): | ||||
|     """Base class for Actron Air climate entities.""" | ||||
|  | ||||
|     _attr_has_entity_name = True | ||||
|     _attr_temperature_unit = UnitOfTemperature.CELSIUS | ||||
|     _attr_supported_features = ( | ||||
|         ClimateEntityFeature.TARGET_TEMPERATURE | ||||
|         | ClimateEntityFeature.FAN_MODE | ||||
|         | ClimateEntityFeature.TURN_ON | ||||
|         | ClimateEntityFeature.TURN_OFF | ||||
|     ) | ||||
|     _attr_name = None | ||||
|     _attr_fan_modes = list(FAN_MODE_MAPPING_ACTRONAIR_TO_HA.values()) | ||||
|     _attr_hvac_modes = list(HVAC_MODE_MAPPING_ACTRONAIR_TO_HA.values()) | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         coordinator: ActronAirSystemCoordinator, | ||||
|         name: str, | ||||
|     ) -> None: | ||||
|         """Initialize an Actron Air unit.""" | ||||
|         super().__init__(coordinator) | ||||
|         self._serial_number = coordinator.serial_number | ||||
|  | ||||
|  | ||||
| class ActronSystemClimate(BaseClimateEntity): | ||||
|     """Representation of the Actron Air system.""" | ||||
|  | ||||
|     _attr_supported_features = ( | ||||
|         ClimateEntityFeature.TARGET_TEMPERATURE | ||||
|         | ClimateEntityFeature.FAN_MODE | ||||
|         | ClimateEntityFeature.TURN_ON | ||||
|         | ClimateEntityFeature.TURN_OFF | ||||
|     ) | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         coordinator: ActronAirSystemCoordinator, | ||||
|         name: str, | ||||
|     ) -> None: | ||||
|         """Initialize an Actron Air unit.""" | ||||
|         super().__init__(coordinator, name) | ||||
|         serial_number = coordinator.serial_number | ||||
|         self._attr_unique_id = serial_number | ||||
|         self._attr_device_info = DeviceInfo( | ||||
|             identifiers={(DOMAIN, serial_number)}, | ||||
|             name=self._status.ac_system.system_name, | ||||
|             manufacturer="Actron Air", | ||||
|             model_id=self._status.ac_system.master_wc_model, | ||||
|             sw_version=self._status.ac_system.master_wc_firmware_version, | ||||
|             serial_number=serial_number, | ||||
|         ) | ||||
|  | ||||
|     @property | ||||
|     def min_temp(self) -> float: | ||||
|         """Return the minimum temperature that can be set.""" | ||||
|         return self._status.min_temp | ||||
|  | ||||
|     @property | ||||
|     def max_temp(self) -> float: | ||||
|         """Return the maximum temperature that can be set.""" | ||||
|         return self._status.max_temp | ||||
|  | ||||
|     @property | ||||
|     def _status(self) -> ActronAirNeoStatus: | ||||
|         """Get the current status from the coordinator.""" | ||||
|         return self.coordinator.data | ||||
|  | ||||
|     @property | ||||
|     def hvac_mode(self) -> HVACMode | None: | ||||
|         """Return the current HVAC mode.""" | ||||
|         if not self._status.user_aircon_settings.is_on: | ||||
|             return HVACMode.OFF | ||||
|  | ||||
|         mode = self._status.user_aircon_settings.mode | ||||
|         return HVAC_MODE_MAPPING_ACTRONAIR_TO_HA.get(mode) | ||||
|  | ||||
|     @property | ||||
|     def fan_mode(self) -> str | None: | ||||
|         """Return the current fan mode.""" | ||||
|         fan_mode = self._status.user_aircon_settings.fan_mode | ||||
|         return FAN_MODE_MAPPING_ACTRONAIR_TO_HA.get(fan_mode) | ||||
|  | ||||
|     @property | ||||
|     def current_humidity(self) -> float: | ||||
|         """Return the current humidity.""" | ||||
|         return self._status.master_info.live_humidity_pc | ||||
|  | ||||
|     @property | ||||
|     def current_temperature(self) -> float: | ||||
|         """Return the current temperature.""" | ||||
|         return self._status.master_info.live_temp_c | ||||
|  | ||||
|     @property | ||||
|     def target_temperature(self) -> float: | ||||
|         """Return the target temperature.""" | ||||
|         return self._status.user_aircon_settings.temperature_setpoint_cool_c | ||||
|  | ||||
|     async def async_set_fan_mode(self, fan_mode: str) -> None: | ||||
|         """Set a new fan mode.""" | ||||
|         api_fan_mode = FAN_MODE_MAPPING_HA_TO_ACTRONAIR.get(fan_mode.lower()) | ||||
|         await self._status.user_aircon_settings.set_fan_mode(api_fan_mode) | ||||
|  | ||||
|     async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: | ||||
|         """Set the HVAC mode.""" | ||||
|         ac_mode = HVAC_MODE_MAPPING_HA_TO_ACTRONAIR.get(hvac_mode) | ||||
|         await self._status.ac_system.set_system_mode(ac_mode) | ||||
|  | ||||
|     async def async_set_temperature(self, **kwargs: Any) -> None: | ||||
|         """Set the temperature.""" | ||||
|         temp = kwargs.get(ATTR_TEMPERATURE) | ||||
|         await self._status.user_aircon_settings.set_temperature(temperature=temp) | ||||
|  | ||||
|  | ||||
| class ActronZoneClimate(BaseClimateEntity): | ||||
|     """Representation of a zone within the Actron Air system.""" | ||||
|  | ||||
|     _attr_supported_features = ( | ||||
|         ClimateEntityFeature.TARGET_TEMPERATURE | ||||
|         | ClimateEntityFeature.TURN_ON | ||||
|         | ClimateEntityFeature.TURN_OFF | ||||
|     ) | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         coordinator: ActronAirSystemCoordinator, | ||||
|         zone: ActronAirNeoZone, | ||||
|     ) -> None: | ||||
|         """Initialize an Actron Air unit.""" | ||||
|         super().__init__(coordinator, zone.title) | ||||
|         serial_number = coordinator.serial_number | ||||
|         self._zone_id: int = zone.zone_id | ||||
|         self._attr_unique_id: str = f"{serial_number}_zone_{zone.zone_id}" | ||||
|         self._attr_device_info: DeviceInfo = DeviceInfo( | ||||
|             identifiers={(DOMAIN, self._attr_unique_id)}, | ||||
|             name=zone.title, | ||||
|             manufacturer="Actron Air", | ||||
|             model="Zone", | ||||
|             suggested_area=zone.title, | ||||
|             via_device=(DOMAIN, serial_number), | ||||
|         ) | ||||
|  | ||||
|     @property | ||||
|     def min_temp(self) -> float: | ||||
|         """Return the minimum temperature that can be set.""" | ||||
|         return self._zone.min_temp | ||||
|  | ||||
|     @property | ||||
|     def max_temp(self) -> float: | ||||
|         """Return the maximum temperature that can be set.""" | ||||
|         return self._zone.max_temp | ||||
|  | ||||
|     @property | ||||
|     def _zone(self) -> ActronAirNeoZone: | ||||
|         """Get the current zone data from the coordinator.""" | ||||
|         status = self.coordinator.data | ||||
|         return status.zones[self._zone_id] | ||||
|  | ||||
|     @property | ||||
|     def hvac_mode(self) -> HVACMode | None: | ||||
|         """Return the current HVAC mode.""" | ||||
|         if self._zone.is_active: | ||||
|             mode = self._zone.hvac_mode | ||||
|             return HVAC_MODE_MAPPING_ACTRONAIR_TO_HA.get(mode) | ||||
|         return HVACMode.OFF | ||||
|  | ||||
|     @property | ||||
|     def current_humidity(self) -> float | None: | ||||
|         """Return the current humidity.""" | ||||
|         return self._zone.humidity | ||||
|  | ||||
|     @property | ||||
|     def current_temperature(self) -> float | None: | ||||
|         """Return the current temperature.""" | ||||
|         return self._zone.live_temp_c | ||||
|  | ||||
|     @property | ||||
|     def target_temperature(self) -> float | None: | ||||
|         """Return the target temperature.""" | ||||
|         return self._zone.temperature_setpoint_cool_c | ||||
|  | ||||
|     async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: | ||||
|         """Set the HVAC mode.""" | ||||
|         is_enabled = hvac_mode != HVACMode.OFF | ||||
|         await self._zone.enable(is_enabled) | ||||
|  | ||||
|     async def async_set_temperature(self, **kwargs: Any) -> None: | ||||
|         """Set the temperature.""" | ||||
|         await self._zone.set_temperature(temperature=kwargs["temperature"]) | ||||
| @@ -1,132 +0,0 @@ | ||||
| """Setup config flow for Actron Air integration.""" | ||||
|  | ||||
| import asyncio | ||||
| from typing import Any | ||||
|  | ||||
| from actron_neo_api import ActronNeoAPI, ActronNeoAuthError | ||||
|  | ||||
| from homeassistant.config_entries import ConfigFlow, ConfigFlowResult | ||||
| from homeassistant.const import CONF_API_TOKEN | ||||
| from homeassistant.exceptions import HomeAssistantError | ||||
|  | ||||
| from .const import _LOGGER, DOMAIN | ||||
|  | ||||
|  | ||||
| class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|     """Handle a config flow for Actron Air.""" | ||||
|  | ||||
|     def __init__(self) -> None: | ||||
|         """Initialize the config flow.""" | ||||
|         self._api: ActronNeoAPI | None = None | ||||
|         self._device_code: str | None = None | ||||
|         self._user_code: str = "" | ||||
|         self._verification_uri: str = "" | ||||
|         self._expires_minutes: str = "30" | ||||
|         self.login_task: asyncio.Task | None = None | ||||
|  | ||||
|     async def async_step_user( | ||||
|         self, user_input: dict[str, Any] | None = None | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Handle the initial step.""" | ||||
|         if self._api is None: | ||||
|             _LOGGER.debug("Initiating device authorization") | ||||
|             self._api = ActronNeoAPI() | ||||
|             try: | ||||
|                 device_code_response = await self._api.request_device_code() | ||||
|             except ActronNeoAuthError as err: | ||||
|                 _LOGGER.error("OAuth2 flow failed: %s", err) | ||||
|                 return self.async_abort(reason="oauth2_error") | ||||
|  | ||||
|             self._device_code = device_code_response["device_code"] | ||||
|             self._user_code = device_code_response["user_code"] | ||||
|             self._verification_uri = device_code_response["verification_uri_complete"] | ||||
|             self._expires_minutes = str(device_code_response["expires_in"] // 60) | ||||
|  | ||||
|         async def _wait_for_authorization() -> None: | ||||
|             """Wait for the user to authorize the device.""" | ||||
|             assert self._api is not None | ||||
|             assert self._device_code is not None | ||||
|             _LOGGER.debug("Waiting for device authorization") | ||||
|             try: | ||||
|                 await self._api.poll_for_token(self._device_code) | ||||
|                 _LOGGER.debug("Authorization successful") | ||||
|             except ActronNeoAuthError as ex: | ||||
|                 _LOGGER.exception("Error while waiting for device authorization") | ||||
|                 raise CannotConnect from ex | ||||
|  | ||||
|         _LOGGER.debug("Checking login task") | ||||
|         if self.login_task is None: | ||||
|             _LOGGER.debug("Creating task for device authorization") | ||||
|             self.login_task = self.hass.async_create_task(_wait_for_authorization()) | ||||
|  | ||||
|         if self.login_task.done(): | ||||
|             _LOGGER.debug("Login task is done, checking results") | ||||
|             if exception := self.login_task.exception(): | ||||
|                 if isinstance(exception, CannotConnect): | ||||
|                     return self.async_show_progress_done( | ||||
|                         next_step_id="connection_error" | ||||
|                     ) | ||||
|                 return self.async_show_progress_done(next_step_id="timeout") | ||||
|             return self.async_show_progress_done(next_step_id="finish_login") | ||||
|  | ||||
|         return self.async_show_progress( | ||||
|             step_id="user", | ||||
|             progress_action="wait_for_authorization", | ||||
|             description_placeholders={ | ||||
|                 "user_code": self._user_code, | ||||
|                 "verification_uri": self._verification_uri, | ||||
|                 "expires_minutes": self._expires_minutes, | ||||
|             }, | ||||
|             progress_task=self.login_task, | ||||
|         ) | ||||
|  | ||||
|     async def async_step_finish_login( | ||||
|         self, user_input: dict[str, Any] | None = None | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Handle the finalization of login.""" | ||||
|         _LOGGER.debug("Finalizing authorization") | ||||
|         assert self._api is not None | ||||
|  | ||||
|         try: | ||||
|             user_data = await self._api.get_user_info() | ||||
|         except ActronNeoAuthError as err: | ||||
|             _LOGGER.error("Error getting user info: %s", err) | ||||
|             return self.async_abort(reason="oauth2_error") | ||||
|  | ||||
|         unique_id = str(user_data["id"]) | ||||
|         await self.async_set_unique_id(unique_id) | ||||
|         self._abort_if_unique_id_configured() | ||||
|  | ||||
|         return self.async_create_entry( | ||||
|             title=user_data["email"], | ||||
|             data={CONF_API_TOKEN: self._api.refresh_token_value}, | ||||
|         ) | ||||
|  | ||||
|     async def async_step_timeout( | ||||
|         self, | ||||
|         user_input: dict[str, Any] | None = None, | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Handle issues that need transition await from progress step.""" | ||||
|         if user_input is None: | ||||
|             return self.async_show_form( | ||||
|                 step_id="timeout", | ||||
|             ) | ||||
|         self.login_task = None | ||||
|         return await self.async_step_user() | ||||
|  | ||||
|     async def async_step_connection_error( | ||||
|         self, user_input: dict[str, Any] | None = None | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Handle connection error from progress step.""" | ||||
|         if user_input is None: | ||||
|             return self.async_show_form(step_id="connection_error") | ||||
|  | ||||
|         # Reset state and try again | ||||
|         self._api = None | ||||
|         self._device_code = None | ||||
|         self.login_task = None | ||||
|         return await self.async_step_user() | ||||
|  | ||||
|  | ||||
| class CannotConnect(HomeAssistantError): | ||||
|     """Error to indicate we cannot connect.""" | ||||
| @@ -1,6 +0,0 @@ | ||||
| """Constants used by Actron Air integration.""" | ||||
|  | ||||
| import logging | ||||
|  | ||||
| _LOGGER = logging.getLogger(__package__) | ||||
| DOMAIN = "actron_air" | ||||
| @@ -1,69 +0,0 @@ | ||||
| """Coordinator for Actron Air integration.""" | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| from dataclasses import dataclass | ||||
| from datetime import timedelta | ||||
|  | ||||
| from actron_neo_api import ActronAirNeoACSystem, ActronAirNeoStatus, ActronNeoAPI | ||||
|  | ||||
| from homeassistant.config_entries import ConfigEntry | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.helpers.update_coordinator import DataUpdateCoordinator | ||||
| from homeassistant.util import dt as dt_util | ||||
|  | ||||
| from .const import _LOGGER | ||||
|  | ||||
| STALE_DEVICE_TIMEOUT = timedelta(hours=24) | ||||
| ERROR_NO_SYSTEMS_FOUND = "no_systems_found" | ||||
| ERROR_UNKNOWN = "unknown_error" | ||||
|  | ||||
|  | ||||
| @dataclass | ||||
| class ActronAirRuntimeData: | ||||
|     """Runtime data for the Actron Air integration.""" | ||||
|  | ||||
|     api: ActronNeoAPI | ||||
|     system_coordinators: dict[str, ActronAirSystemCoordinator] | ||||
|  | ||||
|  | ||||
| type ActronAirConfigEntry = ConfigEntry[ActronAirRuntimeData] | ||||
|  | ||||
| AUTH_ERROR_THRESHOLD = 3 | ||||
| SCAN_INTERVAL = timedelta(seconds=30) | ||||
|  | ||||
|  | ||||
| class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirNeoStatus]): | ||||
|     """System coordinator for Actron Air integration.""" | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         hass: HomeAssistant, | ||||
|         entry: ActronAirConfigEntry, | ||||
|         api: ActronNeoAPI, | ||||
|         system: ActronAirNeoACSystem, | ||||
|     ) -> None: | ||||
|         """Initialize the coordinator.""" | ||||
|         super().__init__( | ||||
|             hass, | ||||
|             _LOGGER, | ||||
|             name="Actron Air Status", | ||||
|             update_interval=SCAN_INTERVAL, | ||||
|             config_entry=entry, | ||||
|         ) | ||||
|         self.system = system | ||||
|         self.serial_number = system["serial"] | ||||
|         self.api = api | ||||
|         self.status = self.api.state_manager.get_status(self.serial_number) | ||||
|         self.last_seen = dt_util.utcnow() | ||||
|  | ||||
|     async def _async_update_data(self) -> ActronAirNeoStatus: | ||||
|         """Fetch updates and merge incremental changes into the full state.""" | ||||
|         await self.api.update_status() | ||||
|         self.status = self.api.state_manager.get_status(self.serial_number) | ||||
|         self.last_seen = dt_util.utcnow() | ||||
|         return self.status | ||||
|  | ||||
|     def is_device_stale(self) -> bool: | ||||
|         """Check if a device is stale (not seen for a while).""" | ||||
|         return (dt_util.utcnow() - self.last_seen) > STALE_DEVICE_TIMEOUT | ||||
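
The coordinator above refreshes device state every SCAN_INTERVAL by calling update_status() and reading the result back from the client's state manager. A minimal sketch of one polling pass, assuming the same actron_neo_api calls used in __init__.py and _async_update_data, could look like:

import asyncio

from actron_neo_api import ActronNeoAPI

# Placeholder for the refresh token the config entry stores as CONF_API_TOKEN.
REFRESH_TOKEN = "stored-refresh-token"


async def poll_once() -> None:
    """Fetch the AC systems and print one status snapshot per system."""
    api = ActronNeoAPI(refresh_token=REFRESH_TOKEN)
    systems = await api.get_ac_systems()

    # A single update_status() call refreshes the client's state manager,
    # which is then queried per serial number, as the coordinator does.
    await api.update_status()
    for system in systems:
        status = api.state_manager.get_status(system["serial"])
        print(system["serial"], status.ac_system.system_name)


if __name__ == "__main__":
    asyncio.run(poll_once())
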
| @@ -1,16 +0,0 @@ | ||||
| { | ||||
|   "domain": "actron_air", | ||||
|   "name": "Actron Air", | ||||
|   "codeowners": ["@kclif9", "@JagadishDhanamjayam"], | ||||
|   "config_flow": true, | ||||
|   "dhcp": [ | ||||
|     { | ||||
|       "hostname": "neo-*", | ||||
|       "macaddress": "FC0FE7*" | ||||
|     } | ||||
|   ], | ||||
|   "documentation": "https://www.home-assistant.io/integrations/actron_air", | ||||
|   "iot_class": "cloud_polling", | ||||
|   "quality_scale": "bronze", | ||||
|   "requirements": ["actron-neo-api==0.1.84"] | ||||
| } | ||||
| @@ -1,78 +0,0 @@ | ||||
| rules: | ||||
|   # Bronze | ||||
|   action-setup: | ||||
|     status: exempt | ||||
|     comment: This integration does not have custom service actions. | ||||
|   appropriate-polling: done | ||||
|   brands: done | ||||
|   common-modules: done | ||||
|   config-flow-test-coverage: done | ||||
|   config-flow: done | ||||
|   dependency-transparency: done | ||||
|   docs-actions: | ||||
|     status: exempt | ||||
|     comment: This integration does not have custom service actions. | ||||
|   docs-high-level-description: done | ||||
|   docs-installation-instructions: done | ||||
|   docs-removal-instructions: done | ||||
|   entity-event-setup: | ||||
|     status: exempt | ||||
|     comment: This integration does not subscribe to external events. | ||||
|   entity-unique-id: done | ||||
|   has-entity-name: done | ||||
|   runtime-data: done | ||||
|   test-before-configure: done | ||||
|   test-before-setup: done | ||||
|   unique-config-entry: done | ||||
|  | ||||
|   # Silver | ||||
|   action-exceptions: todo | ||||
|   config-entry-unloading: done | ||||
|   docs-configuration-parameters: | ||||
|     status: exempt | ||||
|     comment: No options flow | ||||
|   docs-installation-parameters: done | ||||
|   entity-unavailable: done | ||||
|   integration-owner: done | ||||
|   log-when-unavailable: done | ||||
|   parallel-updates: done | ||||
|   reauthentication-flow: todo | ||||
|   test-coverage: todo | ||||
|  | ||||
|   # Gold | ||||
|   devices: done | ||||
|   diagnostics: todo | ||||
|   discovery-update-info: | ||||
|     status: exempt | ||||
|     comment: This integration uses DHCP discovery; however, it is cloud polling, so there is no information to update. | ||||
|   discovery: done | ||||
|   docs-data-update: done | ||||
|   docs-examples: done | ||||
|   docs-known-limitations: done | ||||
|   docs-supported-devices: done | ||||
|   docs-supported-functions: done | ||||
|   docs-troubleshooting: done | ||||
|   docs-use-cases: done | ||||
|   dynamic-devices: todo | ||||
|   entity-category: | ||||
|     status: exempt | ||||
|     comment: This integration does not use entity categories. | ||||
|   entity-device-class: | ||||
|     status: exempt | ||||
|     comment: This integration does not use entity device classes. | ||||
|   entity-disabled-by-default: | ||||
|     status: exempt | ||||
|     comment: Not required for this integration at this stage. | ||||
|   entity-translations: todo | ||||
|   exception-translations: todo | ||||
|   icon-translations: todo | ||||
|   reconfiguration-flow: todo | ||||
|   repair-issues: | ||||
|     status: exempt | ||||
|     comment: This integration does not have any known issues that require repair. | ||||
|   stale-devices: todo | ||||
|  | ||||
|   # Platinum | ||||
|   async-dependency: done | ||||
|   inject-websession: todo | ||||
|   strict-typing: todo | ||||
| @@ -1,29 +0,0 @@ | ||||
| { | ||||
|   "config": { | ||||
|     "step": { | ||||
|       "user": { | ||||
|         "title": "Actron Air OAuth2 Authorization" | ||||
|       }, | ||||
|       "timeout": { | ||||
|         "title": "Authorization timeout", | ||||
|         "description": "The authorization process timed out. Please try again.", | ||||
|         "data": {} | ||||
|       }, | ||||
|       "connection_error": { | ||||
|         "title": "Connection error", | ||||
|         "description": "Failed to connect to Actron Air. Please check your internet connection and try again.", | ||||
|         "data": {} | ||||
|       } | ||||
|     }, | ||||
|     "progress": { | ||||
|       "wait_for_authorization": "To authenticate, open the following URL and login at Actron Air:\n{verification_uri}\nIf the code is not automatically copied, paste the following code to authorize the integration:\n\n```{user_code}```\n\n\nThe login attempt will time out after {expires_minutes} minutes." | ||||
|     }, | ||||
|     "error": { | ||||
|       "oauth2_error": "Failed to start OAuth2 flow. Please try again later." | ||||
|     }, | ||||
|     "abort": { | ||||
|       "oauth2_error": "Failed to start OAuth2 flow", | ||||
|       "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" | ||||
|     } | ||||
|   } | ||||
| } | ||||
| @@ -6,5 +6,5 @@ | ||||
|   "documentation": "https://www.home-assistant.io/integrations/adax", | ||||
|   "iot_class": "local_polling", | ||||
|   "loggers": ["adax", "adax_local"], | ||||
|   "requirements": ["adax==0.4.0", "Adax-local==0.2.0"] | ||||
|   "requirements": ["adax==0.4.0", "Adax-local==0.1.5"] | ||||
| } | ||||
|   | ||||
| @@ -71,14 +71,7 @@ class AemetConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|             } | ||||
|         ) | ||||
|  | ||||
|         return self.async_show_form( | ||||
|             step_id="user", | ||||
|             data_schema=schema, | ||||
|             errors=errors, | ||||
|             description_placeholders={ | ||||
|                 "api_key_url": "https://opendata.aemet.es/centrodedescargas/altaUsuario" | ||||
|             }, | ||||
|         ) | ||||
|         return self.async_show_form(step_id="user", data_schema=schema, errors=errors) | ||||
|  | ||||
|     @staticmethod | ||||
|     @callback | ||||
|   | ||||
| @@ -14,7 +14,7 @@ | ||||
|           "longitude": "[%key:common::config_flow::data::longitude%]", | ||||
|           "name": "Name of the integration" | ||||
|         }, | ||||
|         "description": "To generate API key go to {api_key_url}" | ||||
|         "description": "To generate API key go to https://opendata.aemet.es/centrodedescargas/altaUsuario" | ||||
|       } | ||||
|     } | ||||
|   }, | ||||
|   | ||||
| @@ -53,6 +53,9 @@ __all__ = [ | ||||
|     "GenImageTaskResult", | ||||
|     "async_generate_data", | ||||
|     "async_generate_image", | ||||
|     "async_setup", | ||||
|     "async_setup_entry", | ||||
|     "async_unload_entry", | ||||
| ] | ||||
|  | ||||
| _LOGGER = logging.getLogger(__name__) | ||||
|   | ||||
| @@ -1,9 +1,7 @@ | ||||
| """Airgradient Update platform.""" | ||||
|  | ||||
| from datetime import timedelta | ||||
| import logging | ||||
|  | ||||
| from airgradient import AirGradientConnectionError | ||||
| from propcache.api import cached_property | ||||
|  | ||||
| from homeassistant.components.update import UpdateDeviceClass, UpdateEntity | ||||
| @@ -15,7 +13,6 @@ from .entity import AirGradientEntity | ||||
|  | ||||
| PARALLEL_UPDATES = 1 | ||||
| SCAN_INTERVAL = timedelta(hours=1) | ||||
| _LOGGER = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
| async def async_setup_entry( | ||||
| @@ -34,7 +31,6 @@ class AirGradientUpdate(AirGradientEntity, UpdateEntity): | ||||
|     """Representation of Airgradient Update.""" | ||||
|  | ||||
|     _attr_device_class = UpdateDeviceClass.FIRMWARE | ||||
|     _server_unreachable_logged = False | ||||
|  | ||||
|     def __init__(self, coordinator: AirGradientCoordinator) -> None: | ||||
|         """Initialize the entity.""" | ||||
| @@ -51,27 +47,10 @@ class AirGradientUpdate(AirGradientEntity, UpdateEntity): | ||||
|         """Return the installed version of the entity.""" | ||||
|         return self.coordinator.data.measures.firmware_version | ||||
|  | ||||
|     @property | ||||
|     def available(self) -> bool: | ||||
|         """Return if entity is available.""" | ||||
|         return super().available and self._attr_available | ||||
|  | ||||
|     async def async_update(self) -> None: | ||||
|         """Update the entity.""" | ||||
|         try: | ||||
|             self._attr_latest_version = ( | ||||
|                 await self.coordinator.client.get_latest_firmware_version( | ||||
|                     self.coordinator.serial_number | ||||
|                 ) | ||||
|         self._attr_latest_version = ( | ||||
|             await self.coordinator.client.get_latest_firmware_version( | ||||
|                 self.coordinator.serial_number | ||||
|             ) | ||||
|         except AirGradientConnectionError: | ||||
|             self._attr_latest_version = None | ||||
|             self._attr_available = False | ||||
|             if not self._server_unreachable_logged: | ||||
|                 _LOGGER.error( | ||||
|                     "Unable to connect to AirGradient server to check for updates" | ||||
|                 ) | ||||
|                 self._server_unreachable_logged = True | ||||
|         else: | ||||
|             self._server_unreachable_logged = False | ||||
|             self._attr_available = True | ||||
|         ) | ||||
|   | ||||
| @@ -18,10 +18,6 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession | ||||
|  | ||||
| from .const import CONF_USE_NEAREST, DOMAIN, NO_AIRLY_SENSORS | ||||
|  | ||||
| DESCRIPTION_PLACEHOLDERS = { | ||||
|     "developer_registration_url": "https://developer.airly.eu/register", | ||||
| } | ||||
|  | ||||
|  | ||||
| class AirlyFlowHandler(ConfigFlow, domain=DOMAIN): | ||||
|     """Config flow for Airly.""" | ||||
| @@ -89,7 +85,6 @@ class AirlyFlowHandler(ConfigFlow, domain=DOMAIN): | ||||
|                 } | ||||
|             ), | ||||
|             errors=errors, | ||||
|             description_placeholders=DESCRIPTION_PLACEHOLDERS, | ||||
|         ) | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -2,7 +2,7 @@ | ||||
|   "config": { | ||||
|     "step": { | ||||
|       "user": { | ||||
|         "description": "To generate API key go to {developer_registration_url}", | ||||
|         "description": "To generate API key go to https://developer.airly.eu/register", | ||||
|         "data": { | ||||
|           "name": "[%key:common::config_flow::data::name%]", | ||||
|           "api_key": "[%key:common::config_flow::data::api_key%]", | ||||
|   | ||||
| @@ -26,10 +26,6 @@ from .const import DOMAIN | ||||
| _LOGGER = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
| # Documentation URL for API key generation | ||||
| _API_KEY_URL = "https://docs.airnowapi.org/account/request/" | ||||
|  | ||||
|  | ||||
| async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> bool: | ||||
|     """Validate the user input allows us to connect. | ||||
|  | ||||
| @@ -118,7 +114,6 @@ class AirNowConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|                     ), | ||||
|                 } | ||||
|             ), | ||||
|             description_placeholders={"api_key_url": _API_KEY_URL}, | ||||
|             errors=errors, | ||||
|         ) | ||||
|  | ||||
|   | ||||
| @@ -2,7 +2,7 @@ | ||||
|   "config": { | ||||
|     "step": { | ||||
|       "user": { | ||||
|         "description": "To generate API key go to {api_key_url}", | ||||
|         "description": "To generate API key go to https://docs.airnowapi.org/account/request/", | ||||
|         "data": { | ||||
|           "api_key": "[%key:common::config_flow::data::api_key%]", | ||||
|           "latitude": "[%key:common::config_flow::data::latitude%]", | ||||
|   | ||||
| @@ -2,23 +2,12 @@ | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| import logging | ||||
|  | ||||
| from airos.airos8 import AirOS8 | ||||
|  | ||||
| from homeassistant.const import ( | ||||
|     CONF_HOST, | ||||
|     CONF_PASSWORD, | ||||
|     CONF_SSL, | ||||
|     CONF_USERNAME, | ||||
|     CONF_VERIFY_SSL, | ||||
|     Platform, | ||||
| ) | ||||
| from homeassistant.core import HomeAssistant, callback | ||||
| from homeassistant.helpers import device_registry as dr, entity_registry as er | ||||
| from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.helpers.aiohttp_client import async_get_clientsession | ||||
|  | ||||
| from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, SECTION_ADVANCED_SETTINGS | ||||
| from .coordinator import AirOSConfigEntry, AirOSDataUpdateCoordinator | ||||
|  | ||||
| _PLATFORMS: list[Platform] = [ | ||||
| @@ -26,24 +15,19 @@ _PLATFORMS: list[Platform] = [ | ||||
|     Platform.SENSOR, | ||||
| ] | ||||
|  | ||||
| _LOGGER = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
| async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool: | ||||
|     """Set up Ubiquiti airOS from a config entry.""" | ||||
|  | ||||
|     # By default airOS 8 comes with self-signed SSL certificates, | ||||
|     # with no option in the web UI to change or upload a custom certificate. | ||||
|     session = async_get_clientsession( | ||||
|         hass, verify_ssl=entry.data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL] | ||||
|     ) | ||||
|     session = async_get_clientsession(hass, verify_ssl=False) | ||||
|  | ||||
|     airos_device = AirOS8( | ||||
|         host=entry.data[CONF_HOST], | ||||
|         username=entry.data[CONF_USERNAME], | ||||
|         password=entry.data[CONF_PASSWORD], | ||||
|         session=session, | ||||
|         use_ssl=entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL], | ||||
|     ) | ||||
|  | ||||
|     coordinator = AirOSDataUpdateCoordinator(hass, entry, airos_device) | ||||
| @@ -56,77 +40,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo | ||||
|     return True | ||||
|  | ||||
|  | ||||
| async def async_migrate_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool: | ||||
|     """Migrate old config entry.""" | ||||
|  | ||||
|     # This means the user has downgraded from a future version | ||||
|     if entry.version > 2: | ||||
|         return False | ||||
|  | ||||
|     # 1.1 Migrate config_entry to add advanced ssl settings | ||||
|     if entry.version == 1 and entry.minor_version == 1: | ||||
|         new_minor_version = 2 | ||||
|         new_data = {**entry.data} | ||||
|         advanced_data = { | ||||
|             CONF_SSL: DEFAULT_SSL, | ||||
|             CONF_VERIFY_SSL: DEFAULT_VERIFY_SSL, | ||||
|         } | ||||
|         new_data[SECTION_ADVANCED_SETTINGS] = advanced_data | ||||
|  | ||||
|         hass.config_entries.async_update_entry( | ||||
|             entry, | ||||
|             data=new_data, | ||||
|             minor_version=new_minor_version, | ||||
|         ) | ||||
|  | ||||
|     # 2.1 Migrate binary_sensor entity unique_id from device_id to mac_address | ||||
|     #     Step 1 - migrate binary_sensor entity unique_id | ||||
|     #     Step 2 - migrate device entity identifier | ||||
|     if entry.version == 1: | ||||
|         new_version = 2 | ||||
|         new_minor_version = 1 | ||||
|  | ||||
|         mac_adress = dr.format_mac(entry.unique_id) | ||||
|  | ||||
|         device_registry = dr.async_get(hass) | ||||
|         if device_entry := device_registry.async_get_device( | ||||
|             connections={(dr.CONNECTION_NETWORK_MAC, mac_adress)} | ||||
|         ): | ||||
|             old_device_id = next( | ||||
|                 ( | ||||
|                     device_id | ||||
|                     for domain, device_id in device_entry.identifiers | ||||
|                     if domain == DOMAIN | ||||
|                 ), | ||||
|             ) | ||||
|  | ||||
|             @callback | ||||
|             def update_unique_id( | ||||
|                 entity_entry: er.RegistryEntry, | ||||
|             ) -> dict[str, str] | None: | ||||
|                 """Update unique id from device_id to mac address.""" | ||||
|                 if old_device_id and entity_entry.unique_id.startswith(old_device_id): | ||||
|                     suffix = entity_entry.unique_id.removeprefix(old_device_id) | ||||
|                     new_unique_id = f"{mac_adress}{suffix}" | ||||
|                     return {"new_unique_id": new_unique_id} | ||||
|                 return None | ||||
|  | ||||
|             await er.async_migrate_entries(hass, entry.entry_id, update_unique_id) | ||||
|  | ||||
|             new_identifiers = device_entry.identifiers.copy() | ||||
|             new_identifiers.discard((DOMAIN, old_device_id)) | ||||
|             new_identifiers.add((DOMAIN, mac_adress)) | ||||
|             device_registry.async_update_device( | ||||
|                 device_entry.id, new_identifiers=new_identifiers | ||||
|             ) | ||||
|  | ||||
|         hass.config_entries.async_update_entry( | ||||
|             entry, version=new_version, minor_version=new_minor_version | ||||
|         ) | ||||
|  | ||||
|     return True | ||||
|  | ||||
|  | ||||
| async def async_unload_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool: | ||||
|     """Unload a config entry.""" | ||||
|     return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS) | ||||
|   | ||||
| @@ -98,7 +98,7 @@ class AirOSBinarySensor(AirOSEntity, BinarySensorEntity): | ||||
|         super().__init__(coordinator) | ||||
|  | ||||
|         self.entity_description = description | ||||
|         self._attr_unique_id = f"{coordinator.data.derived.mac}_{description.key}" | ||||
|         self._attr_unique_id = f"{coordinator.data.host.device_id}_{description.key}" | ||||
|  | ||||
|     @property | ||||
|     def is_on(self) -> bool: | ||||
|   | ||||
| @@ -2,7 +2,6 @@ | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| from collections.abc import Mapping | ||||
| import logging | ||||
| from typing import Any | ||||
|  | ||||
| @@ -15,28 +14,11 @@ from airos.exceptions import ( | ||||
| ) | ||||
| import voluptuous as vol | ||||
|  | ||||
| from homeassistant.config_entries import ( | ||||
|     SOURCE_REAUTH, | ||||
|     SOURCE_RECONFIGURE, | ||||
|     ConfigFlow, | ||||
|     ConfigFlowResult, | ||||
| ) | ||||
| from homeassistant.const import ( | ||||
|     CONF_HOST, | ||||
|     CONF_PASSWORD, | ||||
|     CONF_SSL, | ||||
|     CONF_USERNAME, | ||||
|     CONF_VERIFY_SSL, | ||||
| ) | ||||
| from homeassistant.data_entry_flow import section | ||||
| from homeassistant.config_entries import ConfigFlow, ConfigFlowResult | ||||
| from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME | ||||
| from homeassistant.helpers.aiohttp_client import async_get_clientsession | ||||
| from homeassistant.helpers.selector import ( | ||||
|     TextSelector, | ||||
|     TextSelectorConfig, | ||||
|     TextSelectorType, | ||||
| ) | ||||
|  | ||||
| from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, SECTION_ADVANCED_SETTINGS | ||||
| from .const import DOMAIN | ||||
| from .coordinator import AirOS8 | ||||
|  | ||||
| _LOGGER = logging.getLogger(__name__) | ||||
| @@ -46,15 +28,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema( | ||||
|         vol.Required(CONF_HOST): str, | ||||
|         vol.Required(CONF_USERNAME, default="ubnt"): str, | ||||
|         vol.Required(CONF_PASSWORD): str, | ||||
|         vol.Required(SECTION_ADVANCED_SETTINGS): section( | ||||
|             vol.Schema( | ||||
|                 { | ||||
|                     vol.Required(CONF_SSL, default=DEFAULT_SSL): bool, | ||||
|                     vol.Required(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): bool, | ||||
|                 } | ||||
|             ), | ||||
|             {"collapsed": True}, | ||||
|         ), | ||||
|     } | ||||
| ) | ||||
|  | ||||
| @@ -62,161 +35,48 @@ STEP_USER_DATA_SCHEMA = vol.Schema( | ||||
| class AirOSConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|     """Handle a config flow for Ubiquiti airOS.""" | ||||
|  | ||||
|     VERSION = 2 | ||||
|     MINOR_VERSION = 1 | ||||
|  | ||||
|     def __init__(self) -> None: | ||||
|         """Initialize the config flow.""" | ||||
|         super().__init__() | ||||
|         self.airos_device: AirOS8 | ||||
|         self.errors: dict[str, str] = {} | ||||
|     VERSION = 1 | ||||
|  | ||||
|     async def async_step_user( | ||||
|         self, user_input: dict[str, Any] | None = None | ||||
|         self, | ||||
|         user_input: dict[str, Any] | None = None, | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Handle the manual input of host and credentials.""" | ||||
|         self.errors = {} | ||||
|         """Handle the initial step.""" | ||||
|         errors: dict[str, str] = {} | ||||
|         if user_input is not None: | ||||
|             validated_info = await self._validate_and_get_device_info(user_input) | ||||
|             if validated_info: | ||||
|                 return self.async_create_entry( | ||||
|                     title=validated_info["title"], | ||||
|                     data=validated_info["data"], | ||||
|                 ) | ||||
|         return self.async_show_form( | ||||
|             step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=self.errors | ||||
|         ) | ||||
|             # By default airOS 8 comes with self-signed SSL certificates, | ||||
|             # with no option in the web UI to change or upload a custom certificate. | ||||
|             session = async_get_clientsession(self.hass, verify_ssl=False) | ||||
|  | ||||
|     async def _validate_and_get_device_info( | ||||
|         self, config_data: dict[str, Any] | ||||
|     ) -> dict[str, Any] | None: | ||||
|         """Validate user input with the device API.""" | ||||
|         # By default airOS 8 comes with self-signed SSL certificates, | ||||
|         # with no option in the web UI to change or upload a custom certificate. | ||||
|         session = async_get_clientsession( | ||||
|             self.hass, | ||||
|             verify_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL], | ||||
|         ) | ||||
|             airos_device = AirOS8( | ||||
|                 host=user_input[CONF_HOST], | ||||
|                 username=user_input[CONF_USERNAME], | ||||
|                 password=user_input[CONF_PASSWORD], | ||||
|                 session=session, | ||||
|             ) | ||||
|             try: | ||||
|                 await airos_device.login() | ||||
|                 airos_data = await airos_device.status() | ||||
|  | ||||
|         airos_device = AirOS8( | ||||
|             host=config_data[CONF_HOST], | ||||
|             username=config_data[CONF_USERNAME], | ||||
|             password=config_data[CONF_PASSWORD], | ||||
|             session=session, | ||||
|             use_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_SSL], | ||||
|         ) | ||||
|         try: | ||||
|             await airos_device.login() | ||||
|             airos_data = await airos_device.status() | ||||
|  | ||||
|         except ( | ||||
|             AirOSConnectionSetupError, | ||||
|             AirOSDeviceConnectionError, | ||||
|         ): | ||||
|             self.errors["base"] = "cannot_connect" | ||||
|         except (AirOSConnectionAuthenticationError, AirOSDataMissingError): | ||||
|             self.errors["base"] = "invalid_auth" | ||||
|         except AirOSKeyDataMissingError: | ||||
|             self.errors["base"] = "key_data_missing" | ||||
|         except Exception: | ||||
|             _LOGGER.exception("Unexpected exception during credential validation") | ||||
|             self.errors["base"] = "unknown" | ||||
|         else: | ||||
|             await self.async_set_unique_id(airos_data.derived.mac) | ||||
|  | ||||
|             if self.source in [SOURCE_REAUTH, SOURCE_RECONFIGURE]: | ||||
|                 self._abort_if_unique_id_mismatch() | ||||
|             except ( | ||||
|                 AirOSConnectionSetupError, | ||||
|                 AirOSDeviceConnectionError, | ||||
|             ): | ||||
|                 errors["base"] = "cannot_connect" | ||||
|             except (AirOSConnectionAuthenticationError, AirOSDataMissingError): | ||||
|                 errors["base"] = "invalid_auth" | ||||
|             except AirOSKeyDataMissingError: | ||||
|                 errors["base"] = "key_data_missing" | ||||
|             except Exception: | ||||
|                 _LOGGER.exception("Unexpected exception") | ||||
|                 errors["base"] = "unknown" | ||||
|             else: | ||||
|                 await self.async_set_unique_id(airos_data.derived.mac) | ||||
|                 self._abort_if_unique_id_configured() | ||||
|  | ||||
|             return {"title": airos_data.host.hostname, "data": config_data} | ||||
|  | ||||
|         return None | ||||
|  | ||||
|     async def async_step_reauth( | ||||
|         self, | ||||
|         user_input: Mapping[str, Any], | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Perform reauthentication upon an API authentication error.""" | ||||
|         return await self.async_step_reauth_confirm(user_input) | ||||
|  | ||||
|     async def async_step_reauth_confirm( | ||||
|         self, | ||||
|         user_input: Mapping[str, Any], | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Perform reauthentication upon an API authentication error.""" | ||||
|         self.errors = {} | ||||
|  | ||||
|         if user_input: | ||||
|             validate_data = {**self._get_reauth_entry().data, **user_input} | ||||
|             if await self._validate_and_get_device_info(config_data=validate_data): | ||||
|                 return self.async_update_reload_and_abort( | ||||
|                     self._get_reauth_entry(), | ||||
|                     data_updates=validate_data, | ||||
|                 return self.async_create_entry( | ||||
|                     title=airos_data.host.hostname, data=user_input | ||||
|                 ) | ||||
|  | ||||
|         return self.async_show_form( | ||||
|             step_id="reauth_confirm", | ||||
|             data_schema=vol.Schema( | ||||
|                 { | ||||
|                     vol.Required(CONF_PASSWORD): TextSelector( | ||||
|                         TextSelectorConfig( | ||||
|                             type=TextSelectorType.PASSWORD, | ||||
|                             autocomplete="current-password", | ||||
|                         ) | ||||
|                     ), | ||||
|                 } | ||||
|             ), | ||||
|             errors=self.errors, | ||||
|         ) | ||||
|  | ||||
|     async def async_step_reconfigure( | ||||
|         self, | ||||
|         user_input: Mapping[str, Any] | None = None, | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Handle reconfiguration of airOS.""" | ||||
|         self.errors = {} | ||||
|         entry = self._get_reconfigure_entry() | ||||
|         current_data = entry.data | ||||
|  | ||||
|         if user_input is not None: | ||||
|             validate_data = {**current_data, **user_input} | ||||
|             if await self._validate_and_get_device_info(config_data=validate_data): | ||||
|                 return self.async_update_reload_and_abort( | ||||
|                     entry, | ||||
|                     data_updates=validate_data, | ||||
|                 ) | ||||
|  | ||||
|         return self.async_show_form( | ||||
|             step_id="reconfigure", | ||||
|             data_schema=vol.Schema( | ||||
|                 { | ||||
|                     vol.Required(CONF_PASSWORD): TextSelector( | ||||
|                         TextSelectorConfig( | ||||
|                             type=TextSelectorType.PASSWORD, | ||||
|                             autocomplete="current-password", | ||||
|                         ) | ||||
|                     ), | ||||
|                     vol.Required(SECTION_ADVANCED_SETTINGS): section( | ||||
|                         vol.Schema( | ||||
|                             { | ||||
|                                 vol.Required( | ||||
|                                     CONF_SSL, | ||||
|                                     default=current_data[SECTION_ADVANCED_SETTINGS][ | ||||
|                                         CONF_SSL | ||||
|                                     ], | ||||
|                                 ): bool, | ||||
|                                 vol.Required( | ||||
|                                     CONF_VERIFY_SSL, | ||||
|                                     default=current_data[SECTION_ADVANCED_SETTINGS][ | ||||
|                                         CONF_VERIFY_SSL | ||||
|                                     ], | ||||
|                                 ): bool, | ||||
|                             } | ||||
|                         ), | ||||
|                         {"collapsed": True}, | ||||
|                     ), | ||||
|                 } | ||||
|             ), | ||||
|             errors=self.errors, | ||||
|             step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors | ||||
|         ) | ||||
|   | ||||
| @@ -7,8 +7,3 @@ DOMAIN = "airos" | ||||
| SCAN_INTERVAL = timedelta(minutes=1) | ||||
|  | ||||
| MANUFACTURER = "Ubiquiti" | ||||
|  | ||||
| DEFAULT_VERIFY_SSL = False | ||||
| DEFAULT_SSL = True | ||||
|  | ||||
| SECTION_ADVANCED_SETTINGS = "advanced_settings" | ||||
|   | ||||
| @@ -14,7 +14,7 @@ from airos.exceptions import ( | ||||
|  | ||||
| from homeassistant.config_entries import ConfigEntry | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.exceptions import ConfigEntryAuthFailed | ||||
| from homeassistant.exceptions import ConfigEntryError | ||||
| from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed | ||||
|  | ||||
| from .const import DOMAIN, SCAN_INTERVAL | ||||
| @@ -47,9 +47,9 @@ class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOS8Data]): | ||||
|         try: | ||||
|             await self.airos_device.login() | ||||
|             return await self.airos_device.status() | ||||
|         except AirOSConnectionAuthenticationError as err: | ||||
|         except (AirOSConnectionAuthenticationError,) as err: | ||||
|             _LOGGER.exception("Error authenticating with airOS device") | ||||
|             raise ConfigEntryAuthFailed( | ||||
|             raise ConfigEntryError( | ||||
|                 translation_domain=DOMAIN, translation_key="invalid_auth" | ||||
|             ) from err | ||||
|         except ( | ||||
|   | ||||
| @@ -2,11 +2,11 @@ | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| from homeassistant.const import CONF_HOST, CONF_SSL | ||||
| from homeassistant.const import CONF_HOST | ||||
| from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo | ||||
| from homeassistant.helpers.update_coordinator import CoordinatorEntity | ||||
|  | ||||
| from .const import DOMAIN, MANUFACTURER, SECTION_ADVANCED_SETTINGS | ||||
| from .const import DOMAIN, MANUFACTURER | ||||
| from .coordinator import AirOSDataUpdateCoordinator | ||||
|  | ||||
|  | ||||
| @@ -20,27 +20,17 @@ class AirOSEntity(CoordinatorEntity[AirOSDataUpdateCoordinator]): | ||||
|         super().__init__(coordinator) | ||||
|  | ||||
|         airos_data = self.coordinator.data | ||||
|         url_schema = ( | ||||
|             "https" | ||||
|             if coordinator.config_entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL] | ||||
|             else "http" | ||||
|         ) | ||||
|  | ||||
|         configuration_url: str | None = ( | ||||
|             f"{url_schema}://{coordinator.config_entry.data[CONF_HOST]}" | ||||
|             f"https://{coordinator.config_entry.data[CONF_HOST]}" | ||||
|         ) | ||||
|  | ||||
|         self._attr_device_info = DeviceInfo( | ||||
|             connections={(CONNECTION_NETWORK_MAC, airos_data.derived.mac)}, | ||||
|             configuration_url=configuration_url, | ||||
|             identifiers={(DOMAIN, airos_data.derived.mac)}, | ||||
|             identifiers={(DOMAIN, str(airos_data.host.device_id))}, | ||||
|             manufacturer=MANUFACTURER, | ||||
|             model=airos_data.host.devmodel, | ||||
|             model_id=( | ||||
|                 sku | ||||
|                 if (sku := airos_data.derived.sku) not in ["UNKNOWN", "AMBIGUOUS"] | ||||
|                 else None | ||||
|             ), | ||||
|             name=airos_data.host.hostname, | ||||
|             sw_version=airos_data.host.fwversion, | ||||
|         ) | ||||
|   | ||||
| @@ -4,8 +4,7 @@ | ||||
|   "codeowners": ["@CoMPaTech"], | ||||
|   "config_flow": true, | ||||
|   "documentation": "https://www.home-assistant.io/integrations/airos", | ||||
|   "integration_type": "device", | ||||
|   "iot_class": "local_polling", | ||||
|   "quality_scale": "silver", | ||||
|   "requirements": ["airos==0.6.0"] | ||||
|   "quality_scale": "bronze", | ||||
|   "requirements": ["airos==0.5.1"] | ||||
| } | ||||
|   | ||||
| @@ -32,11 +32,11 @@ rules: | ||||
|   config-entry-unloading: done | ||||
|   docs-configuration-parameters: done | ||||
|   docs-installation-parameters: done | ||||
|   entity-unavailable: done | ||||
|   entity-unavailable: todo | ||||
|   integration-owner: done | ||||
|   log-when-unavailable: done | ||||
|   parallel-updates: done | ||||
|   reauthentication-flow: done | ||||
|   log-when-unavailable: todo | ||||
|   parallel-updates: todo | ||||
|   reauthentication-flow: todo | ||||
|   test-coverage: done | ||||
|  | ||||
|   # Gold | ||||
| @@ -48,9 +48,9 @@ rules: | ||||
|   docs-examples: todo | ||||
|   docs-known-limitations: done | ||||
|   docs-supported-devices: done | ||||
|   docs-supported-functions: done | ||||
|   docs-supported-functions: todo | ||||
|   docs-troubleshooting: done | ||||
|   docs-use-cases: done | ||||
|   docs-use-cases: todo | ||||
|   dynamic-devices: todo | ||||
|   entity-category: done | ||||
|   entity-device-class: done | ||||
| @@ -60,7 +60,7 @@ rules: | ||||
|   icon-translations: | ||||
|     status: exempt | ||||
|     comment: no (custom) icons used or envisioned | ||||
|   reconfiguration-flow: done | ||||
|   reconfiguration-flow: todo | ||||
|   repair-issues: todo | ||||
|   stale-devices: todo | ||||
|  | ||||
|   | ||||
| @@ -2,35 +2,6 @@ | ||||
|   "config": { | ||||
|     "flow_title": "Ubiquiti airOS device", | ||||
|     "step": { | ||||
|       "reauth_confirm": { | ||||
|         "data": { | ||||
|           "password": "[%key:common::config_flow::data::password%]" | ||||
|         }, | ||||
|         "data_description": { | ||||
|           "password": "[%key:component::airos::config::step::user::data_description::password%]" | ||||
|         } | ||||
|       }, | ||||
|       "reconfigure": { | ||||
|         "data": { | ||||
|           "password": "[%key:common::config_flow::data::password%]" | ||||
|         }, | ||||
|         "data_description": { | ||||
|           "password": "[%key:component::airos::config::step::user::data_description::password%]" | ||||
|         }, | ||||
|         "sections": { | ||||
|           "advanced_settings": { | ||||
|             "name": "[%key:component::airos::config::step::user::sections::advanced_settings::name%]", | ||||
|             "data": { | ||||
|               "ssl": "[%key:component::airos::config::step::user::sections::advanced_settings::data::ssl%]", | ||||
|               "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" | ||||
|             }, | ||||
|             "data_description": { | ||||
|               "ssl": "[%key:component::airos::config::step::user::sections::advanced_settings::data_description::ssl%]", | ||||
|               "verify_ssl": "[%key:component::airos::config::step::user::sections::advanced_settings::data_description::verify_ssl%]" | ||||
|             } | ||||
|           } | ||||
|         } | ||||
|       }, | ||||
|       "user": { | ||||
|         "data": { | ||||
|           "host": "[%key:common::config_flow::data::host%]", | ||||
| @@ -41,19 +12,6 @@ | ||||
|           "host": "IP address or hostname of the airOS device", | ||||
|           "username": "Administrator username for the airOS device, normally 'ubnt'", | ||||
|           "password": "Password configured through the UISP app or web interface" | ||||
|         }, | ||||
|         "sections": { | ||||
|           "advanced_settings": { | ||||
|             "name": "Advanced settings", | ||||
|             "data": { | ||||
|               "ssl": "Use HTTPS", | ||||
|               "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" | ||||
|             }, | ||||
|             "data_description": { | ||||
|               "ssl": "Whether the connection should be encrypted (required for most devices)", | ||||
|               "verify_ssl": "Whether the certificate should be verified when using HTTPS. This should be off for self-signed certificates" | ||||
|             } | ||||
|           } | ||||
|         } | ||||
|       } | ||||
|     }, | ||||
| @@ -64,10 +22,7 @@ | ||||
|       "unknown": "[%key:common::config_flow::error::unknown%]" | ||||
|     }, | ||||
|     "abort": { | ||||
|       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", | ||||
|       "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", | ||||
|       "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", | ||||
|       "unique_id_mismatch": "Re-authentication should be used for the same device not a new one" | ||||
|       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" | ||||
|     } | ||||
|   }, | ||||
|   "entity": { | ||||
|   | ||||
| @@ -7,5 +7,5 @@ | ||||
|   "integration_type": "hub", | ||||
|   "iot_class": "local_polling", | ||||
|   "loggers": ["aioairq"], | ||||
|   "requirements": ["aioairq==0.4.7"] | ||||
|   "requirements": ["aioairq==0.4.6"] | ||||
| } | ||||
|   | ||||
| @@ -29,7 +29,7 @@ | ||||
|         }, | ||||
|         "data_description": { | ||||
|           "return_average": "air-Q allows to poll both the noisy sensor readings as well as the values averaged on the device (default)", | ||||
|           "clip_negatives": "For baseline calibration purposes, certain sensor values may briefly become negative. The default behavior is to clip such values to 0" | ||||
|           "clip_negatives": "For baseline calibration purposes, certain sensor values may briefly become negative. The default behaviour is to clip such values to 0" | ||||
|         } | ||||
|       } | ||||
|     } | ||||
|   | ||||
| @@ -23,10 +23,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema( | ||||
|     } | ||||
| ) | ||||
|  | ||||
| URL_API_INTEGRATION = { | ||||
|     "url": "https://dashboard.airthings.com/integrations/api-integration" | ||||
| } | ||||
|  | ||||
|  | ||||
| class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|     """Handle a config flow for Airthings.""" | ||||
| @@ -41,7 +37,11 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|             return self.async_show_form( | ||||
|                 step_id="user", | ||||
|                 data_schema=STEP_USER_DATA_SCHEMA, | ||||
|                 description_placeholders=URL_API_INTEGRATION, | ||||
|                 description_placeholders={ | ||||
|                     "url": ( | ||||
|                         "https://dashboard.airthings.com/integrations/api-integration" | ||||
|                     ), | ||||
|                 }, | ||||
|             ) | ||||
|  | ||||
|         errors = {} | ||||
| @@ -65,8 +65,5 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|             return self.async_create_entry(title="Airthings", data=user_input) | ||||
|  | ||||
|         return self.async_show_form( | ||||
|             step_id="user", | ||||
|             data_schema=STEP_USER_DATA_SCHEMA, | ||||
|             errors=errors, | ||||
|             description_placeholders=URL_API_INTEGRATION, | ||||
|             step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors | ||||
|         ) | ||||
|   | ||||
| @@ -4,9 +4,9 @@ | ||||
|       "user": { | ||||
|         "data": { | ||||
|           "id": "ID", | ||||
|           "secret": "Secret" | ||||
|         }, | ||||
|         "description": "Log in at {url} to find your credentials" | ||||
|           "secret": "Secret", | ||||
|           "description": "Login at {url} to find your credentials" | ||||
|         } | ||||
|       } | ||||
|     }, | ||||
|     "error": { | ||||
|   | ||||
| @@ -6,13 +6,8 @@ import dataclasses | ||||
| import logging | ||||
| from typing import Any | ||||
|  | ||||
| from airthings_ble import ( | ||||
|     AirthingsBluetoothDeviceData, | ||||
|     AirthingsDevice, | ||||
|     UnsupportedDeviceError, | ||||
| ) | ||||
| from airthings_ble import AirthingsBluetoothDeviceData, AirthingsDevice | ||||
| from bleak import BleakError | ||||
| from habluetooth import BluetoothServiceInfoBleak | ||||
| import voluptuous as vol | ||||
|  | ||||
| from homeassistant.components import bluetooth | ||||
| @@ -32,7 +27,6 @@ SERVICE_UUIDS = [ | ||||
|     "b42e4a8e-ade7-11e4-89d3-123b93f75cba", | ||||
|     "b42e1c08-ade7-11e4-89d3-123b93f75cba", | ||||
|     "b42e3882-ade7-11e4-89d3-123b93f75cba", | ||||
|     "b42e90a2-ade7-11e4-89d3-123b93f75cba", | ||||
| ] | ||||
|  | ||||
|  | ||||
| @@ -43,7 +37,6 @@ class Discovery: | ||||
|     name: str | ||||
|     discovery_info: BluetoothServiceInfo | ||||
|     device: AirthingsDevice | ||||
|     data: AirthingsBluetoothDeviceData | ||||
|  | ||||
|  | ||||
| def get_name(device: AirthingsDevice) -> str: | ||||
| @@ -51,7 +44,7 @@ def get_name(device: AirthingsDevice) -> str: | ||||
|  | ||||
|     name = device.friendly_name() | ||||
|     if identifier := device.identifier: | ||||
|         name += f" ({device.model.value}{identifier})" | ||||
|         name += f" ({identifier})" | ||||
|     return name | ||||
|  | ||||
|  | ||||
| @@ -69,8 +62,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|         self._discovered_device: Discovery | None = None | ||||
|         self._discovered_devices: dict[str, Discovery] = {} | ||||
|  | ||||
|     async def _get_device( | ||||
|         self, data: AirthingsBluetoothDeviceData, discovery_info: BluetoothServiceInfo | ||||
|     async def _get_device_data( | ||||
|         self, discovery_info: BluetoothServiceInfo | ||||
|     ) -> AirthingsDevice: | ||||
|         ble_device = bluetooth.async_ble_device_from_address( | ||||
|             self.hass, discovery_info.address | ||||
| @@ -79,8 +72,10 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|             _LOGGER.debug("no ble_device in _get_device_data") | ||||
|             raise AirthingsDeviceUpdateError("No ble_device") | ||||
|  | ||||
|         airthings = AirthingsBluetoothDeviceData(_LOGGER) | ||||
|  | ||||
|         try: | ||||
|             device = await data.update_device(ble_device) | ||||
|             data = await airthings.update_device(ble_device) | ||||
|         except BleakError as err: | ||||
|             _LOGGER.error( | ||||
|                 "Error connecting to and getting data from %s: %s", | ||||
| @@ -88,15 +83,12 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|                 err, | ||||
|             ) | ||||
|             raise AirthingsDeviceUpdateError("Failed getting device data") from err | ||||
|         except UnsupportedDeviceError: | ||||
|             _LOGGER.debug("Skipping unsupported device: %s", discovery_info.name) | ||||
|             raise | ||||
|         except Exception as err: | ||||
|             _LOGGER.error( | ||||
|                 "Unknown error occurred from %s: %s", discovery_info.address, err | ||||
|             ) | ||||
|             raise | ||||
|         return device | ||||
|         return data | ||||
|  | ||||
|     async def async_step_bluetooth( | ||||
|         self, discovery_info: BluetoothServiceInfo | ||||
| @@ -106,21 +98,17 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|         await self.async_set_unique_id(discovery_info.address) | ||||
|         self._abort_if_unique_id_configured() | ||||
|  | ||||
|         data = AirthingsBluetoothDeviceData(logger=_LOGGER) | ||||
|  | ||||
|         try: | ||||
|             device = await self._get_device(data=data, discovery_info=discovery_info) | ||||
|             device = await self._get_device_data(discovery_info) | ||||
|         except AirthingsDeviceUpdateError: | ||||
|             return self.async_abort(reason="cannot_connect") | ||||
|         except UnsupportedDeviceError: | ||||
|             return self.async_abort(reason="unsupported_device") | ||||
|         except Exception: | ||||
|             _LOGGER.exception("Unknown error occurred") | ||||
|             return self.async_abort(reason="unknown") | ||||
|  | ||||
|         name = get_name(device) | ||||
|         self.context["title_placeholders"] = {"name": name} | ||||
|         self._discovered_device = Discovery(name, discovery_info, device, data=data) | ||||
|         self._discovered_device = Discovery(name, discovery_info, device) | ||||
|  | ||||
|         return await self.async_step_bluetooth_confirm() | ||||
|  | ||||
| @@ -129,12 +117,6 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Confirm discovery.""" | ||||
|         if user_input is not None: | ||||
|             if ( | ||||
|                 self._discovered_device is not None | ||||
|                 and self._discovered_device.device.firmware.need_firmware_upgrade | ||||
|             ): | ||||
|                 return self.async_abort(reason="firmware_upgrade_required") | ||||
|  | ||||
|             return self.async_create_entry( | ||||
|                 title=self.context["title_placeholders"]["name"], data={} | ||||
|             ) | ||||
| @@ -155,9 +137,6 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|             self._abort_if_unique_id_configured() | ||||
|             discovery = self._discovered_devices[address] | ||||
|  | ||||
|             if discovery.device.firmware.need_firmware_upgrade: | ||||
|                 return self.async_abort(reason="firmware_upgrade_required") | ||||
|  | ||||
|             self.context["title_placeholders"] = { | ||||
|                 "name": discovery.name, | ||||
|             } | ||||
| @@ -167,53 +146,32 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|             return self.async_create_entry(title=discovery.name, data={}) | ||||
|  | ||||
|         current_addresses = self._async_current_ids(include_ignore=False) | ||||
|         devices: list[BluetoothServiceInfoBleak] = [] | ||||
|         for discovery_info in async_discovered_service_info(self.hass): | ||||
|             address = discovery_info.address | ||||
|             if address in current_addresses or address in self._discovered_devices: | ||||
|                 continue | ||||
|  | ||||
|             if MFCT_ID not in discovery_info.manufacturer_data: | ||||
|                 continue | ||||
|             if not any(uuid in SERVICE_UUIDS for uuid in discovery_info.service_uuids): | ||||
|                 _LOGGER.debug( | ||||
|                     "Skipping unsupported device: %s (%s)", discovery_info.name, address | ||||
|                 ) | ||||
|                 continue | ||||
|             devices.append(discovery_info) | ||||
|  | ||||
|         for discovery_info in devices: | ||||
|             address = discovery_info.address | ||||
|             data = AirthingsBluetoothDeviceData(logger=_LOGGER) | ||||
|             if not any(uuid in SERVICE_UUIDS for uuid in discovery_info.service_uuids): | ||||
|                 continue | ||||
|  | ||||
|             try: | ||||
|                 device = await self._get_device(data, discovery_info) | ||||
|                 device = await self._get_device_data(discovery_info) | ||||
|             except AirthingsDeviceUpdateError: | ||||
|                 _LOGGER.error( | ||||
|                     "Error connecting to and getting data from %s (%s)", | ||||
|                     discovery_info.name, | ||||
|                     discovery_info.address, | ||||
|                 ) | ||||
|                 continue | ||||
|             except UnsupportedDeviceError: | ||||
|                 _LOGGER.debug( | ||||
|                     "Skipping unsupported device: %s (%s)", | ||||
|                     discovery_info.name, | ||||
|                     discovery_info.address, | ||||
|                 ) | ||||
|                 continue | ||||
|                 return self.async_abort(reason="cannot_connect") | ||||
|             except Exception: | ||||
|                 _LOGGER.exception("Unknown error occurred") | ||||
|                 return self.async_abort(reason="unknown") | ||||
|             name = get_name(device) | ||||
|             _LOGGER.debug("Discovered Airthings device: %s (%s)", name, address) | ||||
|             self._discovered_devices[address] = Discovery( | ||||
|                 name, discovery_info, device, data | ||||
|             ) | ||||
|             self._discovered_devices[address] = Discovery(name, discovery_info, device) | ||||
|  | ||||
|         if not self._discovered_devices: | ||||
|             return self.async_abort(reason="no_devices_found") | ||||
|  | ||||
|         titles = { | ||||
|             address: get_name(discovery.device) | ||||
|             address: discovery.device.name | ||||
|             for (address, discovery) in self._discovered_devices.items() | ||||
|         } | ||||
|         return self.async_show_form( | ||||
|   | ||||
| @@ -17,10 +17,6 @@ | ||||
|     { | ||||
|       "manufacturer_id": 820, | ||||
|       "service_uuid": "b42e3882-ade7-11e4-89d3-123b93f75cba" | ||||
|     }, | ||||
|     { | ||||
|       "manufacturer_id": 820, | ||||
|       "service_uuid": "b42e90a2-ade7-11e4-89d3-123b93f75cba" | ||||
|     } | ||||
|   ], | ||||
|   "codeowners": ["@vincegio", "@LaStrada"], | ||||
| @@ -28,5 +24,5 @@ | ||||
|   "dependencies": ["bluetooth_adapters"], | ||||
|   "documentation": "https://www.home-assistant.io/integrations/airthings_ble", | ||||
|   "iot_class": "local_polling", | ||||
|   "requirements": ["airthings-ble==1.1.1"] | ||||
|   "requirements": ["airthings-ble==0.9.2"] | ||||
| } | ||||
|   | ||||
| @@ -16,12 +16,10 @@ from homeassistant.components.sensor import ( | ||||
| from homeassistant.const import ( | ||||
|     CONCENTRATION_PARTS_PER_BILLION, | ||||
|     CONCENTRATION_PARTS_PER_MILLION, | ||||
|     LIGHT_LUX, | ||||
|     PERCENTAGE, | ||||
|     EntityCategory, | ||||
|     Platform, | ||||
|     UnitOfPressure, | ||||
|     UnitOfSoundPressure, | ||||
|     UnitOfTemperature, | ||||
| ) | ||||
| from homeassistant.core import HomeAssistant, callback | ||||
| @@ -114,25 +112,8 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = { | ||||
|         state_class=SensorStateClass.MEASUREMENT, | ||||
|         suggested_display_precision=0, | ||||
|     ), | ||||
|     "lux": SensorEntityDescription( | ||||
|         key="lux", | ||||
|         device_class=SensorDeviceClass.ILLUMINANCE, | ||||
|         native_unit_of_measurement=LIGHT_LUX, | ||||
|         state_class=SensorStateClass.MEASUREMENT, | ||||
|         suggested_display_precision=0, | ||||
|     ), | ||||
|     "noise": SensorEntityDescription( | ||||
|         key="noise", | ||||
|         translation_key="ambient_noise", | ||||
|         device_class=SensorDeviceClass.SOUND_PRESSURE, | ||||
|         native_unit_of_measurement=UnitOfSoundPressure.WEIGHTED_DECIBEL_A, | ||||
|         state_class=SensorStateClass.MEASUREMENT, | ||||
|         suggested_display_precision=0, | ||||
|     ), | ||||
| } | ||||
|  | ||||
| PARALLEL_UPDATES = 0 | ||||
|  | ||||
|  | ||||
| @callback | ||||
| def async_migrate(hass: HomeAssistant, address: str, sensor_name: str) -> None: | ||||
|   | ||||
| @@ -6,9 +6,6 @@ | ||||
|         "description": "[%key:component::bluetooth::config::step::user::description%]", | ||||
|         "data": { | ||||
|           "address": "[%key:common::config_flow::data::device%]" | ||||
|         }, | ||||
|         "data_description": { | ||||
|           "address": "The Airthings devices discovered via Bluetooth." | ||||
|         } | ||||
|       }, | ||||
|       "bluetooth_confirm": { | ||||
| @@ -20,8 +17,6 @@ | ||||
|       "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", | ||||
|       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", | ||||
|       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", | ||||
|       "firmware_upgrade_required": "Your device requires a firmware upgrade. Please use the Airthings app (Android/iOS) to upgrade it.", | ||||
|       "unsupported_device": "Unsupported device", | ||||
|       "unknown": "[%key:common::config_flow::error::unknown%]" | ||||
|     } | ||||
|   }, | ||||
| @@ -41,9 +36,6 @@ | ||||
|       }, | ||||
|       "illuminance": { | ||||
|         "name": "[%key:component::sensor::entity_component::illuminance::name%]" | ||||
|       }, | ||||
|       "ambient_noise": { | ||||
|         "name": "Ambient noise" | ||||
|       } | ||||
|     } | ||||
|   } | ||||
|   | ||||
| @@ -2,14 +2,17 @@ | ||||
|  | ||||
| from airtouch4pyapi import AirTouch | ||||
|  | ||||
| from homeassistant.config_entries import ConfigEntry | ||||
| from homeassistant.const import CONF_HOST, Platform | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.exceptions import ConfigEntryNotReady | ||||
|  | ||||
| from .coordinator import AirTouch4ConfigEntry, AirtouchDataUpdateCoordinator | ||||
| from .coordinator import AirtouchDataUpdateCoordinator | ||||
|  | ||||
| PLATFORMS = [Platform.CLIMATE] | ||||
|  | ||||
| type AirTouch4ConfigEntry = ConfigEntry[AirtouchDataUpdateCoordinator] | ||||
|  | ||||
|  | ||||
| async def async_setup_entry(hass: HomeAssistant, entry: AirTouch4ConfigEntry) -> bool: | ||||
|     """Set up AirTouch4 from a config entry.""" | ||||
| @@ -19,7 +22,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirTouch4ConfigEntry) -> | ||||
|     info = airtouch.GetAcs() | ||||
|     if not info: | ||||
|         raise ConfigEntryNotReady | ||||
|     coordinator = AirtouchDataUpdateCoordinator(hass, entry, airtouch) | ||||
|     coordinator = AirtouchDataUpdateCoordinator(hass, airtouch) | ||||
|     await coordinator.async_config_entry_first_refresh() | ||||
|     entry.runtime_data = coordinator | ||||
|  | ||||
|   | ||||
| @@ -2,34 +2,26 @@ | ||||
|  | ||||
| import logging | ||||
|  | ||||
| from airtouch4pyapi import AirTouch | ||||
| from airtouch4pyapi.airtouch import AirTouchStatus | ||||
|  | ||||
| from homeassistant.components.climate import SCAN_INTERVAL | ||||
| from homeassistant.config_entries import ConfigEntry | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed | ||||
|  | ||||
| from .const import DOMAIN | ||||
|  | ||||
| _LOGGER = logging.getLogger(__name__) | ||||
|  | ||||
| type AirTouch4ConfigEntry = ConfigEntry[AirtouchDataUpdateCoordinator] | ||||
|  | ||||
|  | ||||
| class AirtouchDataUpdateCoordinator(DataUpdateCoordinator): | ||||
|     """Class to manage fetching Airtouch data.""" | ||||
|  | ||||
|     def __init__( | ||||
|         self, hass: HomeAssistant, entry: AirTouch4ConfigEntry, airtouch: AirTouch | ||||
|     ) -> None: | ||||
|     def __init__(self, hass, airtouch): | ||||
|         """Initialize global Airtouch data updater.""" | ||||
|         self.airtouch = airtouch | ||||
|  | ||||
|         super().__init__( | ||||
|             hass, | ||||
|             _LOGGER, | ||||
|             config_entry=entry, | ||||
|             name=DOMAIN, | ||||
|             update_interval=SCAN_INTERVAL, | ||||
|         ) | ||||
|   | ||||
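One side of the AirTouch 4 hunks above types the config entry and passes it to DataUpdateCoordinator through the config_entry keyword. A minimal sketch of that shape; ExampleConfigEntry, ExampleCoordinator, and the client argument are placeholder names for illustration, not code from the diff:

    from datetime import timedelta
    import logging

    from homeassistant.config_entries import ConfigEntry
    from homeassistant.core import HomeAssistant
    from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

    _LOGGER = logging.getLogger(__name__)

    # Placeholder alias; the diff uses AirTouch4ConfigEntry the same way.
    type ExampleConfigEntry = ConfigEntry[ExampleCoordinator]


    class ExampleCoordinator(DataUpdateCoordinator[None]):
        """Hypothetical coordinator showing the entry-aware constructor."""

        def __init__(
            self, hass: HomeAssistant, entry: ExampleConfigEntry, client: object
        ) -> None:
            """Store the client and tie the coordinator to its config entry."""
            self.client = client
            super().__init__(
                hass,
                _LOGGER,
                config_entry=entry,
                name="example",
                update_interval=timedelta(seconds=60),
            )

        async def _async_update_data(self) -> None:
            """Poll the client here; omitted in this sketch."""
            return None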
| @@ -11,5 +11,5 @@ | ||||
|   "documentation": "https://www.home-assistant.io/integrations/airzone", | ||||
|   "iot_class": "local_polling", | ||||
|   "loggers": ["aioairzone"], | ||||
|   "requirements": ["aioairzone==1.0.2"] | ||||
|   "requirements": ["aioairzone==1.0.1"] | ||||
| } | ||||
|   | ||||
| @@ -6,19 +6,17 @@ from collections.abc import Callable | ||||
| from dataclasses import dataclass | ||||
| from typing import Any, Final | ||||
|  | ||||
| from aioairzone.common import GrilleAngle, OperationMode, QAdapt, SleepTimeout | ||||
| from aioairzone.common import GrilleAngle, OperationMode, SleepTimeout | ||||
| from aioairzone.const import ( | ||||
|     API_COLD_ANGLE, | ||||
|     API_HEAT_ANGLE, | ||||
|     API_MODE, | ||||
|     API_Q_ADAPT, | ||||
|     API_SLEEP, | ||||
|     AZD_COLD_ANGLE, | ||||
|     AZD_HEAT_ANGLE, | ||||
|     AZD_MASTER, | ||||
|     AZD_MODE, | ||||
|     AZD_MODES, | ||||
|     AZD_Q_ADAPT, | ||||
|     AZD_SLEEP, | ||||
|     AZD_ZONES, | ||||
| ) | ||||
| @@ -67,14 +65,6 @@ SLEEP_DICT: Final[dict[str, int]] = { | ||||
|     "90m": SleepTimeout.SLEEP_90, | ||||
| } | ||||
|  | ||||
| Q_ADAPT_DICT: Final[dict[str, int]] = { | ||||
|     "standard": QAdapt.STANDARD, | ||||
|     "power": QAdapt.POWER, | ||||
|     "silence": QAdapt.SILENCE, | ||||
|     "minimum": QAdapt.MINIMUM, | ||||
|     "maximum": QAdapt.MAXIMUM, | ||||
| } | ||||
|  | ||||
|  | ||||
| def main_zone_options( | ||||
|     zone_data: dict[str, Any], | ||||
| @@ -93,14 +83,6 @@ MAIN_ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = ( | ||||
|         options_fn=main_zone_options, | ||||
|         translation_key="modes", | ||||
|     ), | ||||
|     AirzoneSelectDescription( | ||||
|         api_param=API_Q_ADAPT, | ||||
|         entity_category=EntityCategory.CONFIG, | ||||
|         key=AZD_Q_ADAPT, | ||||
|         options=list(Q_ADAPT_DICT), | ||||
|         options_dict=Q_ADAPT_DICT, | ||||
|         translation_key="q_adapt", | ||||
|     ), | ||||
| ) | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -63,16 +63,6 @@ | ||||
|           "stop": "Stop" | ||||
|         } | ||||
|       }, | ||||
|       "q_adapt": { | ||||
|         "name": "Q-Adapt", | ||||
|         "state": { | ||||
|           "standard": "Standard", | ||||
|           "power": "Power", | ||||
|           "silence": "Silence", | ||||
|           "minimum": "Minimum", | ||||
|           "maximum": "Maximum" | ||||
|         } | ||||
|       }, | ||||
|       "sleep_times": { | ||||
|         "name": "Sleep", | ||||
|         "state": { | ||||
|   | ||||
| @@ -22,17 +22,6 @@ class OAuth2FlowHandler( | ||||
|     VERSION = CONFIG_FLOW_VERSION | ||||
|     MINOR_VERSION = CONFIG_FLOW_MINOR_VERSION | ||||
|  | ||||
|     async def async_step_user( | ||||
|         self, user_input: dict[str, Any] | None = None | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Check we have the cloud integration set up.""" | ||||
|         if "cloud" not in self.hass.config.components: | ||||
|             return self.async_abort( | ||||
|                 reason="cloud_not_enabled", | ||||
|                 description_placeholders={"default_config": "default_config"}, | ||||
|             ) | ||||
|         return await super().async_step_user(user_input) | ||||
|  | ||||
|     async def async_step_reauth( | ||||
|         self, user_input: Mapping[str, Any] | ||||
|     ) -> ConfigFlowResult: | ||||
|   | ||||
| @@ -24,8 +24,7 @@ | ||||
|       "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]", | ||||
|       "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]", | ||||
|       "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", | ||||
|       "wrong_account": "You are authenticated with a different account than the one set up. Please authenticate with the configured account.", | ||||
|       "cloud_not_enabled": "Please make sure you run Home Assistant with `{default_config}` enabled in your configuration.yaml." | ||||
|       "wrong_account": "You are authenticated with a different account than the one set up. Please authenticate with the configured account." | ||||
|     }, | ||||
|     "create_entry": { | ||||
|       "default": "[%key:common::config_flow::create_entry::authenticated%]" | ||||
|   | ||||
| @@ -2,9 +2,10 @@ | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| import asyncio | ||||
| from datetime import timedelta | ||||
| import logging | ||||
| from typing import Any, Final, final | ||||
| from typing import TYPE_CHECKING, Any, Final, final | ||||
|  | ||||
| from propcache.api import cached_property | ||||
| import voluptuous as vol | ||||
| @@ -27,6 +28,8 @@ from homeassistant.helpers import config_validation as cv | ||||
| from homeassistant.helpers.config_validation import make_entity_service_schema | ||||
| from homeassistant.helpers.entity import Entity, EntityDescription | ||||
| from homeassistant.helpers.entity_component import EntityComponent | ||||
| from homeassistant.helpers.entity_platform import EntityPlatform | ||||
| from homeassistant.helpers.frame import ReportBehavior, report_usage | ||||
| from homeassistant.helpers.typing import ConfigType | ||||
| from homeassistant.util.hass_dict import HassKey | ||||
|  | ||||
| @@ -146,11 +149,68 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A | ||||
|     ) | ||||
|     _alarm_control_panel_option_default_code: str | None = None | ||||
|  | ||||
|     __alarm_legacy_state: bool = False | ||||
|  | ||||
|     def __init_subclass__(cls, **kwargs: Any) -> None: | ||||
|         """Post-initialization processing.""" | ||||
|         super().__init_subclass__(**kwargs) | ||||
|         if any(method in cls.__dict__ for method in ("_attr_state", "state")): | ||||
|             # Integrations should use the 'alarm_state' property instead of | ||||
|             # setting the state directly. | ||||
|             cls.__alarm_legacy_state = True | ||||
|  | ||||
|     def __setattr__(self, name: str, value: Any, /) -> None: | ||||
|         """Set attribute. | ||||
|  | ||||
|         Issue a deprecation warning if '_attr_state' is set directly, | ||||
|         unless it has already been reported. | ||||
|         """ | ||||
|         if name == "_attr_state": | ||||
|             self._report_deprecated_alarm_state_handling() | ||||
|         return super().__setattr__(name, value) | ||||
|  | ||||
|     @callback | ||||
|     def add_to_platform_start( | ||||
|         self, | ||||
|         hass: HomeAssistant, | ||||
|         platform: EntityPlatform, | ||||
|         parallel_updates: asyncio.Semaphore | None, | ||||
|     ) -> None: | ||||
|         """Start adding an entity to a platform.""" | ||||
|         super().add_to_platform_start(hass, platform, parallel_updates) | ||||
|         if self.__alarm_legacy_state: | ||||
|             self._report_deprecated_alarm_state_handling() | ||||
|  | ||||
|     @callback | ||||
|     def _report_deprecated_alarm_state_handling(self) -> None: | ||||
|         """Report on deprecated handling of alarm state. | ||||
|  | ||||
|         Integrations should implement alarm_state instead of using state directly. | ||||
|         """ | ||||
|         report_usage( | ||||
|             "is setting state directly." | ||||
|             f" Entity {self.entity_id} ({type(self)}) should implement the 'alarm_state'" | ||||
|             " property and return its state using the AlarmControlPanelState enum", | ||||
|             core_integration_behavior=ReportBehavior.ERROR, | ||||
|             custom_integration_behavior=ReportBehavior.LOG, | ||||
|             breaks_in_ha_version="2025.11", | ||||
|             integration_domain=self.platform.platform_name if self.platform else None, | ||||
|             exclude_integrations={DOMAIN}, | ||||
|         ) | ||||
|  | ||||
|     @final | ||||
|     @property | ||||
|     def state(self) -> str | None: | ||||
|         """Return the current state.""" | ||||
|         return self.alarm_state | ||||
|         if (alarm_state := self.alarm_state) is not None: | ||||
|             return alarm_state | ||||
|         if self._attr_state is not None: | ||||
|             # Backwards compatibility for integrations that set state directly | ||||
|             # Should be removed in 2025.11 | ||||
|             if TYPE_CHECKING: | ||||
|                 assert isinstance(self._attr_state, str) | ||||
|             return self._attr_state | ||||
|         return None | ||||
|  | ||||
|     @cached_property | ||||
|     def alarm_state(self) -> AlarmControlPanelState | None: | ||||
|   | ||||
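The alarm_control_panel hunk above reports integrations that still set the panel state directly; the report_usage message asks them to implement the alarm_state property and return an AlarmControlPanelState value instead. A minimal sketch of a compliant entity; ExampleAlarmPanel and its _armed flag are illustrative assumptions, not code from the diff:

    from __future__ import annotations

    from homeassistant.components.alarm_control_panel import (
        AlarmControlPanelEntity,
        AlarmControlPanelState,
    )


    class ExampleAlarmPanel(AlarmControlPanelEntity):
        """Hypothetical panel reporting state via alarm_state, not _attr_state."""

        _attr_name = "Example panel"

        def __init__(self) -> None:
            """Track armed state locally for the sake of the example."""
            self._armed = False

        @property
        def alarm_state(self) -> AlarmControlPanelState | None:
            """Return the state through the enum, so no deprecation is reported."""
            return (
                AlarmControlPanelState.ARMED_AWAY
                if self._armed
                else AlarmControlPanelState.DISARMED
            )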
| @@ -1472,10 +1472,10 @@ class AlexaModeController(AlexaCapability): | ||||
|             # Return state instead of position when using ModeController. | ||||
|             mode = self.entity.state | ||||
|             if mode in ( | ||||
|                 cover.CoverState.OPEN, | ||||
|                 cover.CoverState.OPENING, | ||||
|                 cover.CoverState.CLOSED, | ||||
|                 cover.CoverState.CLOSING, | ||||
|                 cover.STATE_OPEN, | ||||
|                 cover.STATE_OPENING, | ||||
|                 cover.STATE_CLOSED, | ||||
|                 cover.STATE_CLOSING, | ||||
|                 STATE_UNKNOWN, | ||||
|             ): | ||||
|                 return f"{cover.ATTR_POSITION}.{mode}" | ||||
| @@ -1594,11 +1594,11 @@ class AlexaModeController(AlexaCapability): | ||||
|                 ["Position", AlexaGlobalCatalog.SETTING_OPENING], False | ||||
|             ) | ||||
|             self._resource.add_mode( | ||||
|                 f"{cover.ATTR_POSITION}.{cover.CoverState.OPEN}", | ||||
|                 f"{cover.ATTR_POSITION}.{cover.STATE_OPEN}", | ||||
|                 [AlexaGlobalCatalog.VALUE_OPEN], | ||||
|             ) | ||||
|             self._resource.add_mode( | ||||
|                 f"{cover.ATTR_POSITION}.{cover.CoverState.CLOSED}", | ||||
|                 f"{cover.ATTR_POSITION}.{cover.STATE_CLOSED}", | ||||
|                 [AlexaGlobalCatalog.VALUE_CLOSE], | ||||
|             ) | ||||
|             self._resource.add_mode( | ||||
| @@ -1651,22 +1651,22 @@ class AlexaModeController(AlexaCapability): | ||||
|                 raise_labels.append(AlexaSemantics.ACTION_OPEN) | ||||
|                 self._semantics.add_states_to_value( | ||||
|                     [AlexaSemantics.STATES_CLOSED], | ||||
|                     f"{cover.ATTR_POSITION}.{cover.CoverState.CLOSED}", | ||||
|                     f"{cover.ATTR_POSITION}.{cover.STATE_CLOSED}", | ||||
|                 ) | ||||
|                 self._semantics.add_states_to_value( | ||||
|                     [AlexaSemantics.STATES_OPEN], | ||||
|                     f"{cover.ATTR_POSITION}.{cover.CoverState.OPEN}", | ||||
|                     f"{cover.ATTR_POSITION}.{cover.STATE_OPEN}", | ||||
|                 ) | ||||
|  | ||||
|             self._semantics.add_action_to_directive( | ||||
|                 lower_labels, | ||||
|                 "SetMode", | ||||
|                 {"mode": f"{cover.ATTR_POSITION}.{cover.CoverState.CLOSED}"}, | ||||
|                 {"mode": f"{cover.ATTR_POSITION}.{cover.STATE_CLOSED}"}, | ||||
|             ) | ||||
|             self._semantics.add_action_to_directive( | ||||
|                 raise_labels, | ||||
|                 "SetMode", | ||||
|                 {"mode": f"{cover.ATTR_POSITION}.{cover.CoverState.OPEN}"}, | ||||
|                 {"mode": f"{cover.ATTR_POSITION}.{cover.STATE_OPEN}"}, | ||||
|             ) | ||||
|  | ||||
|             return self._semantics.serialize_semantics() | ||||
|   | ||||
| @@ -1261,9 +1261,9 @@ async def async_api_set_mode( | ||||
|     elif instance == f"{cover.DOMAIN}.{cover.ATTR_POSITION}": | ||||
|         position = mode.split(".")[1] | ||||
|  | ||||
|         if position == cover.CoverState.CLOSED: | ||||
|         if position == cover.STATE_CLOSED: | ||||
|             service = cover.SERVICE_CLOSE_COVER | ||||
|         elif position == cover.CoverState.OPEN: | ||||
|         elif position == cover.STATE_OPEN: | ||||
|             service = cover.SERVICE_OPEN_COVER | ||||
|         elif position == "custom": | ||||
|             service = cover.SERVICE_STOP_COVER | ||||
|   | ||||
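One side of the Alexa hunks above builds its mode strings from the cover.CoverState enum rather than the older cover.STATE_* constants. Because CoverState is a string enum whose members carry the same values, the serialized Alexa modes are unchanged; a short sketch, assuming only the members already referenced in the diff:

    from homeassistant.components.cover import ATTR_POSITION, CoverState

    # CoverState is a string enum, so formatting a member yields the bare value.
    open_mode = f"{ATTR_POSITION}.{CoverState.OPEN}"      # "position.open"
    closed_mode = f"{ATTR_POSITION}.{CoverState.CLOSED}"  # "position.closed"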
| @@ -10,7 +10,6 @@ from aioamazondevices.api import AmazonDevice | ||||
| from aioamazondevices.const import SENSOR_STATE_OFF | ||||
|  | ||||
| from homeassistant.components.binary_sensor import ( | ||||
|     DOMAIN as BINARY_SENSOR_DOMAIN, | ||||
|     BinarySensorDeviceClass, | ||||
|     BinarySensorEntity, | ||||
|     BinarySensorEntityDescription, | ||||
| @@ -18,12 +17,9 @@ from homeassistant.components.binary_sensor import ( | ||||
| from homeassistant.const import EntityCategory | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback | ||||
| import homeassistant.helpers.entity_registry as er | ||||
|  | ||||
| from .const import _LOGGER, DOMAIN | ||||
| from .coordinator import AmazonConfigEntry | ||||
| from .entity import AmazonEntity | ||||
| from .utils import async_update_unique_id | ||||
|  | ||||
| # Coordinator is used to centralize the data updates | ||||
| PARALLEL_UPDATES = 0 | ||||
| @@ -35,7 +31,6 @@ class AmazonBinarySensorEntityDescription(BinarySensorEntityDescription): | ||||
|  | ||||
|     is_on_fn: Callable[[AmazonDevice, str], bool] | ||||
|     is_supported: Callable[[AmazonDevice, str], bool] = lambda device, key: True | ||||
|     is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: True | ||||
|  | ||||
|  | ||||
| BINARY_SENSORS: Final = ( | ||||
| @@ -45,52 +40,47 @@ BINARY_SENSORS: Final = ( | ||||
|         entity_category=EntityCategory.DIAGNOSTIC, | ||||
|         is_on_fn=lambda device, _: device.online, | ||||
|     ), | ||||
|     AmazonBinarySensorEntityDescription( | ||||
|         key="detectionState", | ||||
|         device_class=BinarySensorDeviceClass.MOTION, | ||||
|         is_on_fn=lambda device, key: bool( | ||||
|             device.sensors[key].value != SENSOR_STATE_OFF | ||||
|         ), | ||||
|         is_supported=lambda device, key: device.sensors.get(key) is not None, | ||||
|         is_available_fn=lambda device, key: ( | ||||
|             device.online | ||||
|             and (sensor := device.sensors.get(key)) is not None | ||||
|             and sensor.error is False | ||||
|         ), | ||||
|     ), | ||||
| ) | ||||
|  | ||||
| DEPRECATED_BINARY_SENSORS: Final = ( | ||||
|     AmazonBinarySensorEntityDescription( | ||||
|         key="bluetooth", | ||||
|         entity_category=EntityCategory.DIAGNOSTIC, | ||||
|         translation_key="bluetooth", | ||||
|         is_on_fn=lambda device, key: False, | ||||
|         is_on_fn=lambda device, _: device.bluetooth_state, | ||||
|     ), | ||||
|     AmazonBinarySensorEntityDescription( | ||||
|         key="babyCryDetectionState", | ||||
|         translation_key="baby_cry_detection", | ||||
|         is_on_fn=lambda device, key: False, | ||||
|         is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF), | ||||
|         is_supported=lambda device, key: device.sensors.get(key) is not None, | ||||
|     ), | ||||
|     AmazonBinarySensorEntityDescription( | ||||
|         key="beepingApplianceDetectionState", | ||||
|         translation_key="beeping_appliance_detection", | ||||
|         is_on_fn=lambda device, key: False, | ||||
|         is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF), | ||||
|         is_supported=lambda device, key: device.sensors.get(key) is not None, | ||||
|     ), | ||||
|     AmazonBinarySensorEntityDescription( | ||||
|         key="coughDetectionState", | ||||
|         translation_key="cough_detection", | ||||
|         is_on_fn=lambda device, key: False, | ||||
|         is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF), | ||||
|         is_supported=lambda device, key: device.sensors.get(key) is not None, | ||||
|     ), | ||||
|     AmazonBinarySensorEntityDescription( | ||||
|         key="dogBarkDetectionState", | ||||
|         translation_key="dog_bark_detection", | ||||
|         is_on_fn=lambda device, key: False, | ||||
|         is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF), | ||||
|         is_supported=lambda device, key: device.sensors.get(key) is not None, | ||||
|     ), | ||||
|     AmazonBinarySensorEntityDescription( | ||||
|         key="humanPresenceDetectionState", | ||||
|         device_class=BinarySensorDeviceClass.MOTION, | ||||
|         is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF), | ||||
|         is_supported=lambda device, key: device.sensors.get(key) is not None, | ||||
|     ), | ||||
|     AmazonBinarySensorEntityDescription( | ||||
|         key="waterSoundsDetectionState", | ||||
|         translation_key="water_sounds_detection", | ||||
|         is_on_fn=lambda device, key: False, | ||||
|         is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF), | ||||
|         is_supported=lambda device, key: device.sensors.get(key) is not None, | ||||
|     ), | ||||
| ) | ||||
|  | ||||
| @@ -104,46 +94,13 @@ async def async_setup_entry( | ||||
|  | ||||
|     coordinator = entry.runtime_data | ||||
|  | ||||
|     entity_registry = er.async_get(hass) | ||||
|  | ||||
|     # Replace unique id for "detectionState" binary sensor | ||||
|     await async_update_unique_id( | ||||
|         hass, | ||||
|         coordinator, | ||||
|         BINARY_SENSOR_DOMAIN, | ||||
|         "humanPresenceDetectionState", | ||||
|         "detectionState", | ||||
|     async_add_entities( | ||||
|         AmazonBinarySensorEntity(coordinator, serial_num, sensor_desc) | ||||
|         for sensor_desc in BINARY_SENSORS | ||||
|         for serial_num in coordinator.data | ||||
|         if sensor_desc.is_supported(coordinator.data[serial_num], sensor_desc.key) | ||||
|     ) | ||||
|  | ||||
|     # Clean up deprecated sensors | ||||
|     for sensor_desc in DEPRECATED_BINARY_SENSORS: | ||||
|         for serial_num in coordinator.data: | ||||
|             unique_id = f"{serial_num}-{sensor_desc.key}" | ||||
|             if entity_id := entity_registry.async_get_entity_id( | ||||
|                 BINARY_SENSOR_DOMAIN, DOMAIN, unique_id | ||||
|             ): | ||||
|                 _LOGGER.debug("Removing deprecated entity %s", entity_id) | ||||
|                 entity_registry.async_remove(entity_id) | ||||
|  | ||||
|     known_devices: set[str] = set() | ||||
|  | ||||
|     def _check_device() -> None: | ||||
|         current_devices = set(coordinator.data) | ||||
|         new_devices = current_devices - known_devices | ||||
|         if new_devices: | ||||
|             known_devices.update(new_devices) | ||||
|             async_add_entities( | ||||
|                 AmazonBinarySensorEntity(coordinator, serial_num, sensor_desc) | ||||
|                 for sensor_desc in BINARY_SENSORS | ||||
|                 for serial_num in new_devices | ||||
|                 if sensor_desc.is_supported( | ||||
|                     coordinator.data[serial_num], sensor_desc.key | ||||
|                 ) | ||||
|             ) | ||||
|  | ||||
|     _check_device() | ||||
|     entry.async_on_unload(coordinator.async_add_listener(_check_device)) | ||||
|  | ||||
|  | ||||
| class AmazonBinarySensorEntity(AmazonEntity, BinarySensorEntity): | ||||
|     """Binary sensor device.""" | ||||
| @@ -156,13 +113,3 @@ class AmazonBinarySensorEntity(AmazonEntity, BinarySensorEntity): | ||||
|         return self.entity_description.is_on_fn( | ||||
|             self.device, self.entity_description.key | ||||
|         ) | ||||
|  | ||||
|     @property | ||||
|     def available(self) -> bool: | ||||
|         """Return if entity is available.""" | ||||
|         return ( | ||||
|             self.entity_description.is_available_fn( | ||||
|                 self.device, self.entity_description.key | ||||
|             ) | ||||
|             and super().available | ||||
|         ) | ||||
|   | ||||
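One side of the binary_sensor hunk above (and of the notify, sensor, and switch hunks further down) keeps a known_devices set and re-runs an add callback on every coordinator refresh, so entities appear for devices discovered after setup. A stripped-down sketch of that pattern; ExampleEntity is a placeholder class, while AmazonConfigEntry is the entry type the diff itself imports from .coordinator:

    from homeassistant.core import HomeAssistant
    from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

    from .coordinator import AmazonConfigEntry


    async def async_setup_entry(
        hass: HomeAssistant,
        entry: AmazonConfigEntry,
        async_add_entities: AddConfigEntryEntitiesCallback,
    ) -> None:
        """Add entities for known devices and for any discovered later."""
        coordinator = entry.runtime_data
        known_devices: set[str] = set()

        def _check_device() -> None:
            new_devices = set(coordinator.data) - known_devices
            if new_devices:
                known_devices.update(new_devices)
                async_add_entities(
                    ExampleEntity(coordinator, serial_num)  # placeholder entity class
                    for serial_num in new_devices
                )

        _check_device()
        entry.async_on_unload(coordinator.async_add_listener(_check_device))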
| @@ -64,7 +64,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|                 data = await validate_input(self.hass, user_input) | ||||
|             except CannotConnect: | ||||
|                 errors["base"] = "cannot_connect" | ||||
|             except CannotAuthenticate: | ||||
|             except (CannotAuthenticate, TypeError): | ||||
|                 errors["base"] = "invalid_auth" | ||||
|             except CannotRetrieveData: | ||||
|                 errors["base"] = "cannot_retrieve_data" | ||||
| @@ -112,7 +112,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|                 ) | ||||
|             except CannotConnect: | ||||
|                 errors["base"] = "cannot_connect" | ||||
|             except CannotAuthenticate: | ||||
|             except (CannotAuthenticate, TypeError): | ||||
|                 errors["base"] = "invalid_auth" | ||||
|             except CannotRetrieveData: | ||||
|                 errors["base"] = "cannot_retrieve_data" | ||||
|   | ||||
| @@ -68,7 +68,7 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]): | ||||
|                 translation_key="cannot_retrieve_data_with_error", | ||||
|                 translation_placeholders={"error": repr(err)}, | ||||
|             ) from err | ||||
|         except CannotAuthenticate as err: | ||||
|         except (CannotAuthenticate, TypeError) as err: | ||||
|             raise ConfigEntryAuthFailed( | ||||
|                 translation_domain=DOMAIN, | ||||
|                 translation_key="invalid_auth", | ||||
|   | ||||
| @@ -60,5 +60,7 @@ def build_device_data(device: AmazonDevice) -> dict[str, Any]: | ||||
|         "online": device.online, | ||||
|         "serial number": device.serial_number, | ||||
|         "software version": device.software_version, | ||||
|         "sensors": device.sensors, | ||||
|         "do not disturb": device.do_not_disturb, | ||||
|         "response style": device.response_style, | ||||
|         "bluetooth state": device.bluetooth_state, | ||||
|     } | ||||
|   | ||||
| @@ -1,4 +1,44 @@ | ||||
| { | ||||
|   "entity": { | ||||
|     "binary_sensor": { | ||||
|       "bluetooth": { | ||||
|         "default": "mdi:bluetooth-off", | ||||
|         "state": { | ||||
|           "on": "mdi:bluetooth" | ||||
|         } | ||||
|       }, | ||||
|       "baby_cry_detection": { | ||||
|         "default": "mdi:account-voice-off", | ||||
|         "state": { | ||||
|           "on": "mdi:account-voice" | ||||
|         } | ||||
|       }, | ||||
|       "beeping_appliance_detection": { | ||||
|         "default": "mdi:bell-off", | ||||
|         "state": { | ||||
|           "on": "mdi:bell-ring" | ||||
|         } | ||||
|       }, | ||||
|       "cough_detection": { | ||||
|         "default": "mdi:blur-off", | ||||
|         "state": { | ||||
|           "on": "mdi:blur" | ||||
|         } | ||||
|       }, | ||||
|       "dog_bark_detection": { | ||||
|         "default": "mdi:dog-side-off", | ||||
|         "state": { | ||||
|           "on": "mdi:dog-side" | ||||
|         } | ||||
|       }, | ||||
|       "water_sounds_detection": { | ||||
|         "default": "mdi:water-pump-off", | ||||
|         "state": { | ||||
|           "on": "mdi:water-pump" | ||||
|         } | ||||
|       } | ||||
|     } | ||||
|   }, | ||||
|   "services": { | ||||
|     "send_sound": { | ||||
|       "service": "mdi:cast-audio" | ||||
|   | ||||
| @@ -7,6 +7,6 @@ | ||||
|   "integration_type": "hub", | ||||
|   "iot_class": "cloud_polling", | ||||
|   "loggers": ["aioamazondevices"], | ||||
|   "quality_scale": "platinum", | ||||
|   "requirements": ["aioamazondevices==6.4.6"] | ||||
|   "quality_scale": "silver", | ||||
|   "requirements": ["aioamazondevices==6.0.0"] | ||||
| } | ||||
|   | ||||
| @@ -57,23 +57,13 @@ async def async_setup_entry( | ||||
|  | ||||
|     coordinator = entry.runtime_data | ||||
|  | ||||
|     known_devices: set[str] = set() | ||||
|  | ||||
|     def _check_device() -> None: | ||||
|         current_devices = set(coordinator.data) | ||||
|         new_devices = current_devices - known_devices | ||||
|         if new_devices: | ||||
|             known_devices.update(new_devices) | ||||
|             async_add_entities( | ||||
|                 AmazonNotifyEntity(coordinator, serial_num, sensor_desc) | ||||
|                 for sensor_desc in NOTIFY | ||||
|                 for serial_num in new_devices | ||||
|                 if sensor_desc.subkey in coordinator.data[serial_num].capabilities | ||||
|                 and sensor_desc.is_supported(coordinator.data[serial_num]) | ||||
|             ) | ||||
|  | ||||
|     _check_device() | ||||
|     entry.async_on_unload(coordinator.async_add_listener(_check_device)) | ||||
|     async_add_entities( | ||||
|         AmazonNotifyEntity(coordinator, serial_num, sensor_desc) | ||||
|         for sensor_desc in NOTIFY | ||||
|         for serial_num in coordinator.data | ||||
|         if sensor_desc.subkey in coordinator.data[serial_num].capabilities | ||||
|         and sensor_desc.is_supported(coordinator.data[serial_num]) | ||||
|     ) | ||||
|  | ||||
|  | ||||
| class AmazonNotifyEntity(AmazonEntity, NotifyEntity): | ||||
|   | ||||
| @@ -53,7 +53,7 @@ rules: | ||||
|   docs-supported-functions: done | ||||
|   docs-troubleshooting: done | ||||
|   docs-use-cases: done | ||||
|   dynamic-devices: done | ||||
|   dynamic-devices: todo | ||||
|   entity-category: done | ||||
|   entity-device-class: done | ||||
|   entity-disabled-by-default: done | ||||
|   | ||||
| @@ -31,20 +31,15 @@ class AmazonSensorEntityDescription(SensorEntityDescription): | ||||
|     """Amazon Devices sensor entity description.""" | ||||
|  | ||||
|     native_unit_of_measurement_fn: Callable[[AmazonDevice, str], str] | None = None | ||||
|     is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: ( | ||||
|         device.online | ||||
|         and (sensor := device.sensors.get(key)) is not None | ||||
|         and sensor.error is False | ||||
|     ) | ||||
|  | ||||
|  | ||||
| SENSORS: Final = ( | ||||
|     AmazonSensorEntityDescription( | ||||
|         key="temperature", | ||||
|         device_class=SensorDeviceClass.TEMPERATURE, | ||||
|         native_unit_of_measurement_fn=lambda device, key: ( | ||||
|         native_unit_of_measurement_fn=lambda device, _key: ( | ||||
|             UnitOfTemperature.CELSIUS | ||||
|             if key in device.sensors and device.sensors[key].scale == "CELSIUS" | ||||
|             if device.sensors[_key].scale == "CELSIUS" | ||||
|             else UnitOfTemperature.FAHRENHEIT | ||||
|         ), | ||||
|         state_class=SensorStateClass.MEASUREMENT, | ||||
| @@ -67,22 +62,12 @@ async def async_setup_entry( | ||||
|  | ||||
|     coordinator = entry.runtime_data | ||||
|  | ||||
|     known_devices: set[str] = set() | ||||
|  | ||||
|     def _check_device() -> None: | ||||
|         current_devices = set(coordinator.data) | ||||
|         new_devices = current_devices - known_devices | ||||
|         if new_devices: | ||||
|             known_devices.update(new_devices) | ||||
|             async_add_entities( | ||||
|                 AmazonSensorEntity(coordinator, serial_num, sensor_desc) | ||||
|                 for sensor_desc in SENSORS | ||||
|                 for serial_num in new_devices | ||||
|                 if coordinator.data[serial_num].sensors.get(sensor_desc.key) is not None | ||||
|             ) | ||||
|  | ||||
|     _check_device() | ||||
|     entry.async_on_unload(coordinator.async_add_listener(_check_device)) | ||||
|     async_add_entities( | ||||
|         AmazonSensorEntity(coordinator, serial_num, sensor_desc) | ||||
|         for sensor_desc in SENSORS | ||||
|         for serial_num in coordinator.data | ||||
|         if coordinator.data[serial_num].sensors.get(sensor_desc.key) is not None | ||||
|     ) | ||||
|  | ||||
|  | ||||
| class AmazonSensorEntity(AmazonEntity, SensorEntity): | ||||
| @@ -104,13 +89,3 @@ class AmazonSensorEntity(AmazonEntity, SensorEntity): | ||||
|     def native_value(self) -> StateType: | ||||
|         """Return the state of the sensor.""" | ||||
|         return self.device.sensors[self.entity_description.key].value | ||||
|  | ||||
|     @property | ||||
|     def available(self) -> bool: | ||||
|         """Return if entity is available.""" | ||||
|         return ( | ||||
|             self.entity_description.is_available_fn( | ||||
|                 self.device, self.entity_description.key | ||||
|             ) | ||||
|             and super().available | ||||
|         ) | ||||
|   | ||||
| @@ -58,6 +58,26 @@ | ||||
|     } | ||||
|   }, | ||||
|   "entity": { | ||||
|     "binary_sensor": { | ||||
|       "bluetooth": { | ||||
|         "name": "Bluetooth" | ||||
|       }, | ||||
|       "baby_cry_detection": { | ||||
|         "name": "Baby crying" | ||||
|       }, | ||||
|       "beeping_appliance_detection": { | ||||
|         "name": "Beeping appliance" | ||||
|       }, | ||||
|       "cough_detection": { | ||||
|         "name": "Coughing" | ||||
|       }, | ||||
|       "dog_bark_detection": { | ||||
|         "name": "Dog barking" | ||||
|       }, | ||||
|       "water_sounds_detection": { | ||||
|         "name": "Water sounds" | ||||
|       } | ||||
|     }, | ||||
|     "notify": { | ||||
|       "speak": { | ||||
|         "name": "Speak" | ||||
|   | ||||
| @@ -8,21 +8,13 @@ from typing import TYPE_CHECKING, Any, Final | ||||
|  | ||||
| from aioamazondevices.api import AmazonDevice | ||||
|  | ||||
| from homeassistant.components.switch import ( | ||||
|     DOMAIN as SWITCH_DOMAIN, | ||||
|     SwitchEntity, | ||||
|     SwitchEntityDescription, | ||||
| ) | ||||
| from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback | ||||
|  | ||||
| from .coordinator import AmazonConfigEntry | ||||
| from .entity import AmazonEntity | ||||
| from .utils import ( | ||||
|     alexa_api_call, | ||||
|     async_remove_dnd_from_virtual_group, | ||||
|     async_update_unique_id, | ||||
| ) | ||||
| from .utils import alexa_api_call | ||||
|  | ||||
| PARALLEL_UPDATES = 1 | ||||
|  | ||||
| @@ -32,19 +24,16 @@ class AmazonSwitchEntityDescription(SwitchEntityDescription): | ||||
|     """Alexa Devices switch entity description.""" | ||||
|  | ||||
|     is_on_fn: Callable[[AmazonDevice], bool] | ||||
|     is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: ( | ||||
|         device.online | ||||
|         and (sensor := device.sensors.get(key)) is not None | ||||
|         and sensor.error is False | ||||
|     ) | ||||
|     subkey: str | ||||
|     method: str | ||||
|  | ||||
|  | ||||
| SWITCHES: Final = ( | ||||
|     AmazonSwitchEntityDescription( | ||||
|         key="dnd", | ||||
|         key="do_not_disturb", | ||||
|         subkey="AUDIO_PLAYER", | ||||
|         translation_key="do_not_disturb", | ||||
|         is_on_fn=lambda device: bool(device.sensors["dnd"].value), | ||||
|         is_on_fn=lambda _device: _device.do_not_disturb, | ||||
|         method="set_do_not_disturb", | ||||
|     ), | ||||
| ) | ||||
| @@ -59,31 +48,13 @@ async def async_setup_entry( | ||||
|  | ||||
|     coordinator = entry.runtime_data | ||||
|  | ||||
|     # Replace unique id for "DND" switch and remove from Speaker Group | ||||
|     await async_update_unique_id( | ||||
|         hass, coordinator, SWITCH_DOMAIN, "do_not_disturb", "dnd" | ||||
|     async_add_entities( | ||||
|         AmazonSwitchEntity(coordinator, serial_num, switch_desc) | ||||
|         for switch_desc in SWITCHES | ||||
|         for serial_num in coordinator.data | ||||
|         if switch_desc.subkey in coordinator.data[serial_num].capabilities | ||||
|     ) | ||||
|  | ||||
|     # Remove DND switch from virtual groups | ||||
|     await async_remove_dnd_from_virtual_group(hass, coordinator) | ||||
|  | ||||
|     known_devices: set[str] = set() | ||||
|  | ||||
|     def _check_device() -> None: | ||||
|         current_devices = set(coordinator.data) | ||||
|         new_devices = current_devices - known_devices | ||||
|         if new_devices: | ||||
|             known_devices.update(new_devices) | ||||
|             async_add_entities( | ||||
|                 AmazonSwitchEntity(coordinator, serial_num, switch_desc) | ||||
|                 for switch_desc in SWITCHES | ||||
|                 for serial_num in new_devices | ||||
|                 if switch_desc.key in coordinator.data[serial_num].sensors | ||||
|             ) | ||||
|  | ||||
|     _check_device() | ||||
|     entry.async_on_unload(coordinator.async_add_listener(_check_device)) | ||||
|  | ||||
|  | ||||
| class AmazonSwitchEntity(AmazonEntity, SwitchEntity): | ||||
|     """Switch device.""" | ||||
| @@ -113,13 +84,3 @@ class AmazonSwitchEntity(AmazonEntity, SwitchEntity): | ||||
|     def is_on(self) -> bool: | ||||
|         """Return True if switch is on.""" | ||||
|         return self.entity_description.is_on_fn(self.device) | ||||
|  | ||||
|     @property | ||||
|     def available(self) -> bool: | ||||
|         """Return if entity is available.""" | ||||
|         return ( | ||||
|             self.entity_description.is_available_fn( | ||||
|                 self.device, self.entity_description.key | ||||
|             ) | ||||
|             and super().available | ||||
|         ) | ||||
|   | ||||
| @@ -4,16 +4,11 @@ from collections.abc import Awaitable, Callable, Coroutine | ||||
| from functools import wraps | ||||
| from typing import Any, Concatenate | ||||
|  | ||||
| from aioamazondevices.const import SPEAKER_GROUP_FAMILY | ||||
| from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData | ||||
|  | ||||
| from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.exceptions import HomeAssistantError | ||||
| import homeassistant.helpers.entity_registry as er | ||||
|  | ||||
| from .const import _LOGGER, DOMAIN | ||||
| from .coordinator import AmazonDevicesCoordinator | ||||
| from .const import DOMAIN | ||||
| from .entity import AmazonEntity | ||||
|  | ||||
|  | ||||
| @@ -43,41 +38,3 @@ def alexa_api_call[_T: AmazonEntity, **_P]( | ||||
|             ) from err | ||||
|  | ||||
|     return cmd_wrapper | ||||
|  | ||||
|  | ||||
| async def async_update_unique_id( | ||||
|     hass: HomeAssistant, | ||||
|     coordinator: AmazonDevicesCoordinator, | ||||
|     domain: str, | ||||
|     old_key: str, | ||||
|     new_key: str, | ||||
| ) -> None: | ||||
|     """Update the unique ID of entities created with the old format.""" | ||||
|     entity_registry = er.async_get(hass) | ||||
|  | ||||
|     for serial_num in coordinator.data: | ||||
|         unique_id = f"{serial_num}-{old_key}" | ||||
|         if entity_id := entity_registry.async_get_entity_id(domain, DOMAIN, unique_id): | ||||
|             _LOGGER.debug("Updating unique_id for %s", entity_id) | ||||
|             new_unique_id = unique_id.replace(old_key, new_key) | ||||
|  | ||||
|             # Update the registry with the new unique_id | ||||
|             entity_registry.async_update_entity(entity_id, new_unique_id=new_unique_id) | ||||
|  | ||||
|  | ||||
| async def async_remove_dnd_from_virtual_group( | ||||
|     hass: HomeAssistant, | ||||
|     coordinator: AmazonDevicesCoordinator, | ||||
| ) -> None: | ||||
|     """Remove the DND entity from virtual groups.""" | ||||
|     entity_registry = er.async_get(hass) | ||||
|  | ||||
|     for serial_num in coordinator.data: | ||||
|         unique_id = f"{serial_num}-do_not_disturb" | ||||
|         entity_id = entity_registry.async_get_entity_id( | ||||
|             DOMAIN, SWITCH_DOMAIN, unique_id | ||||
|         ) | ||||
|         is_group = coordinator.data[serial_num].device_family == SPEAKER_GROUP_FAMILY | ||||
|         if entity_id and is_group: | ||||
|             entity_registry.async_remove(entity_id) | ||||
|             _LOGGER.debug("Removed DND switch from virtual group %s", entity_id) | ||||
|   | ||||
| @@ -65,31 +65,6 @@ SENSOR_DESCRIPTIONS = [ | ||||
|         suggested_display_precision=2, | ||||
|         translation_placeholders={"sensor_name": "BME280"}, | ||||
|     ), | ||||
|     AltruistSensorEntityDescription( | ||||
|         device_class=SensorDeviceClass.HUMIDITY, | ||||
|         key="BME680_humidity", | ||||
|         translation_key="humidity", | ||||
|         native_unit_of_measurement=PERCENTAGE, | ||||
|         suggested_display_precision=2, | ||||
|         translation_placeholders={"sensor_name": "BME680"}, | ||||
|     ), | ||||
|     AltruistSensorEntityDescription( | ||||
|         device_class=SensorDeviceClass.PRESSURE, | ||||
|         key="BME680_pressure", | ||||
|         translation_key="pressure", | ||||
|         native_unit_of_measurement=UnitOfPressure.PA, | ||||
|         suggested_unit_of_measurement=UnitOfPressure.MMHG, | ||||
|         suggested_display_precision=0, | ||||
|         translation_placeholders={"sensor_name": "BME680"}, | ||||
|     ), | ||||
|     AltruistSensorEntityDescription( | ||||
|         device_class=SensorDeviceClass.TEMPERATURE, | ||||
|         key="BME680_temperature", | ||||
|         translation_key="temperature", | ||||
|         native_unit_of_measurement=UnitOfTemperature.CELSIUS, | ||||
|         suggested_display_precision=2, | ||||
|         translation_placeholders={"sensor_name": "BME680"}, | ||||
|     ), | ||||
|     AltruistSensorEntityDescription( | ||||
|         device_class=SensorDeviceClass.PRESSURE, | ||||
|         key="BMP_pressure", | ||||
|   | ||||
| @@ -41,7 +41,7 @@ def async_setup_services(hass: HomeAssistant) -> None: | ||||
|         if call.data.get(ATTR_ENTITY_ID) == ENTITY_MATCH_NONE: | ||||
|             return [] | ||||
|  | ||||
|         call_ids = await async_extract_entity_ids(call) | ||||
|         call_ids = await async_extract_entity_ids(hass, call) | ||||
|         entity_ids = [] | ||||
|         for entity_id in hass.data[DATA_AMCREST][CAMERAS]: | ||||
|             if entity_id not in call_ids: | ||||
|   | ||||
| @@ -12,25 +12,10 @@ from homeassistant.helpers.event import async_call_later, async_track_time_inter | ||||
| from homeassistant.helpers.typing import ConfigType | ||||
| from homeassistant.util.hass_dict import HassKey | ||||
|  | ||||
| from .analytics import ( | ||||
|     Analytics, | ||||
|     AnalyticsInput, | ||||
|     AnalyticsModifications, | ||||
|     DeviceAnalyticsModifications, | ||||
|     EntityAnalyticsModifications, | ||||
|     async_devices_payload, | ||||
| ) | ||||
| from .analytics import Analytics | ||||
| from .const import ATTR_ONBOARDED, ATTR_PREFERENCES, DOMAIN, INTERVAL, PREFERENCE_SCHEMA | ||||
| from .http import AnalyticsDevicesView | ||||
|  | ||||
| __all__ = [ | ||||
|     "AnalyticsInput", | ||||
|     "AnalyticsModifications", | ||||
|     "DeviceAnalyticsModifications", | ||||
|     "EntityAnalyticsModifications", | ||||
|     "async_devices_payload", | ||||
| ] | ||||
|  | ||||
| CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) | ||||
|  | ||||
| DATA_COMPONENT: HassKey[Analytics] = HassKey(DOMAIN) | ||||
|   | ||||
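The DATA_COMPONENT line above relies on HassKey for typed access to hass.data. A minimal sketch of that pattern, with an invented "demo" key and DemoData class:

# Hedged sketch of the HassKey pattern behind DATA_COMPONENT above; the "demo"
# key and DemoData class are invented for illustration.
from dataclasses import dataclass

from homeassistant.core import HomeAssistant
from homeassistant.util.hass_dict import HassKey


@dataclass
class DemoData:
    """Runtime data stored for the demo key."""

    counter: int


DATA_DEMO: HassKey[DemoData] = HassKey("demo")


def store_and_fetch(hass: HomeAssistant) -> DemoData:
    # Reads and writes through the key are type checked as DemoData, not Any.
    hass.data[DATA_DEMO] = DemoData(counter=0)
    return hass.data[DATA_DEMO]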
| @@ -4,10 +4,9 @@ from __future__ import annotations | ||||
|  | ||||
| import asyncio | ||||
| from asyncio import timeout | ||||
| from collections.abc import Awaitable, Callable, Iterable, Mapping | ||||
| from dataclasses import asdict as dataclass_asdict, dataclass, field | ||||
| from dataclasses import asdict as dataclass_asdict, dataclass | ||||
| from datetime import datetime | ||||
| from typing import Any, Protocol | ||||
| from typing import Any | ||||
| import uuid | ||||
|  | ||||
| import aiohttp | ||||
| @@ -36,14 +35,11 @@ from homeassistant.exceptions import HomeAssistantError | ||||
| from homeassistant.helpers import device_registry as dr, entity_registry as er | ||||
| from homeassistant.helpers.aiohttp_client import async_get_clientsession | ||||
| from homeassistant.helpers.hassio import is_hassio | ||||
| from homeassistant.helpers.singleton import singleton | ||||
| from homeassistant.helpers.storage import Store | ||||
| from homeassistant.helpers.system_info import async_get_system_info | ||||
| from homeassistant.helpers.typing import UNDEFINED | ||||
| from homeassistant.loader import ( | ||||
|     Integration, | ||||
|     IntegrationNotFound, | ||||
|     async_get_integration, | ||||
|     async_get_integrations, | ||||
| ) | ||||
| from homeassistant.setup import async_get_loaded_integrations | ||||
| @@ -79,115 +75,12 @@ from .const import ( | ||||
|     ATTR_USER_COUNT, | ||||
|     ATTR_UUID, | ||||
|     ATTR_VERSION, | ||||
|     DOMAIN, | ||||
|     LOGGER, | ||||
|     PREFERENCE_SCHEMA, | ||||
|     STORAGE_KEY, | ||||
|     STORAGE_VERSION, | ||||
| ) | ||||
|  | ||||
| DATA_ANALYTICS_MODIFIERS = "analytics_modifiers" | ||||
|  | ||||
| type AnalyticsModifier = Callable[ | ||||
|     [HomeAssistant, AnalyticsInput], Awaitable[AnalyticsModifications] | ||||
| ] | ||||
|  | ||||
|  | ||||
| @singleton(DATA_ANALYTICS_MODIFIERS) | ||||
| def _async_get_modifiers( | ||||
|     hass: HomeAssistant, | ||||
| ) -> dict[str, AnalyticsModifier | None]: | ||||
|     """Return the analytics modifiers.""" | ||||
|     return {} | ||||
|  | ||||
|  | ||||
| @dataclass | ||||
| class AnalyticsInput: | ||||
|     """Analytics input for a single integration. | ||||
|  | ||||
|     This is sent to integrations that implement the platform. | ||||
|     """ | ||||
|  | ||||
|     device_ids: Iterable[str] = field(default_factory=list) | ||||
|     entity_ids: Iterable[str] = field(default_factory=list) | ||||
|  | ||||
|  | ||||
| @dataclass | ||||
| class AnalyticsModifications: | ||||
|     """Analytics config for a single integration. | ||||
|  | ||||
|     This is used by integrations that implement the platform. | ||||
|     """ | ||||
|  | ||||
|     remove: bool = False | ||||
|     devices: Mapping[str, DeviceAnalyticsModifications] | None = None | ||||
|     entities: Mapping[str, EntityAnalyticsModifications] | None = None | ||||
|  | ||||
|  | ||||
| @dataclass | ||||
| class DeviceAnalyticsModifications: | ||||
|     """Analytics config for a single device. | ||||
|  | ||||
|     This is used by integrations that implement the platform. | ||||
|     """ | ||||
|  | ||||
|     remove: bool = False | ||||
|  | ||||
|  | ||||
| @dataclass | ||||
| class EntityAnalyticsModifications: | ||||
|     """Analytics config for a single entity. | ||||
|  | ||||
|     This is used by integrations that implement the platform. | ||||
|     """ | ||||
|  | ||||
|     remove: bool = False | ||||
|  | ||||
|  | ||||
| class AnalyticsPlatformProtocol(Protocol): | ||||
|     """Define the format of analytics platforms.""" | ||||
|  | ||||
|     async def async_modify_analytics( | ||||
|         self, | ||||
|         hass: HomeAssistant, | ||||
|         analytics_input: AnalyticsInput, | ||||
|     ) -> AnalyticsModifications: | ||||
|         """Modify the analytics.""" | ||||
|  | ||||
|  | ||||
| async def _async_get_analytics_platform( | ||||
|     hass: HomeAssistant, domain: str | ||||
| ) -> AnalyticsPlatformProtocol | None: | ||||
|     """Get analytics platform.""" | ||||
|     try: | ||||
|         integration = await async_get_integration(hass, domain) | ||||
|     except IntegrationNotFound: | ||||
|         return None | ||||
|     try: | ||||
|         return await integration.async_get_platform(DOMAIN) | ||||
|     except ImportError: | ||||
|         return None | ||||
|  | ||||
|  | ||||
| async def _async_get_modifier( | ||||
|     hass: HomeAssistant, domain: str | ||||
| ) -> AnalyticsModifier | None: | ||||
|     """Get analytics modifier.""" | ||||
|     modifiers = _async_get_modifiers(hass) | ||||
|     modifier = modifiers.get(domain, UNDEFINED) | ||||
|  | ||||
|     if modifier is not UNDEFINED: | ||||
|         return modifier | ||||
|  | ||||
|     platform = await _async_get_analytics_platform(hass, domain) | ||||
|     if platform is None: | ||||
|         modifiers[domain] = None | ||||
|         return None | ||||
|  | ||||
|     modifier = getattr(platform, "async_modify_analytics", None) | ||||
|     modifiers[domain] = modifier | ||||
|     return modifier | ||||
|  | ||||
|  | ||||
| def gen_uuid() -> str: | ||||
|     """Generate a new UUID.""" | ||||
| @@ -500,22 +393,17 @@ def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]: | ||||
|     return domains | ||||
|  | ||||
|  | ||||
| DEFAULT_ANALYTICS_CONFIG = AnalyticsModifications() | ||||
| DEFAULT_DEVICE_ANALYTICS_CONFIG = DeviceAnalyticsModifications() | ||||
| DEFAULT_ENTITY_ANALYTICS_CONFIG = EntityAnalyticsModifications() | ||||
|  | ||||
|  | ||||
| async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901 | ||||
| async def async_devices_payload(hass: HomeAssistant) -> dict: | ||||
|     """Return detailed information about entities and devices.""" | ||||
|     integrations_info: dict[str, dict[str, Any]] = {} | ||||
|  | ||||
|     dev_reg = dr.async_get(hass) | ||||
|     ent_reg = er.async_get(hass) | ||||
|  | ||||
|     integration_inputs: dict[str, tuple[list[str], list[str]]] = {} | ||||
|     integration_configs: dict[str, AnalyticsModifications] = {} | ||||
|     # We need to refer to other devices, for example in `via_device` field. | ||||
|     # We don't however send the original device ids outside of Home Assistant, | ||||
|     # instead we refer to devices by (integration_domain, index_in_integration_device_list). | ||||
|     device_id_mapping: dict[str, tuple[str, int]] = {} | ||||
|  | ||||
|     removed_devices: set[str] = set() | ||||
|  | ||||
|     # Get device list | ||||
|     for device_entry in dev_reg.devices.values(): | ||||
|         if not device_entry.primary_config_entry: | ||||
|             continue | ||||
| @@ -527,119 +415,28 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901 | ||||
|         if config_entry is None: | ||||
|             continue | ||||
|  | ||||
|         if device_entry.entry_type is dr.DeviceEntryType.SERVICE: | ||||
|             removed_devices.add(device_entry.id) | ||||
|             continue | ||||
|  | ||||
|         integration_domain = config_entry.domain | ||||
|  | ||||
|         integration_input = integration_inputs.setdefault(integration_domain, ([], [])) | ||||
|         integration_input[0].append(device_entry.id) | ||||
|  | ||||
|     # Get entity list | ||||
|     for entity_entry in ent_reg.entities.values(): | ||||
|         integration_domain = entity_entry.platform | ||||
|  | ||||
|         integration_input = integration_inputs.setdefault(integration_domain, ([], [])) | ||||
|         integration_input[1].append(entity_entry.entity_id) | ||||
|  | ||||
|     integrations = { | ||||
|         domain: integration | ||||
|         for domain, integration in ( | ||||
|             await async_get_integrations(hass, integration_inputs.keys()) | ||||
|         ).items() | ||||
|         if isinstance(integration, Integration) | ||||
|     } | ||||
|  | ||||
|     # Filter out custom integrations and integrations that are not device or hub type | ||||
|     integration_inputs = { | ||||
|         domain: integration_info | ||||
|         for domain, integration_info in integration_inputs.items() | ||||
|         if (integration := integrations.get(domain)) is not None | ||||
|         and integration.is_built_in | ||||
|         and integration.manifest.get("integration_type") in ("device", "hub") | ||||
|     } | ||||
|  | ||||
|     # Call integrations that implement the analytics platform | ||||
|     for integration_domain, integration_input in integration_inputs.items(): | ||||
|         if ( | ||||
|             modifier := await _async_get_modifier(hass, integration_domain) | ||||
|         ) is not None: | ||||
|             try: | ||||
|                 integration_config = await modifier( | ||||
|                     hass, AnalyticsInput(*integration_input) | ||||
|                 ) | ||||
|             except Exception as err:  # noqa: BLE001 | ||||
|                 LOGGER.exception( | ||||
|                     "Calling async_modify_analytics for integration '%s' failed: %s", | ||||
|                     integration_domain, | ||||
|                     err, | ||||
|                 ) | ||||
|                 integration_configs[integration_domain] = AnalyticsModifications( | ||||
|                     remove=True | ||||
|                 ) | ||||
|                 continue | ||||
|  | ||||
|             if not isinstance(integration_config, AnalyticsModifications): | ||||
|                 LOGGER.error(  # type: ignore[unreachable] | ||||
|                     "Calling async_modify_analytics for integration '%s' did not return an AnalyticsConfig", | ||||
|                     integration_domain, | ||||
|                 ) | ||||
|                 integration_configs[integration_domain] = AnalyticsModifications( | ||||
|                     remove=True | ||||
|                 ) | ||||
|                 continue | ||||
|  | ||||
|             integration_configs[integration_domain] = integration_config | ||||
|  | ||||
|     integrations_info: dict[str, dict[str, Any]] = {} | ||||
|  | ||||
|     # We need to refer to other devices, for example in `via_device` field. | ||||
|     # We don't however send the original device ids outside of Home Assistant, | ||||
|     # instead we refer to devices by (integration_domain, index_in_integration_device_list). | ||||
|     device_id_mapping: dict[str, tuple[str, int]] = {} | ||||
|  | ||||
|     # Fill out information about devices | ||||
|     for integration_domain, integration_input in integration_inputs.items(): | ||||
|         integration_config = integration_configs.get( | ||||
|             integration_domain, DEFAULT_ANALYTICS_CONFIG | ||||
|         ) | ||||
|  | ||||
|         if integration_config.remove: | ||||
|             continue | ||||
|  | ||||
|         integration_info = integrations_info.setdefault( | ||||
|             integration_domain, {"devices": [], "entities": []} | ||||
|         ) | ||||
|  | ||||
|         devices_info = integration_info["devices"] | ||||
|  | ||||
|         for device_id in integration_input[0]: | ||||
|             device_config = DEFAULT_DEVICE_ANALYTICS_CONFIG | ||||
|             if integration_config.devices is not None: | ||||
|                 device_config = integration_config.devices.get(device_id, device_config) | ||||
|         device_id_mapping[device_entry.id] = (integration_domain, len(devices_info)) | ||||
|  | ||||
|             if device_config.remove: | ||||
|                 removed_devices.add(device_id) | ||||
|                 continue | ||||
|  | ||||
|             device_entry = dev_reg.devices[device_id] | ||||
|  | ||||
|             device_id_mapping[device_id] = (integration_domain, len(devices_info)) | ||||
|  | ||||
|             devices_info.append( | ||||
|                 { | ||||
|                     "entry_type": device_entry.entry_type, | ||||
|                     "has_configuration_url": device_entry.configuration_url is not None, | ||||
|                     "hw_version": device_entry.hw_version, | ||||
|                     "manufacturer": device_entry.manufacturer, | ||||
|                     "model": device_entry.model, | ||||
|                     "model_id": device_entry.model_id, | ||||
|                     "sw_version": device_entry.sw_version, | ||||
|                     "via_device": device_entry.via_device_id, | ||||
|                     "entities": [], | ||||
|                 } | ||||
|             ) | ||||
|         devices_info.append( | ||||
|             { | ||||
|                 "entities": [], | ||||
|                 "entry_type": device_entry.entry_type, | ||||
|                 "has_configuration_url": device_entry.configuration_url is not None, | ||||
|                 "hw_version": device_entry.hw_version, | ||||
|                 "manufacturer": device_entry.manufacturer, | ||||
|                 "model": device_entry.model, | ||||
|                 "model_id": device_entry.model_id, | ||||
|                 "sw_version": device_entry.sw_version, | ||||
|                 "via_device": device_entry.via_device_id, | ||||
|             } | ||||
|         ) | ||||
|  | ||||
|     # Fill out via_device with new device ids | ||||
|     for integration_info in integrations_info.values(): | ||||
| @@ -648,15 +445,10 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901 | ||||
|                 continue | ||||
|             device_info["via_device"] = device_id_mapping.get(device_info["via_device"]) | ||||
|  | ||||
|     # Fill out information about entities | ||||
|     for integration_domain, integration_input in integration_inputs.items(): | ||||
|         integration_config = integration_configs.get( | ||||
|             integration_domain, DEFAULT_ANALYTICS_CONFIG | ||||
|         ) | ||||
|  | ||||
|         if integration_config.remove: | ||||
|             continue | ||||
|     ent_reg = er.async_get(hass) | ||||
|  | ||||
|     for entity_entry in ent_reg.entities.values(): | ||||
|         integration_domain = entity_entry.platform | ||||
|         integration_info = integrations_info.setdefault( | ||||
|             integration_domain, {"devices": [], "entities": []} | ||||
|         ) | ||||
| @@ -664,53 +456,53 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901 | ||||
|         devices_info = integration_info["devices"] | ||||
|         entities_info = integration_info["entities"] | ||||
|  | ||||
|         for entity_id in integration_input[1]: | ||||
|             entity_config = DEFAULT_ENTITY_ANALYTICS_CONFIG | ||||
|             if integration_config.entities is not None: | ||||
|                 entity_config = integration_config.entities.get( | ||||
|                     entity_id, entity_config | ||||
|                 ) | ||||
|         entity_state = hass.states.get(entity_entry.entity_id) | ||||
|  | ||||
|             if entity_config.remove: | ||||
|                 continue | ||||
|  | ||||
|             entity_entry = ent_reg.entities[entity_id] | ||||
|  | ||||
|             entity_state = hass.states.get(entity_id) | ||||
|  | ||||
|             entity_info = { | ||||
|                 # LIMITATION: `assumed_state` can be overridden by users; | ||||
|                 # we should replace it with the original value in the future. | ||||
|                 # It is also not present, if entity is not in the state machine, | ||||
|                 # which can happen for disabled entities. | ||||
|                 "assumed_state": ( | ||||
|                     entity_state.attributes.get(ATTR_ASSUMED_STATE, False) | ||||
|                     if entity_state is not None | ||||
|                     else None | ||||
|                 ), | ||||
|                 "domain": entity_entry.domain, | ||||
|                 "entity_category": entity_entry.entity_category, | ||||
|                 "has_entity_name": entity_entry.has_entity_name, | ||||
|                 "original_device_class": entity_entry.original_device_class, | ||||
|                 # LIMITATION: `unit_of_measurement` can be overridden by users; | ||||
|                 # we should replace it with the original value in the future. | ||||
|                 "unit_of_measurement": entity_entry.unit_of_measurement, | ||||
|             } | ||||
|  | ||||
|             if (device_id_ := entity_entry.device_id) is not None: | ||||
|                 if device_id_ in removed_devices: | ||||
|                     # The device was removed, so we remove the entity too | ||||
|                     continue | ||||
|  | ||||
|                 if ( | ||||
|                     new_device_id := device_id_mapping.get(device_id_) | ||||
|                 ) is not None and (new_device_id[0] == integration_domain): | ||||
|                     device_info = devices_info[new_device_id[1]] | ||||
|                     device_info["entities"].append(entity_info) | ||||
|                     continue | ||||
|         entity_info = { | ||||
|             # LIMITATION: `assumed_state` can be overridden by users; | ||||
|             # we should replace it with the original value in the future. | ||||
|             # It is also not present, if entity is not in the state machine, | ||||
|             # which can happen for disabled entities. | ||||
|             "assumed_state": entity_state.attributes.get(ATTR_ASSUMED_STATE, False) | ||||
|             if entity_state is not None | ||||
|             else None, | ||||
|             "capabilities": entity_entry.capabilities, | ||||
|             "domain": entity_entry.domain, | ||||
|             "entity_category": entity_entry.entity_category, | ||||
|             "has_entity_name": entity_entry.has_entity_name, | ||||
|             "original_device_class": entity_entry.original_device_class, | ||||
|             # LIMITATION: `unit_of_measurement` can be overridden by users; | ||||
|             # we should replace it with the original value in the future. | ||||
|             "unit_of_measurement": entity_entry.unit_of_measurement, | ||||
|         } | ||||
|  | ||||
|         if ( | ||||
|             ((device_id := entity_entry.device_id) is not None) | ||||
|             and ((new_device_id := device_id_mapping.get(device_id)) is not None) | ||||
|             and (new_device_id[0] == integration_domain) | ||||
|         ): | ||||
|             device_info = devices_info[new_device_id[1]] | ||||
|             device_info["entities"].append(entity_info) | ||||
|         else: | ||||
|             entities_info.append(entity_info) | ||||
|  | ||||
|     integrations = { | ||||
|         domain: integration | ||||
|         for domain, integration in ( | ||||
|             await async_get_integrations(hass, integrations_info.keys()) | ||||
|         ).items() | ||||
|         if isinstance(integration, Integration) | ||||
|     } | ||||
|  | ||||
|     for domain, integration_info in integrations_info.items(): | ||||
|         if integration := integrations.get(domain): | ||||
|             integration_info["is_custom_integration"] = not integration.is_built_in | ||||
|             # Include version for custom integrations | ||||
|             if not integration.is_built_in and integration.version: | ||||
|                 integration_info["custom_integration_version"] = str( | ||||
|                     integration.version | ||||
|                 ) | ||||
|  | ||||
|     return { | ||||
|         "version": "home-assistant:1", | ||||
|         "home_assistant": HA_VERSION, | ||||
|   | ||||
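To make the removed hooks above easier to follow: under that protocol, an integration opted in by shipping an analytics platform module exposing async_modify_analytics. A hedged reconstruction is below; the "_debug" entity filter is invented for illustration and is not part of this diff.

# Hedged reconstruction of an integration-side analytics platform under the removed
# protocol above (an analytics.py module inside the integration); the "_debug"
# entity filter is invented for illustration.
from homeassistant.components.analytics import (
    AnalyticsInput,
    AnalyticsModifications,
    EntityAnalyticsModifications,
)
from homeassistant.core import HomeAssistant


async def async_modify_analytics(
    hass: HomeAssistant, analytics_input: AnalyticsInput
) -> AnalyticsModifications:
    """Strip selected entities from the analytics payload."""
    return AnalyticsModifications(
        entities={
            entity_id: EntityAnalyticsModifications(remove=True)
            for entity_id in analytics_input.entity_ids
            if entity_id.endswith("_debug")
        }
    )

Under the removed loader, such a module was resolved via the integration's analytics platform and cached per domain by the singleton-backed _async_get_modifiers; returning AnalyticsModifications(remove=True) instead dropped the whole integration from the payload.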
| @@ -2,7 +2,7 @@ | ||||
|   "domain": "analytics", | ||||
|   "name": "Analytics", | ||||
|   "after_dependencies": ["energy", "hassio", "recorder"], | ||||
|   "codeowners": ["@home-assistant/core"], | ||||
|   "codeowners": ["@home-assistant/core", "@ludeeus"], | ||||
|   "dependencies": ["api", "websocket_api", "http"], | ||||
|   "documentation": "https://www.home-assistant.io/integrations/analytics", | ||||
|   "integration_type": "system", | ||||
|   | ||||
| @@ -41,11 +41,6 @@ APPS_NEW_ID = "add_new" | ||||
| CONF_APP_DELETE = "app_delete" | ||||
| CONF_APP_ID = "app_id" | ||||
|  | ||||
| _EXAMPLE_APP_ID = "com.plexapp.android" | ||||
| _EXAMPLE_APP_PLAY_STORE_URL = ( | ||||
|     f"https://play.google.com/store/apps/details?id={_EXAMPLE_APP_ID}" | ||||
| ) | ||||
|  | ||||
| STEP_PAIR_DATA_SCHEMA = vol.Schema( | ||||
|     { | ||||
|         vol.Required("pin"): str, | ||||
| @@ -360,7 +355,5 @@ class AndroidTVRemoteOptionsFlowHandler(OptionsFlowWithReload): | ||||
|             data_schema=data_schema, | ||||
|             description_placeholders={ | ||||
|                 "app_id": f"`{app_id}`" if app_id != APPS_NEW_ID else "", | ||||
|                 "example_app_id": _EXAMPLE_APP_ID, | ||||
|                 "example_app_play_store_url": _EXAMPLE_APP_PLAY_STORE_URL, | ||||
|             }, | ||||
|         ) | ||||
|   | ||||
| @@ -75,7 +75,7 @@ | ||||
|         }, | ||||
|         "data_description": { | ||||
|           "app_name": "Name of the application as you would like it to be displayed in Home Assistant.", | ||||
|           "app_id": "E.g. {example_app_id} for {example_app_play_store_url}", | ||||
|           "app_id": "E.g. com.plexapp.android for https://play.google.com/store/apps/details?id=com.plexapp.android", | ||||
|           "app_icon": "Image URL. From the Play Store app page, right click on the icon and select 'Copy image address' and then paste it here. Alternatively, download the image, upload it under /config/www/ and use the URL /local/filename", | ||||
|           "app_delete": "Check this box to delete the application from the list." | ||||
|         } | ||||
|   | ||||
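The config_flow and strings.json hunks above trade a {example_app_id} placeholder for hard-coded text. A minimal sketch of how description_placeholders values reach such a placeholder; the flow class and step are invented.

# Hedged sketch (invented flow class and step) of how description_placeholders
# values reach a {placeholder} referenced from strings.json, as in the hunks above.
from typing import Any

import voluptuous as vol

from homeassistant.config_entries import OptionsFlow
from homeassistant.data_entry_flow import FlowResult


class ExampleOptionsFlow(OptionsFlow):
    """Options flow whose form description interpolates placeholders."""

    async def async_step_apps(
        self, user_input: dict[str, Any] | None = None
    ) -> FlowResult:
        return self.async_show_form(
            step_id="apps",
            data_schema=vol.Schema({vol.Optional("app_id"): str}),
            # strings.json side: "app_id": "E.g. {example_app_id} for {example_app_play_store_url}"
            description_placeholders={
                "example_app_id": "com.plexapp.android",
                "example_app_play_store_url": "https://play.google.com/store/apps/details?id=com.plexapp.android",
            },
        )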
| @@ -4,15 +4,12 @@ from __future__ import annotations | ||||
|  | ||||
| from collections.abc import Mapping | ||||
| from functools import partial | ||||
| import json | ||||
| import logging | ||||
| from typing import Any, cast | ||||
|  | ||||
| import anthropic | ||||
| import voluptuous as vol | ||||
| from voluptuous_openapi import convert | ||||
|  | ||||
| from homeassistant.components.zone import ENTITY_ID_HOME | ||||
| from homeassistant.config_entries import ( | ||||
|     ConfigEntry, | ||||
|     ConfigEntryState, | ||||
| @@ -21,13 +18,7 @@ from homeassistant.config_entries import ( | ||||
|     ConfigSubentryFlow, | ||||
|     SubentryFlowResult, | ||||
| ) | ||||
| from homeassistant.const import ( | ||||
|     ATTR_LATITUDE, | ||||
|     ATTR_LONGITUDE, | ||||
|     CONF_API_KEY, | ||||
|     CONF_LLM_HASS_API, | ||||
|     CONF_NAME, | ||||
| ) | ||||
| from homeassistant.const import CONF_API_KEY, CONF_LLM_HASS_API, CONF_NAME | ||||
| from homeassistant.core import HomeAssistant, callback | ||||
| from homeassistant.helpers import llm | ||||
| from homeassistant.helpers.selector import ( | ||||
| @@ -46,23 +37,12 @@ from .const import ( | ||||
|     CONF_RECOMMENDED, | ||||
|     CONF_TEMPERATURE, | ||||
|     CONF_THINKING_BUDGET, | ||||
|     CONF_WEB_SEARCH, | ||||
|     CONF_WEB_SEARCH_CITY, | ||||
|     CONF_WEB_SEARCH_COUNTRY, | ||||
|     CONF_WEB_SEARCH_MAX_USES, | ||||
|     CONF_WEB_SEARCH_REGION, | ||||
|     CONF_WEB_SEARCH_TIMEZONE, | ||||
|     CONF_WEB_SEARCH_USER_LOCATION, | ||||
|     DEFAULT_CONVERSATION_NAME, | ||||
|     DOMAIN, | ||||
|     RECOMMENDED_CHAT_MODEL, | ||||
|     RECOMMENDED_MAX_TOKENS, | ||||
|     RECOMMENDED_TEMPERATURE, | ||||
|     RECOMMENDED_THINKING_BUDGET, | ||||
|     RECOMMENDED_WEB_SEARCH, | ||||
|     RECOMMENDED_WEB_SEARCH_MAX_USES, | ||||
|     RECOMMENDED_WEB_SEARCH_USER_LOCATION, | ||||
|     WEB_SEARCH_UNSUPPORTED_MODELS, | ||||
| ) | ||||
|  | ||||
| _LOGGER = logging.getLogger(__name__) | ||||
| @@ -188,14 +168,6 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow): | ||||
|                 CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET | ||||
|             ) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS): | ||||
|                 errors[CONF_THINKING_BUDGET] = "thinking_budget_too_large" | ||||
|             if user_input.get(CONF_WEB_SEARCH, RECOMMENDED_WEB_SEARCH): | ||||
|                 model = user_input.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL) | ||||
|                 if model.startswith(tuple(WEB_SEARCH_UNSUPPORTED_MODELS)): | ||||
|                     errors[CONF_WEB_SEARCH] = "web_search_unsupported_model" | ||||
|                 elif user_input.get( | ||||
|                     CONF_WEB_SEARCH_USER_LOCATION, RECOMMENDED_WEB_SEARCH_USER_LOCATION | ||||
|                 ): | ||||
|                     user_input.update(await self._get_location_data()) | ||||
|  | ||||
|             if not errors: | ||||
|                 if self._is_new: | ||||
| @@ -243,68 +215,6 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow): | ||||
|             errors=errors or None, | ||||
|         ) | ||||
|  | ||||
|     async def _get_location_data(self) -> dict[str, str]: | ||||
|         """Get approximate location data of the user.""" | ||||
|         location_data: dict[str, str] = {} | ||||
|         zone_home = self.hass.states.get(ENTITY_ID_HOME) | ||||
|         if zone_home is not None: | ||||
|             client = await self.hass.async_add_executor_job( | ||||
|                 partial( | ||||
|                     anthropic.AsyncAnthropic, | ||||
|                     api_key=self._get_entry().data[CONF_API_KEY], | ||||
|                 ) | ||||
|             ) | ||||
|             location_schema = vol.Schema( | ||||
|                 { | ||||
|                     vol.Optional( | ||||
|                         CONF_WEB_SEARCH_CITY, | ||||
|                         description="Free text input for the city, e.g. `San Francisco`", | ||||
|                     ): str, | ||||
|                     vol.Optional( | ||||
|                         CONF_WEB_SEARCH_REGION, | ||||
|                         description="Free text input for the region, e.g. `California`", | ||||
|                     ): str, | ||||
|                 } | ||||
|             ) | ||||
|             response = await client.messages.create( | ||||
|                 model=RECOMMENDED_CHAT_MODEL, | ||||
|                 messages=[ | ||||
|                     { | ||||
|                         "role": "user", | ||||
|                         "content": "Where are the following coordinates located: " | ||||
|                         f"({zone_home.attributes[ATTR_LATITUDE]}," | ||||
|                         f" {zone_home.attributes[ATTR_LONGITUDE]})? Please respond " | ||||
|                         "only with a JSON object using the following schema:\n" | ||||
|                         f"{convert(location_schema)}", | ||||
|                     }, | ||||
|                     { | ||||
|                         "role": "assistant", | ||||
|                         "content": "{",  # hints the model to skip any preamble | ||||
|                     }, | ||||
|                 ], | ||||
|                 max_tokens=RECOMMENDED_MAX_TOKENS, | ||||
|             ) | ||||
|             _LOGGER.debug("Model response: %s", response.content) | ||||
|             location_data = location_schema( | ||||
|                 json.loads( | ||||
|                     "{" | ||||
|                     + "".join( | ||||
|                         block.text | ||||
|                         for block in response.content | ||||
|                         if isinstance(block, anthropic.types.TextBlock) | ||||
|                     ) | ||||
|                 ) | ||||
|                 or {} | ||||
|             ) | ||||
|  | ||||
|         if self.hass.config.country: | ||||
|             location_data[CONF_WEB_SEARCH_COUNTRY] = self.hass.config.country | ||||
|         location_data[CONF_WEB_SEARCH_TIMEZONE] = self.hass.config.time_zone | ||||
|  | ||||
|         _LOGGER.debug("Location data: %s", location_data) | ||||
|  | ||||
|         return location_data | ||||
|  | ||||
|     async_step_user = async_step_set_options | ||||
|     async_step_reconfigure = async_step_set_options | ||||
|  | ||||
| @@ -363,18 +273,6 @@ def anthropic_config_option_schema( | ||||
|                 CONF_THINKING_BUDGET, | ||||
|                 default=RECOMMENDED_THINKING_BUDGET, | ||||
|             ): int, | ||||
|             vol.Optional( | ||||
|                 CONF_WEB_SEARCH, | ||||
|                 default=RECOMMENDED_WEB_SEARCH, | ||||
|             ): bool, | ||||
|             vol.Optional( | ||||
|                 CONF_WEB_SEARCH_MAX_USES, | ||||
|                 default=RECOMMENDED_WEB_SEARCH_MAX_USES, | ||||
|             ): int, | ||||
|             vol.Optional( | ||||
|                 CONF_WEB_SEARCH_USER_LOCATION, | ||||
|                 default=RECOMMENDED_WEB_SEARCH_USER_LOCATION, | ||||
|             ): bool, | ||||
|         } | ||||
|     ) | ||||
|     return schema | ||||
|   | ||||
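The removed _get_location_data above combines a voluptuous_openapi schema dump with an assistant-turn prefill of "{" to coax bare JSON out of the model. A hedged, standalone sketch of that pattern follows; the model alias, prompt wording, and helper name are illustrative, not part of this diff.

# Hedged sketch of the schema-prompt plus "{" prefill pattern used by the removed
# _get_location_data above; model alias, prompt wording, and helper name are
# illustrative, not part of this diff.
import json

import anthropic
import voluptuous as vol
from voluptuous_openapi import convert

LOCATION_SCHEMA = vol.Schema(
    {
        vol.Optional("city", description="Free text input for the city"): str,
        vol.Optional("region", description="Free text input for the region"): str,
    }
)


async def async_guess_location(
    client: anthropic.AsyncAnthropic, latitude: float, longitude: float
) -> dict[str, str]:
    """Ask the model for a JSON object matching LOCATION_SCHEMA."""
    response = await client.messages.create(
        model="claude-3-5-haiku-latest",
        max_tokens=1024,
        messages=[
            {
                "role": "user",
                "content": (
                    f"Where are the following coordinates located: ({latitude}, {longitude})? "
                    "Please respond only with a JSON object using the following schema:\n"
                    f"{convert(LOCATION_SCHEMA)}"
                ),
            },
            # Prefilling the assistant turn hints the model to skip any preamble.
            {"role": "assistant", "content": "{"},
        ],
    )
    text = "".join(
        block.text
        for block in response.content
        if isinstance(block, anthropic.types.TextBlock)
    )
    # The prefilled "{" is not echoed back, so it is prepended before parsing,
    # exactly as the removed code did.
    return LOCATION_SCHEMA(json.loads("{" + text) or {})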
| @@ -18,26 +18,10 @@ RECOMMENDED_TEMPERATURE = 1.0 | ||||
| CONF_THINKING_BUDGET = "thinking_budget" | ||||
| RECOMMENDED_THINKING_BUDGET = 0 | ||||
| MIN_THINKING_BUDGET = 1024 | ||||
| CONF_WEB_SEARCH = "web_search" | ||||
| RECOMMENDED_WEB_SEARCH = False | ||||
| CONF_WEB_SEARCH_USER_LOCATION = "user_location" | ||||
| RECOMMENDED_WEB_SEARCH_USER_LOCATION = False | ||||
| CONF_WEB_SEARCH_MAX_USES = "web_search_max_uses" | ||||
| RECOMMENDED_WEB_SEARCH_MAX_USES = 5 | ||||
| CONF_WEB_SEARCH_CITY = "city" | ||||
| CONF_WEB_SEARCH_REGION = "region" | ||||
| CONF_WEB_SEARCH_COUNTRY = "country" | ||||
| CONF_WEB_SEARCH_TIMEZONE = "timezone" | ||||
|  | ||||
| NON_THINKING_MODELS = [ | ||||
|     "claude-3-5",  # Both sonnet and haiku | ||||
|     "claude-3-opus", | ||||
|     "claude-3-haiku", | ||||
| ] | ||||
|  | ||||
| WEB_SEARCH_UNSUPPORTED_MODELS = [ | ||||
|     "claude-3-haiku", | ||||
|     "claude-3-opus", | ||||
|     "claude-3-5-sonnet-20240620", | ||||
|     "claude-3-5-sonnet-20241022", | ||||
| THINKING_MODELS = [ | ||||
|     "claude-3-7-sonnet", | ||||
|     "claude-sonnet-4-0", | ||||
|     "claude-opus-4-0", | ||||
|     "claude-opus-4-1", | ||||
| ] | ||||
|   | ||||
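The two sides above gate extended thinking on opposite model lists (NON_THINKING_MODELS as a deny list versus THINKING_MODELS as an allow list). A minimal sketch of the deny-list check as it is applied in entity.py; the constant values are copied from the left-hand side of the hunk, while the helper and asserts are invented.

# Minimal sketch of the deny-list gate for extended thinking; constants are copied
# from the left-hand side of the hunk above, the helper and asserts are invented.
MIN_THINKING_BUDGET = 1024
NON_THINKING_MODELS = ["claude-3-5", "claude-3-opus", "claude-3-haiku"]


def thinking_enabled(model: str, thinking_budget: int) -> bool:
    """Mirror the condition checked before enabling the thinking config."""
    return (
        not model.startswith(tuple(NON_THINKING_MODELS))
        and thinking_budget >= MIN_THINKING_BUDGET
    )


assert thinking_enabled("claude-sonnet-4-0", 2048)
assert not thinking_enabled("claude-3-5-haiku-latest", 2048)
assert not thinking_enabled("claude-sonnet-4-0", 0)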
| @@ -1,17 +1,12 @@ | ||||
| """Base entity for Anthropic.""" | ||||
|  | ||||
| from collections.abc import AsyncGenerator, Callable, Iterable | ||||
| from dataclasses import dataclass, field | ||||
| import json | ||||
| from typing import Any | ||||
|  | ||||
| import anthropic | ||||
| from anthropic import AsyncStream | ||||
| from anthropic.types import ( | ||||
|     CitationsDelta, | ||||
|     CitationsWebSearchResultLocation, | ||||
|     CitationWebSearchResultLocationParam, | ||||
|     ContentBlockParam, | ||||
|     InputJSONDelta, | ||||
|     MessageDeltaUsage, | ||||
|     MessageParam, | ||||
| @@ -21,16 +16,11 @@ from anthropic.types import ( | ||||
|     RawContentBlockStopEvent, | ||||
|     RawMessageDeltaEvent, | ||||
|     RawMessageStartEvent, | ||||
|     RawMessageStopEvent, | ||||
|     RedactedThinkingBlock, | ||||
|     RedactedThinkingBlockParam, | ||||
|     ServerToolUseBlock, | ||||
|     ServerToolUseBlockParam, | ||||
|     SignatureDelta, | ||||
|     TextBlock, | ||||
|     TextBlockParam, | ||||
|     TextCitation, | ||||
|     TextCitationParam, | ||||
|     TextDelta, | ||||
|     ThinkingBlock, | ||||
|     ThinkingBlockParam, | ||||
| @@ -39,15 +29,9 @@ from anthropic.types import ( | ||||
|     ThinkingDelta, | ||||
|     ToolParam, | ||||
|     ToolResultBlockParam, | ||||
|     ToolUnionParam, | ||||
|     ToolUseBlock, | ||||
|     ToolUseBlockParam, | ||||
|     Usage, | ||||
|     WebSearchTool20250305Param, | ||||
|     WebSearchToolRequestErrorParam, | ||||
|     WebSearchToolResultBlock, | ||||
|     WebSearchToolResultBlockParam, | ||||
|     WebSearchToolResultError, | ||||
| ) | ||||
| from anthropic.types.message_create_params import MessageCreateParamsStreaming | ||||
| from voluptuous_openapi import convert | ||||
| @@ -64,21 +48,14 @@ from .const import ( | ||||
|     CONF_MAX_TOKENS, | ||||
|     CONF_TEMPERATURE, | ||||
|     CONF_THINKING_BUDGET, | ||||
|     CONF_WEB_SEARCH, | ||||
|     CONF_WEB_SEARCH_CITY, | ||||
|     CONF_WEB_SEARCH_COUNTRY, | ||||
|     CONF_WEB_SEARCH_MAX_USES, | ||||
|     CONF_WEB_SEARCH_REGION, | ||||
|     CONF_WEB_SEARCH_TIMEZONE, | ||||
|     CONF_WEB_SEARCH_USER_LOCATION, | ||||
|     DOMAIN, | ||||
|     LOGGER, | ||||
|     MIN_THINKING_BUDGET, | ||||
|     NON_THINKING_MODELS, | ||||
|     RECOMMENDED_CHAT_MODEL, | ||||
|     RECOMMENDED_MAX_TOKENS, | ||||
|     RECOMMENDED_TEMPERATURE, | ||||
|     RECOMMENDED_THINKING_BUDGET, | ||||
|     THINKING_MODELS, | ||||
| ) | ||||
|  | ||||
| # Max number of back and forth with the LLM to generate a response | ||||
| @@ -96,69 +73,6 @@ def _format_tool( | ||||
|     ) | ||||
|  | ||||
|  | ||||
| @dataclass(slots=True) | ||||
| class CitationDetails: | ||||
|     """Citation details for a content part.""" | ||||
|  | ||||
|     index: int = 0 | ||||
|     """Start position of the text.""" | ||||
|  | ||||
|     length: int = 0 | ||||
|     """Length of the relevant data.""" | ||||
|  | ||||
|     citations: list[TextCitationParam] = field(default_factory=list) | ||||
|     """Citations for the content part.""" | ||||
|  | ||||
|  | ||||
| @dataclass(slots=True) | ||||
| class ContentDetails: | ||||
|     """Native data for AssistantContent.""" | ||||
|  | ||||
|     citation_details: list[CitationDetails] = field(default_factory=list) | ||||
|  | ||||
|     def has_content(self) -> bool: | ||||
|         """Check if there is any content.""" | ||||
|         return any(detail.length > 0 for detail in self.citation_details) | ||||
|  | ||||
|     def has_citations(self) -> bool: | ||||
|         """Check if there are any citations.""" | ||||
|         return any(detail.citations for detail in self.citation_details) | ||||
|  | ||||
|     def add_citation_detail(self) -> None: | ||||
|         """Add a new citation detail.""" | ||||
|         if not self.citation_details or self.citation_details[-1].length > 0: | ||||
|             self.citation_details.append( | ||||
|                 CitationDetails( | ||||
|                     index=self.citation_details[-1].index | ||||
|                     + self.citation_details[-1].length | ||||
|                     if self.citation_details | ||||
|                     else 0 | ||||
|                 ) | ||||
|             ) | ||||
|  | ||||
|     def add_citation(self, citation: TextCitation) -> None: | ||||
|         """Add a citation to the current detail.""" | ||||
|         if not self.citation_details: | ||||
|             self.citation_details.append(CitationDetails()) | ||||
|         citation_param: TextCitationParam | None = None | ||||
|         if isinstance(citation, CitationsWebSearchResultLocation): | ||||
|             citation_param = CitationWebSearchResultLocationParam( | ||||
|                 type="web_search_result_location", | ||||
|                 title=citation.title, | ||||
|                 url=citation.url, | ||||
|                 cited_text=citation.cited_text, | ||||
|                 encrypted_index=citation.encrypted_index, | ||||
|             ) | ||||
|         if citation_param: | ||||
|             self.citation_details[-1].citations.append(citation_param) | ||||
|  | ||||
|     def delete_empty(self) -> None: | ||||
|         """Delete empty citation details.""" | ||||
|         self.citation_details = [ | ||||
|             detail for detail in self.citation_details if detail.citations | ||||
|         ] | ||||
|  | ||||
|  | ||||
| def _convert_content( | ||||
|     chat_content: Iterable[conversation.Content], | ||||
| ) -> list[MessageParam]: | ||||
| @@ -167,31 +81,15 @@ def _convert_content( | ||||
|  | ||||
|     for content in chat_content: | ||||
|         if isinstance(content, conversation.ToolResultContent): | ||||
|             if content.tool_name == "web_search": | ||||
|                 tool_result_block: ContentBlockParam = WebSearchToolResultBlockParam( | ||||
|                     type="web_search_tool_result", | ||||
|                     tool_use_id=content.tool_call_id, | ||||
|                     content=content.tool_result["content"] | ||||
|                     if "content" in content.tool_result | ||||
|                     else WebSearchToolRequestErrorParam( | ||||
|                         type="web_search_tool_result_error", | ||||
|                         error_code=content.tool_result.get("error_code", "unavailable"),  # type: ignore[typeddict-item] | ||||
|                     ), | ||||
|                 ) | ||||
|                 external_tool = True | ||||
|             else: | ||||
|                 tool_result_block = ToolResultBlockParam( | ||||
|                     type="tool_result", | ||||
|                     tool_use_id=content.tool_call_id, | ||||
|                     content=json.dumps(content.tool_result), | ||||
|                 ) | ||||
|                 external_tool = False | ||||
|             if not messages or messages[-1]["role"] != ( | ||||
|                 "assistant" if external_tool else "user" | ||||
|             ): | ||||
|             tool_result_block = ToolResultBlockParam( | ||||
|                 type="tool_result", | ||||
|                 tool_use_id=content.tool_call_id, | ||||
|                 content=json.dumps(content.tool_result), | ||||
|             ) | ||||
|             if not messages or messages[-1]["role"] != "user": | ||||
|                 messages.append( | ||||
|                     MessageParam( | ||||
|                         role="assistant" if external_tool else "user", | ||||
|                         role="user", | ||||
|                         content=[tool_result_block], | ||||
|                     ) | ||||
|                 ) | ||||
| @@ -253,56 +151,13 @@ def _convert_content( | ||||
|                         redacted_thinking_block | ||||
|                     ) | ||||
|             if content.content: | ||||
|                 current_index = 0 | ||||
|                 for detail in ( | ||||
|                     content.native.citation_details | ||||
|                     if isinstance(content.native, ContentDetails) | ||||
|                     else [CitationDetails(length=len(content.content))] | ||||
|                 ): | ||||
|                     if detail.index > current_index: | ||||
|                         # Add text block for any text without citations | ||||
|                         messages[-1]["content"].append(  # type: ignore[union-attr] | ||||
|                             TextBlockParam( | ||||
|                                 type="text", | ||||
|                                 text=content.content[current_index : detail.index], | ||||
|                             ) | ||||
|                         ) | ||||
|                     messages[-1]["content"].append(  # type: ignore[union-attr] | ||||
|                         TextBlockParam( | ||||
|                             type="text", | ||||
|                             text=content.content[ | ||||
|                                 detail.index : detail.index + detail.length | ||||
|                             ], | ||||
|                             citations=detail.citations, | ||||
|                         ) | ||||
|                         if detail.citations | ||||
|                         else TextBlockParam( | ||||
|                             type="text", | ||||
|                             text=content.content[ | ||||
|                                 detail.index : detail.index + detail.length | ||||
|                             ], | ||||
|                         ) | ||||
|                     ) | ||||
|                     current_index = detail.index + detail.length | ||||
|                 if current_index < len(content.content): | ||||
|                     # Add text block for any remaining text without citations | ||||
|                     messages[-1]["content"].append(  # type: ignore[union-attr] | ||||
|                         TextBlockParam( | ||||
|                             type="text", | ||||
|                             text=content.content[current_index:], | ||||
|                         ) | ||||
|                     ) | ||||
|                 messages[-1]["content"].append(  # type: ignore[union-attr] | ||||
|                     TextBlockParam(type="text", text=content.content) | ||||
|                 ) | ||||
|             if content.tool_calls: | ||||
|                 messages[-1]["content"].extend(  # type: ignore[union-attr] | ||||
|                     [ | ||||
|                         ServerToolUseBlockParam( | ||||
|                             type="server_tool_use", | ||||
|                             id=tool_call.id, | ||||
|                             name="web_search", | ||||
|                             input=tool_call.tool_args, | ||||
|                         ) | ||||
|                         if tool_call.external and tool_call.tool_name == "web_search" | ||||
|                         else ToolUseBlockParam( | ||||
|                         ToolUseBlockParam( | ||||
|                             type="tool_use", | ||||
|                             id=tool_call.id, | ||||
|                             name=tool_call.tool_name, | ||||
| @@ -318,12 +173,10 @@ def _convert_content( | ||||
|     return messages | ||||
|  | ||||
|  | ||||
| async def _transform_stream(  # noqa: C901 - This is complex, but better to have it in one place | ||||
| async def _transform_stream( | ||||
|     chat_log: conversation.ChatLog, | ||||
|     stream: AsyncStream[MessageStreamEvent], | ||||
| ) -> AsyncGenerator[ | ||||
|     conversation.AssistantContentDeltaDict | conversation.ToolResultContentDeltaDict | ||||
| ]: | ||||
| ) -> AsyncGenerator[conversation.AssistantContentDeltaDict]: | ||||
|     """Transform the response stream into HA format. | ||||
|  | ||||
|     A typical stream of responses might look something like the following: | ||||
| @@ -356,13 +209,11 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have | ||||
|     if stream is None: | ||||
|         raise TypeError("Expected a stream of messages") | ||||
|  | ||||
|     current_tool_block: ToolUseBlockParam | ServerToolUseBlockParam | None = None | ||||
|     current_tool_block: ToolUseBlockParam | None = None | ||||
|     current_tool_args: str | ||||
|     content_details = ContentDetails() | ||||
|     content_details.add_citation_detail() | ||||
|     input_usage: Usage | None = None | ||||
|     has_content = False | ||||
|     has_native = False | ||||
|     first_block: bool | ||||
|  | ||||
|     async for response in stream: | ||||
|         LOGGER.debug("Received response: %s", response) | ||||
| @@ -371,7 +222,6 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have | ||||
|             if response.message.role != "assistant": | ||||
|                 raise ValueError("Unexpected message role") | ||||
|             input_usage = response.message.usage | ||||
|             first_block = True | ||||
|         elif isinstance(response, RawContentBlockStartEvent): | ||||
|             if isinstance(response.content_block, ToolUseBlock): | ||||
|                 current_tool_block = ToolUseBlockParam( | ||||
| @@ -382,37 +232,17 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have | ||||
|                 ) | ||||
|                 current_tool_args = "" | ||||
|             elif isinstance(response.content_block, TextBlock): | ||||
|                 if (  # Do not start a new assistant content just for citations, concatenate consecutive blocks with citations instead. | ||||
|                     first_block | ||||
|                     or ( | ||||
|                         not content_details.has_citations() | ||||
|                         and response.content_block.citations is None | ||||
|                         and content_details.has_content() | ||||
|                     ) | ||||
|                 ): | ||||
|                     if content_details.has_citations(): | ||||
|                         content_details.delete_empty() | ||||
|                         yield {"native": content_details} | ||||
|                     content_details = ContentDetails() | ||||
|                 if has_content: | ||||
|                     yield {"role": "assistant"} | ||||
|                     has_native = False | ||||
|                     first_block = False | ||||
|                 content_details.add_citation_detail() | ||||
|                 has_content = True | ||||
|                 if response.content_block.text: | ||||
|                     content_details.citation_details[-1].length += len( | ||||
|                         response.content_block.text | ||||
|                     ) | ||||
|                     yield {"content": response.content_block.text} | ||||
|             elif isinstance(response.content_block, ThinkingBlock): | ||||
|                 if first_block or has_native: | ||||
|                     if content_details.has_citations(): | ||||
|                         content_details.delete_empty() | ||||
|                         yield {"native": content_details} | ||||
|                     content_details = ContentDetails() | ||||
|                     content_details.add_citation_detail() | ||||
|                 if has_native: | ||||
|                     yield {"role": "assistant"} | ||||
|                     has_native = False | ||||
|                     first_block = False | ||||
|                     has_content = False | ||||
|             elif isinstance(response.content_block, RedactedThinkingBlock): | ||||
|                 LOGGER.debug( | ||||
|                     "Some of Claude’s internal reasoning has been automatically " | ||||
| @@ -420,60 +250,15 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have | ||||
|                     "responses" | ||||
|                 ) | ||||
|                 if has_native: | ||||
|                     if content_details.has_citations(): | ||||
|                         content_details.delete_empty() | ||||
|                         yield {"native": content_details} | ||||
|                     content_details = ContentDetails() | ||||
|                     content_details.add_citation_detail() | ||||
|                     yield {"role": "assistant"} | ||||
|                     has_native = False | ||||
|                     first_block = False | ||||
|                     has_content = False | ||||
|                 yield {"native": response.content_block} | ||||
|                 has_native = True | ||||
|             elif isinstance(response.content_block, ServerToolUseBlock): | ||||
|                 current_tool_block = ServerToolUseBlockParam( | ||||
|                     type="server_tool_use", | ||||
|                     id=response.content_block.id, | ||||
|                     name=response.content_block.name, | ||||
|                     input="", | ||||
|                 ) | ||||
|                 current_tool_args = "" | ||||
|             elif isinstance(response.content_block, WebSearchToolResultBlock): | ||||
|                 if content_details.has_citations(): | ||||
|                     content_details.delete_empty() | ||||
|                     yield {"native": content_details} | ||||
|                 content_details = ContentDetails() | ||||
|                 content_details.add_citation_detail() | ||||
|                 yield { | ||||
|                     "role": "tool_result", | ||||
|                     "tool_call_id": response.content_block.tool_use_id, | ||||
|                     "tool_name": "web_search", | ||||
|                     "tool_result": { | ||||
|                         "type": "web_search_tool_result_error", | ||||
|                         "error_code": response.content_block.content.error_code, | ||||
|                     } | ||||
|                     if isinstance( | ||||
|                         response.content_block.content, WebSearchToolResultError | ||||
|                     ) | ||||
|                     else { | ||||
|                         "content": [ | ||||
|                             { | ||||
|                                 "type": "web_search_result", | ||||
|                                 "encrypted_content": block.encrypted_content, | ||||
|                                 "page_age": block.page_age, | ||||
|                                 "title": block.title, | ||||
|                                 "url": block.url, | ||||
|                             } | ||||
|                             for block in response.content_block.content | ||||
|                         ] | ||||
|                     }, | ||||
|                 } | ||||
|                 first_block = True | ||||
|         elif isinstance(response, RawContentBlockDeltaEvent): | ||||
|             if isinstance(response.delta, InputJSONDelta): | ||||
|                 current_tool_args += response.delta.partial_json | ||||
|             elif isinstance(response.delta, TextDelta): | ||||
|                 content_details.citation_details[-1].length += len(response.delta.text) | ||||
|                 yield {"content": response.delta.text} | ||||
|             elif isinstance(response.delta, ThinkingDelta): | ||||
|                 yield {"thinking_content": response.delta.thinking} | ||||
| @@ -486,8 +271,6 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have | ||||
|                     ) | ||||
|                 } | ||||
|                 has_native = True | ||||
|             elif isinstance(response.delta, CitationsDelta): | ||||
|                 content_details.add_citation(response.delta.citation) | ||||
|         elif isinstance(response, RawContentBlockStopEvent): | ||||
|             if current_tool_block is not None: | ||||
|                 tool_args = json.loads(current_tool_args) if current_tool_args else {} | ||||
| @@ -498,7 +281,6 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have | ||||
|                             id=current_tool_block["id"], | ||||
|                             tool_name=current_tool_block["name"], | ||||
|                             tool_args=tool_args, | ||||
|                             external=current_tool_block["type"] == "server_tool_use", | ||||
|                         ) | ||||
|                     ] | ||||
|                 } | ||||
| @@ -508,12 +290,6 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have | ||||
|                 chat_log.async_trace(_create_token_stats(input_usage, usage)) | ||||
|             if response.delta.stop_reason == "refusal": | ||||
|                 raise HomeAssistantError("Potential policy violation detected") | ||||
|         elif isinstance(response, RawMessageStopEvent): | ||||
|             if content_details.has_citations(): | ||||
|                 content_details.delete_empty() | ||||
|                 yield {"native": content_details} | ||||
|             content_details = ContentDetails() | ||||
|             content_details.add_citation_detail() | ||||
|  | ||||
|  | ||||
| def _create_token_stats( | ||||
| @@ -561,11 +337,21 @@ class AnthropicBaseLLMEntity(Entity): | ||||
|         """Generate an answer for the chat log.""" | ||||
|         options = self.subentry.data | ||||
|  | ||||
|         tools: list[ToolParam] | None = None | ||||
|         if chat_log.llm_api: | ||||
|             tools = [ | ||||
|                 _format_tool(tool, chat_log.llm_api.custom_serializer) | ||||
|                 for tool in chat_log.llm_api.tools | ||||
|             ] | ||||
|  | ||||
|         system = chat_log.content[0] | ||||
|         if not isinstance(system, conversation.SystemContent): | ||||
|             raise TypeError("First message must be a system message") | ||||
|         messages = _convert_content(chat_log.content[1:]) | ||||
|  | ||||
|         client = self.entry.runtime_data | ||||
|  | ||||
|         thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET) | ||||
|         model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL) | ||||
|  | ||||
|         model_args = MessageCreateParamsStreaming( | ||||
| @@ -575,10 +361,10 @@ class AnthropicBaseLLMEntity(Entity): | ||||
|             system=system.content, | ||||
|             stream=True, | ||||
|         ) | ||||
|  | ||||
|         thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET) | ||||
|         if tools: | ||||
|             model_args["tools"] = tools | ||||
|         if ( | ||||
|             not model.startswith(tuple(NON_THINKING_MODELS)) | ||||
|             model.startswith(tuple(THINKING_MODELS)) | ||||
|             and thinking_budget >= MIN_THINKING_BUDGET | ||||
|         ): | ||||
|             model_args["thinking"] = ThinkingConfigEnabledParam( | ||||
| @@ -590,34 +376,6 @@ class AnthropicBaseLLMEntity(Entity): | ||||
|                 CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE | ||||
|             ) | ||||
|  | ||||
|         tools: list[ToolUnionParam] = [] | ||||
|         if chat_log.llm_api: | ||||
|             tools = [ | ||||
|                 _format_tool(tool, chat_log.llm_api.custom_serializer) | ||||
|                 for tool in chat_log.llm_api.tools | ||||
|             ] | ||||
|  | ||||
|         if options.get(CONF_WEB_SEARCH): | ||||
|             web_search = WebSearchTool20250305Param( | ||||
|                 name="web_search", | ||||
|                 type="web_search_20250305", | ||||
|                 max_uses=options.get(CONF_WEB_SEARCH_MAX_USES), | ||||
|             ) | ||||
|             if options.get(CONF_WEB_SEARCH_USER_LOCATION): | ||||
|                 web_search["user_location"] = { | ||||
|                     "type": "approximate", | ||||
|                     "city": options.get(CONF_WEB_SEARCH_CITY, ""), | ||||
|                     "region": options.get(CONF_WEB_SEARCH_REGION, ""), | ||||
|                     "country": options.get(CONF_WEB_SEARCH_COUNTRY, ""), | ||||
|                     "timezone": options.get(CONF_WEB_SEARCH_TIMEZONE, ""), | ||||
|                 } | ||||
|             tools.append(web_search) | ||||
|  | ||||
|         if tools: | ||||
|             model_args["tools"] = tools | ||||
|  | ||||
|         client = self.entry.runtime_data | ||||
|  | ||||
|         # To prevent infinite loops, we limit the number of iterations | ||||
|         for _iteration in range(MAX_TOOL_ITERATIONS): | ||||
|             try: | ||||
|   | ||||
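One detail of _transform_stream above worth spelling out: tool arguments arrive as InputJSONDelta fragments that are concatenated and only parsed once the content block stops. A tiny self-contained sketch of that accumulation; the chunk values are made up.

# Hedged, self-contained sketch of the tool-argument accumulation done in
# _transform_stream above; the chunk values are made up.
import json

partial_json_chunks = ['{"brightness', '": 128, "entity_id": "light.k', 'itchen"}']

current_tool_args = ""
for delta in partial_json_chunks:  # each piece is an InputJSONDelta.partial_json
    current_tool_args += delta

# Parsing happens only at the RawContentBlockStopEvent, once the JSON is complete.
tool_args = json.loads(current_tool_args) if current_tool_args else {}
assert tool_args == {"brightness": 128, "entity_id": "light.kitchen"}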
| @@ -8,5 +8,5 @@ | ||||
|   "documentation": "https://www.home-assistant.io/integrations/anthropic", | ||||
|   "integration_type": "service", | ||||
|   "iot_class": "cloud_polling", | ||||
|   "requirements": ["anthropic==0.69.0"] | ||||
|   "requirements": ["anthropic==0.62.0"] | ||||
| } | ||||
|   | ||||
| @@ -35,17 +35,11 @@ | ||||
|             "temperature": "Temperature", | ||||
|             "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]", | ||||
|             "recommended": "Recommended model settings", | ||||
|             "thinking_budget": "Thinking budget", | ||||
|             "web_search": "Enable web search", | ||||
|             "web_search_max_uses": "Maximum web searches", | ||||
|             "user_location": "Include home location" | ||||
|             "thinking_budget_tokens": "Thinking budget" | ||||
|           }, | ||||
|           "data_description": { | ||||
|             "prompt": "Instruct how the LLM should respond. This can be a template.", | ||||
|             "thinking_budget": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking.", | ||||
|             "web_search": "The web search tool gives Claude direct access to real-time web content, allowing it to answer questions with up-to-date information beyond its knowledge cutoff", | ||||
|             "web_search_max_uses": "Limit the number of searches performed per response", | ||||
|             "user_location": "Localize search results based on home location" | ||||
|             "thinking_budget_tokens": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking." | ||||
|           } | ||||
|         } | ||||
|       }, | ||||
| @@ -54,8 +48,7 @@ | ||||
|         "entry_not_loaded": "Cannot add things while the configuration is disabled." | ||||
|       }, | ||||
|       "error": { | ||||
|         "thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget.", | ||||
|         "web_search_unsupported_model": "Web search is not supported by the selected model. Please choose a compatible model or disable web search." | ||||
|         "thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget." | ||||
|       } | ||||
|     } | ||||
|   } | ||||
|   | ||||
| @@ -5,9 +5,14 @@ from __future__ import annotations | ||||
| import asyncio | ||||
| import logging | ||||
| from random import randrange | ||||
| import sys | ||||
| from typing import Any, cast | ||||
|  | ||||
| from pyatv import connect, exceptions, scan | ||||
| from pyatv.conf import AppleTV | ||||
| from pyatv.const import DeviceModel, Protocol | ||||
| from pyatv.convert import model_str | ||||
| from pyatv.interface import AppleTV as AppleTVInterface, DeviceListener | ||||
|  | ||||
| from homeassistant.components import zeroconf | ||||
| from homeassistant.config_entries import ConfigEntry | ||||
| from homeassistant.const import ( | ||||
| @@ -24,11 +29,7 @@ from homeassistant.const import ( | ||||
|     Platform, | ||||
| ) | ||||
| from homeassistant.core import Event, HomeAssistant, callback | ||||
| from homeassistant.exceptions import ( | ||||
|     ConfigEntryAuthFailed, | ||||
|     ConfigEntryNotReady, | ||||
|     HomeAssistantError, | ||||
| ) | ||||
| from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady | ||||
| from homeassistant.helpers import device_registry as dr | ||||
| from homeassistant.helpers.aiohttp_client import async_get_clientsession | ||||
| from homeassistant.helpers.dispatcher import async_dispatcher_send | ||||
| @@ -42,18 +43,6 @@ from .const import ( | ||||
|     SIGNAL_DISCONNECTED, | ||||
| ) | ||||
|  | ||||
| if sys.version_info < (3, 14): | ||||
|     from pyatv import connect, exceptions, scan | ||||
|     from pyatv.conf import AppleTV | ||||
|     from pyatv.const import DeviceModel, Protocol | ||||
|     from pyatv.convert import model_str | ||||
|     from pyatv.interface import AppleTV as AppleTVInterface, DeviceListener | ||||
| else: | ||||
|  | ||||
|     class DeviceListener: | ||||
|         """Dummy class.""" | ||||
|  | ||||
|  | ||||
| _LOGGER = logging.getLogger(__name__) | ||||
|  | ||||
| DEFAULT_NAME_TV = "Apple TV" | ||||
| @@ -64,41 +53,31 @@ BACKOFF_TIME_UPPER_LIMIT = 300  # Five minutes | ||||
|  | ||||
| PLATFORMS = [Platform.MEDIA_PLAYER, Platform.REMOTE] | ||||
|  | ||||
| if sys.version_info < (3, 14): | ||||
|     AUTH_EXCEPTIONS = ( | ||||
|         exceptions.AuthenticationError, | ||||
|         exceptions.InvalidCredentialsError, | ||||
|         exceptions.NoCredentialsError, | ||||
|     ) | ||||
|     CONNECTION_TIMEOUT_EXCEPTIONS = ( | ||||
|         OSError, | ||||
|         asyncio.CancelledError, | ||||
|         TimeoutError, | ||||
|         exceptions.ConnectionLostError, | ||||
|         exceptions.ConnectionFailedError, | ||||
|     ) | ||||
|     DEVICE_EXCEPTIONS = ( | ||||
|         exceptions.ProtocolError, | ||||
|         exceptions.NoServiceError, | ||||
|         exceptions.PairingError, | ||||
|         exceptions.BackOffError, | ||||
|         exceptions.DeviceIdMissingError, | ||||
|     ) | ||||
| else: | ||||
|     AUTH_EXCEPTIONS = () | ||||
|     CONNECTION_TIMEOUT_EXCEPTIONS = () | ||||
|     DEVICE_EXCEPTIONS = () | ||||
|  | ||||
| AUTH_EXCEPTIONS = ( | ||||
|     exceptions.AuthenticationError, | ||||
|     exceptions.InvalidCredentialsError, | ||||
|     exceptions.NoCredentialsError, | ||||
| ) | ||||
| CONNECTION_TIMEOUT_EXCEPTIONS = ( | ||||
|     OSError, | ||||
|     asyncio.CancelledError, | ||||
|     TimeoutError, | ||||
|     exceptions.ConnectionLostError, | ||||
|     exceptions.ConnectionFailedError, | ||||
| ) | ||||
| DEVICE_EXCEPTIONS = ( | ||||
|     exceptions.ProtocolError, | ||||
|     exceptions.NoServiceError, | ||||
|     exceptions.PairingError, | ||||
|     exceptions.BackOffError, | ||||
|     exceptions.DeviceIdMissingError, | ||||
| ) | ||||
|  | ||||
| type AppleTvConfigEntry = ConfigEntry[AppleTVManager] | ||||
|  | ||||
|  | ||||
| async def async_setup_entry(hass: HomeAssistant, entry: AppleTvConfigEntry) -> bool: | ||||
|     """Set up a config entry for Apple TV.""" | ||||
|     if sys.version_info >= (3, 14): | ||||
|         raise HomeAssistantError( | ||||
|             "Apple TV is not supported on Python 3.14. Please use Python 3.13." | ||||
|         ) | ||||
|     manager = AppleTVManager(hass, entry) | ||||
|  | ||||
|     if manager.is_on: | ||||
|   | ||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user