Mirror of https://github.com/home-assistant/core.git (synced 2025-10-31 06:29:31 +00:00)

Compare commits: mqtt-subsc ... 2025.10.4 (183 commits)
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|   | c960bd2845 | ||
|   | 5679ab0f86 | ||
|   | 2761dcbc48 | ||
|   | 3a00d96571 | ||
|   | c86ad896b8 | ||
|   | 9c1d8747be | ||
|   | 4a003114bd | ||
|   | dcc3f14b1f | ||
|   | 7687d5ea48 | ||
|   | 27cc3c838a | ||
|   | 619cb91839 | ||
|   | 5e5e130d4e | ||
|   | f6ac23cc58 | ||
|   | 244b6437b2 | ||
|   | 5dc271b201 | ||
|   | 531cc3e1ce | ||
|   | ed7c3cb339 | ||
|   | 16e11ed801 | ||
|   | 32a7bf4dbb | ||
|   | 856c99dc22 | ||
|   | a50b00b3c2 | ||
|   | 1df8b1063b | ||
|   | 32cd4364f6 | ||
|   | 0828a842a5 | ||
|   | f63a527a01 | ||
|   | 254a9ecc25 | ||
|   | a518907b09 | ||
|   | cd85699151 | ||
|   | f49dfbd459 | ||
|   | 3ed70bb751 | ||
|   | b4b1065737 | ||
|   | 7267c3c04e | ||
|   | 6ac4d2dd59 | ||
|   | 03abd5d277 | ||
|   | 66bb0db08b | ||
|   | 56ae579e83 | ||
|   | add1915b8a | ||
|   | 18ef4af8d0 | ||
|   | 3c6788212f | ||
|   | dbd8b1bc19 | ||
|   | d135f1c110 | ||
|   | bb98ed6633 | ||
|   | 59dace572a | ||
|   | 735cf36a5b | ||
|   | 90b0f50b8f | ||
|   | e731c07b77 | ||
|   | 2c75635e95 | ||
|   | 1f031695c2 | ||
|   | fb279212a9 | ||
|   | 45869523d0 | ||
|   | a753926f22 | ||
|   | dc874ff53a | ||
|   | 3ef6865708 | ||
|   | 7f1989f9f2 | ||
|   | 97e338c760 | ||
|   | 101679c17d | ||
|   | bc784c356e | ||
|   | 556cc57d8b | ||
|   | eef6e96a93 | ||
|   | 56d237af7f | ||
|   | e5d1902d2a | ||
|   | a9a203678e | ||
|   | 7f6237cc63 | ||
|   | 5468e691ca | ||
|   | 67cbbc3522 | ||
|   | 504da54c11 | ||
|   | cdda2ef5c8 | ||
|   | f405f9eb4b | ||
|   | 634f71835a | ||
|   | 49bfb01fac | ||
|   | ad8f7fdcab | ||
|   | f82ec81062 | ||
|   | 03b0842a01 | ||
|   | 13e5cb5cc8 | ||
|   | f18cdaf4d8 | ||
|   | 5b3bca1426 | ||
|   | d812e9d43c | ||
|   | fa1071b221 | ||
|   | e48c2c6c0b | ||
|   | bddd4100c0 | ||
|   | 70d8df2e95 | ||
|   | 08b3dd0173 | ||
|   | 6723a7c4e1 | ||
|   | 40d7f2a89e | ||
|   | 13b717e2da | ||
|   | 5fcfd3ad84 | ||
|   | 324a7b5443 | ||
|   | 491ae8f72c | ||
|   | 259247892f | ||
|   | caeda0ef64 | ||
|   | df35c535e4 | ||
|   | f93b9e0ed0 | ||
|   | 48a3372cf2 | ||
|   | d84fd72428 | ||
|   | e8cb386962 | ||
|   | 5ac726703c | ||
|   | 688649a799 | ||
|   | c5359ade3e | ||
|   | 4e60dedc1b | ||
|   | 221d74f83a | ||
|   | fbbb3d6415 | ||
|   | 8297019011 | ||
|   | 61715dcff3 | ||
|   | 32b822ee99 | ||
|   | e6c2e0ad80 | ||
|   | 1314427dc5 | ||
|   | bf499a45f7 | ||
|   | b955e22628 | ||
|   | 1b222ff5fd | ||
|   | f0510e703f | ||
|   | cbe3956e15 | ||
|   | 4588e9da8d | ||
|   | 5445890fdf | ||
|   | 9b49f77f86 | ||
|   | 566c8fb786 | ||
|   | b36150c213 | ||
|   | 809070d2ad | ||
|   | f4339dc031 | ||
|   | f3b37d24b0 | ||
|   | 4c8348caa7 | ||
|   | b9e7c102ea | ||
|   | 69d9fa89b7 | ||
|   | 6f3f5a5ec1 | ||
|   | 5ecfeca90a | ||
|   | 00e0570fd4 | ||
|   | 5a5b94f3af | ||
|   | 34f00d9b33 | ||
|   | 4cabc5b368 | ||
|   | 4045125422 | ||
|   | d7393af76f | ||
|   | ad41386b27 | ||
|   | 62d17ea20c | ||
|   | c4954731d0 | ||
|   | 647723d3f0 | ||
|   | 51c500e22c | ||
|   | f6fc13c1f2 | ||
|   | 0009a7a042 | ||
|   | a3d1aa28e7 | ||
|   | 9f53eb9b76 | ||
|   | f53a205ff3 | ||
|   | d08517c3df | ||
|   | d7398a44a1 | ||
|   | 9acfc0cb88 | ||
|   | 1b3d21523a | ||
|   | 1d407d1326 | ||
|   | 013346cead | ||
|   | 5abaabc9da | ||
|   | 32481312c3 | ||
|   | bdc9eb37d3 | ||
|   | e0afcbc02b | ||
|   | cd56a6a98d | ||
|   | 9d85893bbb | ||
|   | 9e8a70225f | ||
|   | 96ec795d5e | ||
|   | 65b796070d | ||
|   | 32994812e5 | ||
|   | 66ff9d63a3 | ||
|   | b2a63d4996 | ||
|   | f9f37b7f2a | ||
|   | 7bdd9dd38a | ||
|   | 1e8aae0a89 | ||
|   | cf668e9dc2 | ||
|   | 2e91c8700f | ||
|   | 9d14627daa | ||
|   | 73b8283748 | ||
|   | edeaaa2e63 | ||
|   | d26dd8fc39 | ||
|   | 34640ea735 | ||
|   | 46a2e21ef0 | ||
|   | 508af53e72 | ||
|   | 5f7440608c | ||
|   | 0d1aa38a26 | ||
|   | 929f8c148a | ||
|   | 92db1f5a04 | ||
|   | e66b5ce0bf | ||
|   | 1e17150e9f | ||
|   | 792902de3d | ||
|   | 04d78c3dd5 | ||
|   | 5c8d5bfb84 | ||
|   | 99bff31869 | ||
|   | d949119fb0 | ||
|   | e7b737ece5 | ||
|   | fb8ddac2e8 | 
							
								
								
									
.github/workflows/builder.yml (vendored, 10 lines changed)
									
									
								
							| @@ -190,7 +190,7 @@ jobs: | ||||
|           echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE | ||||
|  | ||||
|       - name: Login to GitHub Container Registry | ||||
|         uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 | ||||
|         uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
| @@ -257,7 +257,7 @@ jobs: | ||||
|           fi | ||||
|  | ||||
|       - name: Login to GitHub Container Registry | ||||
|         uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 | ||||
|         uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
| @@ -332,14 +332,14 @@ jobs: | ||||
|  | ||||
|       - name: Login to DockerHub | ||||
|         if: matrix.registry == 'docker.io/homeassistant' | ||||
|         uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 | ||||
|         uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 | ||||
|         with: | ||||
|           username: ${{ secrets.DOCKERHUB_USERNAME }} | ||||
|           password: ${{ secrets.DOCKERHUB_TOKEN }} | ||||
|  | ||||
|       - name: Login to GitHub Container Registry | ||||
|         if: matrix.registry == 'ghcr.io/home-assistant' | ||||
|         uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 | ||||
|         uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
| @@ -504,7 +504,7 @@ jobs: | ||||
|         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 | ||||
|  | ||||
|       - name: Login to GitHub Container Registry | ||||
|         uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 | ||||
|         uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
|   | ||||
							
								
								
									
.github/workflows/ci.yaml (vendored, 716 lines changed; diff suppressed because it is too large)
											
										
									
								
							
							
								
								
									
.github/workflows/codeql.yml (vendored, 4 lines changed)
									
									
								
							| @@ -24,11 +24,11 @@ jobs: | ||||
|         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 | ||||
|  | ||||
|       - name: Initialize CodeQL | ||||
|         uses: github/codeql-action/init@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6 | ||||
|         uses: github/codeql-action/init@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3 | ||||
|         with: | ||||
|           languages: python | ||||
|  | ||||
|       - name: Perform CodeQL Analysis | ||||
|         uses: github/codeql-action/analyze@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6 | ||||
|         uses: github/codeql-action/analyze@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3 | ||||
|         with: | ||||
|           category: "/language:python" | ||||
|   | ||||
							
								
								
									
.github/workflows/stale.yml (vendored, 6 lines changed)
									
									
								
							| @@ -17,7 +17,7 @@ jobs: | ||||
|       # - No PRs marked as no-stale | ||||
|       # - No issues (-1) | ||||
|       - name: 60 days stale PRs policy | ||||
|         uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0 | ||||
|         uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0 | ||||
|         with: | ||||
|           repo-token: ${{ secrets.GITHUB_TOKEN }} | ||||
|           days-before-stale: 60 | ||||
| @@ -57,7 +57,7 @@ jobs: | ||||
|       # - No issues marked as no-stale or help-wanted | ||||
|       # - No PRs (-1) | ||||
|       - name: 90 days stale issues | ||||
|         uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0 | ||||
|         uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0 | ||||
|         with: | ||||
|           repo-token: ${{ steps.token.outputs.token }} | ||||
|           days-before-stale: 90 | ||||
| @@ -87,7 +87,7 @@ jobs: | ||||
|       # - No Issues marked as no-stale or help-wanted | ||||
|       # - No PRs (-1) | ||||
|       - name: Needs more information stale issues policy | ||||
|         uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0 | ||||
|         uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0 | ||||
|         with: | ||||
|           repo-token: ${{ steps.token.outputs.token }} | ||||
|           only-labels: "needs-more-information" | ||||
|   | ||||
| @@ -203,7 +203,6 @@ homeassistant.components.feedreader.* | ||||
| homeassistant.components.file_upload.* | ||||
| homeassistant.components.filesize.* | ||||
| homeassistant.components.filter.* | ||||
| homeassistant.components.firefly_iii.* | ||||
| homeassistant.components.fitbit.* | ||||
| homeassistant.components.flexit_bacnet.* | ||||
| homeassistant.components.flux_led.* | ||||
| @@ -326,7 +325,6 @@ homeassistant.components.london_underground.* | ||||
| homeassistant.components.lookin.* | ||||
| homeassistant.components.lovelace.* | ||||
| homeassistant.components.luftdaten.* | ||||
| homeassistant.components.lunatone.* | ||||
| homeassistant.components.madvr.* | ||||
| homeassistant.components.manual.* | ||||
| homeassistant.components.mastodon.* | ||||
| @@ -555,7 +553,6 @@ homeassistant.components.vacuum.* | ||||
| homeassistant.components.vallox.* | ||||
| homeassistant.components.valve.* | ||||
| homeassistant.components.velbus.* | ||||
| homeassistant.components.vivotek.* | ||||
| homeassistant.components.vlc_telnet.* | ||||
| homeassistant.components.vodafone_station.* | ||||
| homeassistant.components.volvo.* | ||||
|   | ||||
							
								
								
									
CODEOWNERS (generated, 14 lines changed)
									
									
									
								
							| @@ -492,8 +492,6 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/filesize/ @gjohansson-ST | ||||
| /homeassistant/components/filter/ @dgomes | ||||
| /tests/components/filter/ @dgomes | ||||
| /homeassistant/components/firefly_iii/ @erwindouna | ||||
| /tests/components/firefly_iii/ @erwindouna | ||||
| /homeassistant/components/fireservicerota/ @cyberjunky | ||||
| /tests/components/fireservicerota/ @cyberjunky | ||||
| /homeassistant/components/firmata/ @DaAwesomeP | ||||
| @@ -762,8 +760,8 @@ build.json @home-assistant/supervisor | ||||
| /homeassistant/components/intent/ @home-assistant/core @synesthesiam @arturpragacz | ||||
| /tests/components/intent/ @home-assistant/core @synesthesiam @arturpragacz | ||||
| /homeassistant/components/intesishome/ @jnimmo | ||||
| /homeassistant/components/iometer/ @MaestroOnICe | ||||
| /tests/components/iometer/ @MaestroOnICe | ||||
| /homeassistant/components/iometer/ @jukrebs | ||||
| /tests/components/iometer/ @jukrebs | ||||
| /homeassistant/components/ios/ @robbiet480 | ||||
| /tests/components/ios/ @robbiet480 | ||||
| /homeassistant/components/iotawatt/ @gtdiehl @jyavenard | ||||
| @@ -910,8 +908,6 @@ build.json @home-assistant/supervisor | ||||
| /homeassistant/components/luci/ @mzdrale | ||||
| /homeassistant/components/luftdaten/ @fabaff @frenck | ||||
| /tests/components/luftdaten/ @fabaff @frenck | ||||
| /homeassistant/components/lunatone/ @MoonDevLT | ||||
| /tests/components/lunatone/ @MoonDevLT | ||||
| /homeassistant/components/lupusec/ @majuss @suaveolent | ||||
| /tests/components/lupusec/ @majuss @suaveolent | ||||
| /homeassistant/components/lutron/ @cdheiser @wilburCForce | ||||
| @@ -957,8 +953,6 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/met_eireann/ @DylanGore | ||||
| /homeassistant/components/meteo_france/ @hacf-fr @oncleben31 @Quentame | ||||
| /tests/components/meteo_france/ @hacf-fr @oncleben31 @Quentame | ||||
| /homeassistant/components/meteo_lt/ @xE1H | ||||
| /tests/components/meteo_lt/ @xE1H | ||||
| /homeassistant/components/meteoalarm/ @rolfberkenbosch | ||||
| /homeassistant/components/meteoclimatic/ @adrianmo | ||||
| /tests/components/meteoclimatic/ @adrianmo | ||||
| @@ -1065,8 +1059,6 @@ build.json @home-assistant/supervisor | ||||
| /homeassistant/components/nilu/ @hfurubotten | ||||
| /homeassistant/components/nina/ @DeerMaximum | ||||
| /tests/components/nina/ @DeerMaximum | ||||
| /homeassistant/components/nintendo_parental/ @pantherale0 | ||||
| /tests/components/nintendo_parental/ @pantherale0 | ||||
| /homeassistant/components/nissan_leaf/ @filcole | ||||
| /homeassistant/components/noaa_tides/ @jdelaney72 | ||||
| /homeassistant/components/nobo_hub/ @echoromeo @oyvindwe | ||||
| @@ -1198,6 +1190,8 @@ build.json @home-assistant/supervisor | ||||
| /tests/components/plex/ @jjlawren | ||||
| /homeassistant/components/plugwise/ @CoMPaTech @bouwew | ||||
| /tests/components/plugwise/ @CoMPaTech @bouwew | ||||
| /homeassistant/components/plum_lightpad/ @ColinHarrington @prystupa | ||||
| /tests/components/plum_lightpad/ @ColinHarrington @prystupa | ||||
| /homeassistant/components/point/ @fredrike | ||||
| /tests/components/point/ @fredrike | ||||
| /homeassistant/components/pooldose/ @lmaertin | ||||
|   | ||||
| @@ -34,6 +34,9 @@ INPUT_FIELD_CODE = "code" | ||||
|  | ||||
| DUMMY_SECRET = "FPPTH34D4E3MI2HG" | ||||
|  | ||||
| GOOGLE_AUTHENTICATOR_URL = "https://support.google.com/accounts/answer/1066447" | ||||
| AUTHY_URL = "https://authy.com/" | ||||
|  | ||||
|  | ||||
| def _generate_qr_code(data: str) -> str: | ||||
|     """Generate a base64 PNG string represent QR Code image of data.""" | ||||
| @@ -229,6 +232,8 @@ class TotpSetupFlow(SetupFlow[TotpAuthModule]): | ||||
|                 "code": self._ota_secret, | ||||
|                 "url": self._url, | ||||
|                 "qr_code": self._image, | ||||
|                 "google_authenticator_url": GOOGLE_AUTHENTICATOR_URL, | ||||
|                 "authy_url": AUTHY_URL, | ||||
|             }, | ||||
|             errors=errors, | ||||
|         ) | ||||
|   | ||||
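
The TOTP hunk above moves the Google Authenticator and Authy links into `description_placeholders` on one side of the compare, while the translated description keeps named slots (see the auth `strings.json` hunk further down). A runnable sketch of how the placeholders fill the string; the `.format()` call is a simplification of the substitution the frontend performs:

```python
# Sketch only: Home Assistant's frontend performs the substitution, but the
# mechanics are the same as str.format with named placeholders. The string
# fragment and key names are taken from the hunks in this compare.
description = (
    "we recommend either [Google Authenticator]({google_authenticator_url}) "
    "or [Authy]({authy_url})."
)
placeholders = {
    "google_authenticator_url": "https://support.google.com/accounts/answer/1066447",
    "authy_url": "https://authy.com/",
}
print(description.format(**placeholders))
```
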
| @@ -616,34 +616,34 @@ async def async_enable_logging( | ||||
|         ), | ||||
|     ) | ||||
|  | ||||
|     logger = logging.getLogger() | ||||
|     logger.setLevel(logging.INFO if verbose else logging.WARNING) | ||||
|  | ||||
|     # Log errors to a file if we have write access to file or config dir | ||||
|     if log_file is None: | ||||
|         default_log_path = hass.config.path(ERROR_LOG_FILENAME) | ||||
|         if "SUPERVISOR" in os.environ: | ||||
|             _LOGGER.info("Running in Supervisor, not logging to file") | ||||
|             # Rename the default log file if it exists, since previous versions created | ||||
|             # it even on Supervisor | ||||
|             if os.path.isfile(default_log_path): | ||||
|                 with contextlib.suppress(OSError): | ||||
|                     os.rename(default_log_path, f"{default_log_path}.old") | ||||
|             err_log_path = None | ||||
|         else: | ||||
|             err_log_path = default_log_path | ||||
|         err_log_path = hass.config.path(ERROR_LOG_FILENAME) | ||||
|     else: | ||||
|         err_log_path = os.path.abspath(log_file) | ||||
|  | ||||
|     if err_log_path: | ||||
|     err_path_exists = os.path.isfile(err_log_path) | ||||
|     err_dir = os.path.dirname(err_log_path) | ||||
|  | ||||
|     # Check if we can write to the error log if it exists or that | ||||
|     # we can create files in the containing directory if not. | ||||
|     if (err_path_exists and os.access(err_log_path, os.W_OK)) or ( | ||||
|         not err_path_exists and os.access(err_dir, os.W_OK) | ||||
|     ): | ||||
|         err_handler = await hass.async_add_executor_job( | ||||
|             _create_log_file, err_log_path, log_rotate_days | ||||
|         ) | ||||
|  | ||||
|         err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME)) | ||||
|  | ||||
|         logger = logging.getLogger() | ||||
|         logger.addHandler(err_handler) | ||||
|         logger.setLevel(logging.INFO if verbose else logging.WARNING) | ||||
|  | ||||
|         # Save the log file location for access by other components. | ||||
|         hass.data[DATA_LOGGING] = err_log_path | ||||
|     else: | ||||
|         _LOGGER.error("Unable to set up error log %s (access denied)", err_log_path) | ||||
|  | ||||
|     async_activate_log_queue_handler(hass) | ||||
|  | ||||
|   | ||||
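
The bootstrap hunk above differs in how the error-log path is chosen and validated: one side special-cases Supervisor installs and skips file logging, the other derives the path and gates the handler on a writability check. A minimal, standalone sketch of that writability check (the path below is only an example):

```python
# Allow the error log if the file exists and is writable, or if it does not
# exist but its containing directory is writable. Mirrors the os.access
# check visible in the hunk.
import os


def can_write_error_log(err_log_path: str) -> bool:
    err_path_exists = os.path.isfile(err_log_path)
    err_dir = os.path.dirname(err_log_path)
    return (err_path_exists and os.access(err_log_path, os.W_OK)) or (
        not err_path_exists and os.access(err_dir, os.W_OK)
    )


print(can_write_error_log("/tmp/home-assistant.log"))
```
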
							
								
								
									
homeassistant/brands/ibm.json (new file, 5 lines)
									
								
							| @@ -0,0 +1,5 @@ | ||||
| { | ||||
|   "domain": "ibm", | ||||
|   "name": "IBM", | ||||
|   "integrations": ["watson_iot", "watson_tts"] | ||||
| } | ||||
| @@ -12,13 +12,11 @@ from homeassistant.components.bluetooth import async_get_scanner | ||||
| from homeassistant.config_entries import ConfigEntry | ||||
| from homeassistant.const import CONF_ADDRESS | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.helpers.debounce import Debouncer | ||||
| from homeassistant.helpers.update_coordinator import DataUpdateCoordinator | ||||
|  | ||||
| from .const import CONF_IS_NEW_STYLE_SCALE | ||||
|  | ||||
| SCAN_INTERVAL = timedelta(seconds=15) | ||||
| UPDATE_DEBOUNCE_TIME = 0.2 | ||||
|  | ||||
| _LOGGER = logging.getLogger(__name__) | ||||
|  | ||||
| @@ -40,19 +38,11 @@ class AcaiaCoordinator(DataUpdateCoordinator[None]): | ||||
|             config_entry=entry, | ||||
|         ) | ||||
|  | ||||
|         debouncer = Debouncer( | ||||
|             hass=hass, | ||||
|             logger=_LOGGER, | ||||
|             cooldown=UPDATE_DEBOUNCE_TIME, | ||||
|             immediate=True, | ||||
|             function=self.async_update_listeners, | ||||
|         ) | ||||
|  | ||||
|         self._scale = AcaiaScale( | ||||
|             address_or_ble_device=entry.data[CONF_ADDRESS], | ||||
|             name=entry.title, | ||||
|             is_new_style_scale=entry.data[CONF_IS_NEW_STYLE_SCALE], | ||||
|             notify_callback=debouncer.async_schedule_call, | ||||
|             notify_callback=self.async_update_listeners, | ||||
|             scanner=async_get_scanner(hass), | ||||
|         ) | ||||
|  | ||||
|   | ||||
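
In the Acaia coordinator hunk above, one side routes the scale's `notify_callback` through `helpers.debounce.Debouncer` (cooldown 0.2 s, `immediate=True`) while the other calls `async_update_listeners` directly. A toy asyncio illustration of the leading-edge debounce idea, not the Home Assistant helper itself:

```python
import asyncio


class SimpleDebouncer:
    """Leading-edge debounce: fire immediately, then swallow calls during cooldown."""

    def __init__(self, cooldown: float, function) -> None:
        self._cooldown = cooldown
        self._function = function
        self._pending: asyncio.TimerHandle | None = None

    def schedule(self) -> None:
        if self._pending is None:
            self._function()  # immediate=True behaviour: run right away
            loop = asyncio.get_running_loop()
            self._pending = loop.call_later(self._cooldown, self._clear)

    def _clear(self) -> None:
        self._pending = None


async def main() -> None:
    debouncer = SimpleDebouncer(0.2, lambda: print("update listeners"))
    for _ in range(5):        # five rapid scale notifications...
        debouncer.schedule()  # ...collapse into a single listener update
    await asyncio.sleep(0.3)


asyncio.run(main())
```
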
| @@ -71,4 +71,4 @@ POLLEN_CATEGORY_MAP = { | ||||
| } | ||||
| UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=10) | ||||
| UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6) | ||||
| UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(hours=30) | ||||
| UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(minutes=30) | ||||
|   | ||||
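
The only difference in the AccuWeather hunk above is the hourly-forecast interval: `timedelta(hours=30)` versus `timedelta(minutes=30)`, a factor of 60. A one-line check makes the magnitude of that change explicit:

```python
from datetime import timedelta

# The two intervals from the hunk differ by a factor of 60.
print(timedelta(hours=30) / timedelta(minutes=30))  # 60.0
```
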
| @@ -1,6 +1,9 @@ | ||||
| { | ||||
|   "entity": { | ||||
|     "sensor": { | ||||
|       "air_quality": { | ||||
|         "default": "mdi:air-filter" | ||||
|       }, | ||||
|       "cloud_ceiling": { | ||||
|         "default": "mdi:weather-fog" | ||||
|       }, | ||||
| @@ -34,9 +37,6 @@ | ||||
|       "thunderstorm_probability_night": { | ||||
|         "default": "mdi:weather-lightning" | ||||
|       }, | ||||
|       "translation_key": { | ||||
|         "default": "mdi:air-filter" | ||||
|       }, | ||||
|       "tree_pollen": { | ||||
|         "default": "mdi:tree-outline" | ||||
|       }, | ||||
|   | ||||
| @@ -1,7 +1,9 @@ | ||||
| """Airgradient Update platform.""" | ||||
|  | ||||
| from datetime import timedelta | ||||
| import logging | ||||
|  | ||||
| from airgradient import AirGradientConnectionError | ||||
| from propcache.api import cached_property | ||||
|  | ||||
| from homeassistant.components.update import UpdateDeviceClass, UpdateEntity | ||||
| @@ -13,6 +15,7 @@ from .entity import AirGradientEntity | ||||
|  | ||||
| PARALLEL_UPDATES = 1 | ||||
| SCAN_INTERVAL = timedelta(hours=1) | ||||
| _LOGGER = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
| async def async_setup_entry( | ||||
| @@ -31,6 +34,7 @@ class AirGradientUpdate(AirGradientEntity, UpdateEntity): | ||||
|     """Representation of Airgradient Update.""" | ||||
|  | ||||
|     _attr_device_class = UpdateDeviceClass.FIRMWARE | ||||
|     _server_unreachable_logged = False | ||||
|  | ||||
|     def __init__(self, coordinator: AirGradientCoordinator) -> None: | ||||
|         """Initialize the entity.""" | ||||
| @@ -47,10 +51,27 @@ class AirGradientUpdate(AirGradientEntity, UpdateEntity): | ||||
|         """Return the installed version of the entity.""" | ||||
|         return self.coordinator.data.measures.firmware_version | ||||
|  | ||||
|     @property | ||||
|     def available(self) -> bool: | ||||
|         """Return if entity is available.""" | ||||
|         return super().available and self._attr_available | ||||
|  | ||||
|     async def async_update(self) -> None: | ||||
|         """Update the entity.""" | ||||
|         self._attr_latest_version = ( | ||||
|             await self.coordinator.client.get_latest_firmware_version( | ||||
|                 self.coordinator.serial_number | ||||
|         try: | ||||
|             self._attr_latest_version = ( | ||||
|                 await self.coordinator.client.get_latest_firmware_version( | ||||
|                     self.coordinator.serial_number | ||||
|                 ) | ||||
|             ) | ||||
|         ) | ||||
|         except AirGradientConnectionError: | ||||
|             self._attr_latest_version = None | ||||
|             self._attr_available = False | ||||
|             if not self._server_unreachable_logged: | ||||
|                 _LOGGER.error( | ||||
|                     "Unable to connect to AirGradient server to check for updates" | ||||
|                 ) | ||||
|                 self._server_unreachable_logged = True | ||||
|         else: | ||||
|             self._server_unreachable_logged = False | ||||
|             self._attr_available = True | ||||
|   | ||||
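
One side of the AirGradient update hunk above wraps the latest-firmware lookup in a try/except, flips availability, and logs the connection failure only once until the server becomes reachable again. A standalone sketch of that log-once pattern; `fetch_latest` and the exception type are stand-ins for the airgradient client call and `AirGradientConnectionError`:

```python
import logging

_LOGGER = logging.getLogger(__name__)


class UpdateChecker:
    """Log a connection failure once, then stay quiet until it recovers."""

    def __init__(self) -> None:
        self.available = True
        self.latest_version: str | None = None
        self._unreachable_logged = False

    def refresh(self, fetch_latest) -> None:
        try:
            self.latest_version = fetch_latest()
        except ConnectionError:
            self.latest_version = None
            self.available = False
            if not self._unreachable_logged:
                _LOGGER.error("Unable to reach the update server")
                self._unreachable_logged = True
        else:
            self._unreachable_logged = False
            self.available = True
```
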
| @@ -26,6 +26,10 @@ from .const import DOMAIN | ||||
| _LOGGER = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
| # Documentation URL for API key generation | ||||
| _API_KEY_URL = "https://docs.airnowapi.org/account/request/" | ||||
|  | ||||
|  | ||||
| async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> bool: | ||||
|     """Validate the user input allows us to connect. | ||||
|  | ||||
| @@ -114,6 +118,7 @@ class AirNowConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|                     ), | ||||
|                 } | ||||
|             ), | ||||
|             description_placeholders={"api_key_url": _API_KEY_URL}, | ||||
|             errors=errors, | ||||
|         ) | ||||
|  | ||||
|   | ||||
| @@ -2,7 +2,7 @@ | ||||
|   "config": { | ||||
|     "step": { | ||||
|       "user": { | ||||
|         "description": "To generate API key go to https://docs.airnowapi.org/account/request/", | ||||
|         "description": "To generate API key go to {api_key_url}", | ||||
|         "data": { | ||||
|           "api_key": "[%key:common::config_flow::data::api_key%]", | ||||
|           "latitude": "[%key:common::config_flow::data::latitude%]", | ||||
|   | ||||
| @@ -2,7 +2,6 @@ | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| from collections.abc import Mapping | ||||
| import logging | ||||
| from typing import Any | ||||
|  | ||||
| @@ -15,7 +14,7 @@ from airos.exceptions import ( | ||||
| ) | ||||
| import voluptuous as vol | ||||
|  | ||||
| from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult | ||||
| from homeassistant.config_entries import ConfigFlow, ConfigFlowResult | ||||
| from homeassistant.const import ( | ||||
|     CONF_HOST, | ||||
|     CONF_PASSWORD, | ||||
| @@ -25,11 +24,6 @@ from homeassistant.const import ( | ||||
| ) | ||||
| from homeassistant.data_entry_flow import section | ||||
| from homeassistant.helpers.aiohttp_client import async_get_clientsession | ||||
| from homeassistant.helpers.selector import ( | ||||
|     TextSelector, | ||||
|     TextSelectorConfig, | ||||
|     TextSelectorType, | ||||
| ) | ||||
|  | ||||
| from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, SECTION_ADVANCED_SETTINGS | ||||
| from .coordinator import AirOS8 | ||||
| @@ -60,107 +54,50 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|     VERSION = 1 | ||||
|     MINOR_VERSION = 2 | ||||
|  | ||||
|     def __init__(self) -> None: | ||||
|         """Initialize the config flow.""" | ||||
|         super().__init__() | ||||
|         self.airos_device: AirOS8 | ||||
|         self.errors: dict[str, str] = {} | ||||
|  | ||||
|     async def async_step_user( | ||||
|         self, user_input: dict[str, Any] | None = None | ||||
|         self, | ||||
|         user_input: dict[str, Any] | None = None, | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Handle the manual input of host and credentials.""" | ||||
|         self.errors = {} | ||||
|         """Handle the initial step.""" | ||||
|         errors: dict[str, str] = {} | ||||
|         if user_input is not None: | ||||
|             validated_info = await self._validate_and_get_device_info(user_input) | ||||
|             if validated_info: | ||||
|                 return self.async_create_entry( | ||||
|                     title=validated_info["title"], | ||||
|                     data=validated_info["data"], | ||||
|                 ) | ||||
|         return self.async_show_form( | ||||
|             step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=self.errors | ||||
|         ) | ||||
|             # By default airOS 8 comes with self-signed SSL certificates, | ||||
|             # with no option in the web UI to change or upload a custom certificate. | ||||
|             session = async_get_clientsession( | ||||
|                 self.hass, | ||||
|                 verify_ssl=user_input[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL], | ||||
|             ) | ||||
|  | ||||
|     async def _validate_and_get_device_info( | ||||
|         self, config_data: dict[str, Any] | ||||
|     ) -> dict[str, Any] | None: | ||||
|         """Validate user input with the device API.""" | ||||
|         # By default airOS 8 comes with self-signed SSL certificates, | ||||
|         # with no option in the web UI to change or upload a custom certificate. | ||||
|         session = async_get_clientsession( | ||||
|             self.hass, | ||||
|             verify_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL], | ||||
|         ) | ||||
|             airos_device = AirOS8( | ||||
|                 host=user_input[CONF_HOST], | ||||
|                 username=user_input[CONF_USERNAME], | ||||
|                 password=user_input[CONF_PASSWORD], | ||||
|                 session=session, | ||||
|                 use_ssl=user_input[SECTION_ADVANCED_SETTINGS][CONF_SSL], | ||||
|             ) | ||||
|             try: | ||||
|                 await airos_device.login() | ||||
|                 airos_data = await airos_device.status() | ||||
|  | ||||
|         airos_device = AirOS8( | ||||
|             host=config_data[CONF_HOST], | ||||
|             username=config_data[CONF_USERNAME], | ||||
|             password=config_data[CONF_PASSWORD], | ||||
|             session=session, | ||||
|             use_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_SSL], | ||||
|         ) | ||||
|         try: | ||||
|             await airos_device.login() | ||||
|             airos_data = await airos_device.status() | ||||
|  | ||||
|         except ( | ||||
|             AirOSConnectionSetupError, | ||||
|             AirOSDeviceConnectionError, | ||||
|         ): | ||||
|             self.errors["base"] = "cannot_connect" | ||||
|         except (AirOSConnectionAuthenticationError, AirOSDataMissingError): | ||||
|             self.errors["base"] = "invalid_auth" | ||||
|         except AirOSKeyDataMissingError: | ||||
|             self.errors["base"] = "key_data_missing" | ||||
|         except Exception: | ||||
|             _LOGGER.exception("Unexpected exception during credential validation") | ||||
|             self.errors["base"] = "unknown" | ||||
|         else: | ||||
|             await self.async_set_unique_id(airos_data.derived.mac) | ||||
|  | ||||
|             if self.source == SOURCE_REAUTH: | ||||
|                 self._abort_if_unique_id_mismatch() | ||||
|             except ( | ||||
|                 AirOSConnectionSetupError, | ||||
|                 AirOSDeviceConnectionError, | ||||
|             ): | ||||
|                 errors["base"] = "cannot_connect" | ||||
|             except (AirOSConnectionAuthenticationError, AirOSDataMissingError): | ||||
|                 errors["base"] = "invalid_auth" | ||||
|             except AirOSKeyDataMissingError: | ||||
|                 errors["base"] = "key_data_missing" | ||||
|             except Exception: | ||||
|                 _LOGGER.exception("Unexpected exception") | ||||
|                 errors["base"] = "unknown" | ||||
|             else: | ||||
|                 await self.async_set_unique_id(airos_data.derived.mac) | ||||
|                 self._abort_if_unique_id_configured() | ||||
|  | ||||
|             return {"title": airos_data.host.hostname, "data": config_data} | ||||
|  | ||||
|         return None | ||||
|  | ||||
|     async def async_step_reauth( | ||||
|         self, | ||||
|         user_input: Mapping[str, Any], | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Perform reauthentication upon an API authentication error.""" | ||||
|         return await self.async_step_reauth_confirm(user_input) | ||||
|  | ||||
|     async def async_step_reauth_confirm( | ||||
|         self, | ||||
|         user_input: Mapping[str, Any], | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Perform reauthentication upon an API authentication error.""" | ||||
|         self.errors = {} | ||||
|  | ||||
|         if user_input: | ||||
|             validate_data = {**self._get_reauth_entry().data, **user_input} | ||||
|             if await self._validate_and_get_device_info(config_data=validate_data): | ||||
|                 return self.async_update_reload_and_abort( | ||||
|                     self._get_reauth_entry(), | ||||
|                     data_updates=validate_data, | ||||
|                 return self.async_create_entry( | ||||
|                     title=airos_data.host.hostname, data=user_input | ||||
|                 ) | ||||
|  | ||||
|         return self.async_show_form( | ||||
|             step_id="reauth_confirm", | ||||
|             data_schema=vol.Schema( | ||||
|                 { | ||||
|                     vol.Required(CONF_PASSWORD): TextSelector( | ||||
|                         TextSelectorConfig( | ||||
|                             type=TextSelectorType.PASSWORD, | ||||
|                             autocomplete="current-password", | ||||
|                         ) | ||||
|                     ), | ||||
|                 } | ||||
|             ), | ||||
|             errors=self.errors, | ||||
|             step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors | ||||
|         ) | ||||
|   | ||||
| @@ -14,7 +14,7 @@ from airos.exceptions import ( | ||||
|  | ||||
| from homeassistant.config_entries import ConfigEntry | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.exceptions import ConfigEntryAuthFailed | ||||
| from homeassistant.exceptions import ConfigEntryError | ||||
| from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed | ||||
|  | ||||
| from .const import DOMAIN, SCAN_INTERVAL | ||||
| @@ -47,9 +47,9 @@ class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOS8Data]): | ||||
|         try: | ||||
|             await self.airos_device.login() | ||||
|             return await self.airos_device.status() | ||||
|         except AirOSConnectionAuthenticationError as err: | ||||
|         except (AirOSConnectionAuthenticationError,) as err: | ||||
|             _LOGGER.exception("Error authenticating with airOS device") | ||||
|             raise ConfigEntryAuthFailed( | ||||
|             raise ConfigEntryError( | ||||
|                 translation_domain=DOMAIN, translation_key="invalid_auth" | ||||
|             ) from err | ||||
|         except ( | ||||
|   | ||||
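
The airOS coordinator hunk above switches between raising `ConfigEntryError` and `ConfigEntryAuthFailed` when authentication fails; in Home Assistant the latter triggers a re-authentication flow while the former simply marks the entry as failed, which matches the presence or absence of the `reauth_confirm` step in the surrounding config-flow and strings hunks. A toy illustration of that distinction with stand-in exception classes, not the real `homeassistant.exceptions` hierarchy:

```python
# Stand-in classes only; they mirror the idea, not Home Assistant's own types.
class ConfigEntryError(Exception):
    """Setup failed."""


class ConfigEntryAuthFailed(ConfigEntryError):
    """Credentials rejected; the user should re-authenticate."""


def handle_setup_error(err: Exception) -> str:
    if isinstance(err, ConfigEntryAuthFailed):
        return "start reauth flow"
    if isinstance(err, ConfigEntryError):
        return "mark entry as failed"
    return "retry later"


print(handle_setup_error(ConfigEntryAuthFailed("invalid_auth")))  # start reauth flow
print(handle_setup_error(ConfigEntryError("key_data_missing")))   # mark entry as failed
```
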
| @@ -2,14 +2,6 @@ | ||||
|   "config": { | ||||
|     "flow_title": "Ubiquiti airOS device", | ||||
|     "step": { | ||||
|       "reauth_confirm": { | ||||
|         "data": { | ||||
|           "password": "[%key:common::config_flow::data::password%]" | ||||
|         }, | ||||
|         "data_description": { | ||||
|           "password": "[%key:component::airos::config::step::user::data_description::password%]" | ||||
|         } | ||||
|       }, | ||||
|       "user": { | ||||
|         "data": { | ||||
|           "host": "[%key:common::config_flow::data::host%]", | ||||
| @@ -42,9 +34,7 @@ | ||||
|       "unknown": "[%key:common::config_flow::error::unknown%]" | ||||
|     }, | ||||
|     "abort": { | ||||
|       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", | ||||
|       "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", | ||||
|       "unique_id_mismatch": "Re-authentication should be used for the same device not a new one" | ||||
|       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" | ||||
|     } | ||||
|   }, | ||||
|   "entity": { | ||||
|   | ||||
| @@ -7,5 +7,5 @@ | ||||
|   "integration_type": "hub", | ||||
|   "iot_class": "local_polling", | ||||
|   "loggers": ["aioairq"], | ||||
|   "requirements": ["aioairq==0.4.6"] | ||||
|   "requirements": ["aioairq==0.4.7"] | ||||
| } | ||||
|   | ||||
| @@ -23,10 +23,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema( | ||||
|     } | ||||
| ) | ||||
|  | ||||
| URL_API_INTEGRATION = { | ||||
|     "url": "https://dashboard.airthings.com/integrations/api-integration" | ||||
| } | ||||
|  | ||||
|  | ||||
| class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|     """Handle a config flow for Airthings.""" | ||||
| @@ -41,7 +37,11 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|             return self.async_show_form( | ||||
|                 step_id="user", | ||||
|                 data_schema=STEP_USER_DATA_SCHEMA, | ||||
|                 description_placeholders=URL_API_INTEGRATION, | ||||
|                 description_placeholders={ | ||||
|                     "url": ( | ||||
|                         "https://dashboard.airthings.com/integrations/api-integration" | ||||
|                     ), | ||||
|                 }, | ||||
|             ) | ||||
|  | ||||
|         errors = {} | ||||
| @@ -65,8 +65,5 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|             return self.async_create_entry(title="Airthings", data=user_input) | ||||
|  | ||||
|         return self.async_show_form( | ||||
|             step_id="user", | ||||
|             data_schema=STEP_USER_DATA_SCHEMA, | ||||
|             errors=errors, | ||||
|             description_placeholders=URL_API_INTEGRATION, | ||||
|             step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors | ||||
|         ) | ||||
|   | ||||
| @@ -4,9 +4,9 @@ | ||||
|       "user": { | ||||
|         "data": { | ||||
|           "id": "ID", | ||||
|           "secret": "Secret" | ||||
|         }, | ||||
|         "description": "Log in at {url} to find your credentials" | ||||
|           "secret": "Secret", | ||||
|           "description": "Login at {url} to find your credentials" | ||||
|         } | ||||
|       } | ||||
|     }, | ||||
|     "error": { | ||||
|   | ||||
| @@ -6,13 +6,8 @@ import dataclasses | ||||
| import logging | ||||
| from typing import Any | ||||
|  | ||||
| from airthings_ble import ( | ||||
|     AirthingsBluetoothDeviceData, | ||||
|     AirthingsDevice, | ||||
|     UnsupportedDeviceError, | ||||
| ) | ||||
| from airthings_ble import AirthingsBluetoothDeviceData, AirthingsDevice | ||||
| from bleak import BleakError | ||||
| from habluetooth import BluetoothServiceInfoBleak | ||||
| import voluptuous as vol | ||||
|  | ||||
| from homeassistant.components import bluetooth | ||||
| @@ -32,7 +27,6 @@ SERVICE_UUIDS = [ | ||||
|     "b42e4a8e-ade7-11e4-89d3-123b93f75cba", | ||||
|     "b42e1c08-ade7-11e4-89d3-123b93f75cba", | ||||
|     "b42e3882-ade7-11e4-89d3-123b93f75cba", | ||||
|     "b42e90a2-ade7-11e4-89d3-123b93f75cba", | ||||
| ] | ||||
|  | ||||
|  | ||||
| @@ -43,7 +37,6 @@ class Discovery: | ||||
|     name: str | ||||
|     discovery_info: BluetoothServiceInfo | ||||
|     device: AirthingsDevice | ||||
|     data: AirthingsBluetoothDeviceData | ||||
|  | ||||
|  | ||||
| def get_name(device: AirthingsDevice) -> str: | ||||
| @@ -51,7 +44,7 @@ def get_name(device: AirthingsDevice) -> str: | ||||
|  | ||||
|     name = device.friendly_name() | ||||
|     if identifier := device.identifier: | ||||
|         name += f" ({device.model.value}{identifier})" | ||||
|         name += f" ({identifier})" | ||||
|     return name | ||||
|  | ||||
|  | ||||
| @@ -69,8 +62,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|         self._discovered_device: Discovery | None = None | ||||
|         self._discovered_devices: dict[str, Discovery] = {} | ||||
|  | ||||
|     async def _get_device( | ||||
|         self, data: AirthingsBluetoothDeviceData, discovery_info: BluetoothServiceInfo | ||||
|     async def _get_device_data( | ||||
|         self, discovery_info: BluetoothServiceInfo | ||||
|     ) -> AirthingsDevice: | ||||
|         ble_device = bluetooth.async_ble_device_from_address( | ||||
|             self.hass, discovery_info.address | ||||
| @@ -79,8 +72,10 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|             _LOGGER.debug("no ble_device in _get_device_data") | ||||
|             raise AirthingsDeviceUpdateError("No ble_device") | ||||
|  | ||||
|         airthings = AirthingsBluetoothDeviceData(_LOGGER) | ||||
|  | ||||
|         try: | ||||
|             device = await data.update_device(ble_device) | ||||
|             data = await airthings.update_device(ble_device) | ||||
|         except BleakError as err: | ||||
|             _LOGGER.error( | ||||
|                 "Error connecting to and getting data from %s: %s", | ||||
| @@ -88,15 +83,12 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|                 err, | ||||
|             ) | ||||
|             raise AirthingsDeviceUpdateError("Failed getting device data") from err | ||||
|         except UnsupportedDeviceError: | ||||
|             _LOGGER.debug("Skipping unsupported device: %s", discovery_info.name) | ||||
|             raise | ||||
|         except Exception as err: | ||||
|             _LOGGER.error( | ||||
|                 "Unknown error occurred from %s: %s", discovery_info.address, err | ||||
|             ) | ||||
|             raise | ||||
|         return device | ||||
|         return data | ||||
|  | ||||
|     async def async_step_bluetooth( | ||||
|         self, discovery_info: BluetoothServiceInfo | ||||
| @@ -106,21 +98,17 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|         await self.async_set_unique_id(discovery_info.address) | ||||
|         self._abort_if_unique_id_configured() | ||||
|  | ||||
|         data = AirthingsBluetoothDeviceData(logger=_LOGGER) | ||||
|  | ||||
|         try: | ||||
|             device = await self._get_device(data=data, discovery_info=discovery_info) | ||||
|             device = await self._get_device_data(discovery_info) | ||||
|         except AirthingsDeviceUpdateError: | ||||
|             return self.async_abort(reason="cannot_connect") | ||||
|         except UnsupportedDeviceError: | ||||
|             return self.async_abort(reason="unsupported_device") | ||||
|         except Exception: | ||||
|             _LOGGER.exception("Unknown error occurred") | ||||
|             return self.async_abort(reason="unknown") | ||||
|  | ||||
|         name = get_name(device) | ||||
|         self.context["title_placeholders"] = {"name": name} | ||||
|         self._discovered_device = Discovery(name, discovery_info, device, data=data) | ||||
|         self._discovered_device = Discovery(name, discovery_info, device) | ||||
|  | ||||
|         return await self.async_step_bluetooth_confirm() | ||||
|  | ||||
| @@ -129,12 +117,6 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Confirm discovery.""" | ||||
|         if user_input is not None: | ||||
|             if ( | ||||
|                 self._discovered_device is not None | ||||
|                 and self._discovered_device.device.firmware.need_firmware_upgrade | ||||
|             ): | ||||
|                 return self.async_abort(reason="firmware_upgrade_required") | ||||
|  | ||||
|             return self.async_create_entry( | ||||
|                 title=self.context["title_placeholders"]["name"], data={} | ||||
|             ) | ||||
| @@ -155,9 +137,6 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|             self._abort_if_unique_id_configured() | ||||
|             discovery = self._discovered_devices[address] | ||||
|  | ||||
|             if discovery.device.firmware.need_firmware_upgrade: | ||||
|                 return self.async_abort(reason="firmware_upgrade_required") | ||||
|  | ||||
|             self.context["title_placeholders"] = { | ||||
|                 "name": discovery.name, | ||||
|             } | ||||
| @@ -167,53 +146,32 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|             return self.async_create_entry(title=discovery.name, data={}) | ||||
|  | ||||
|         current_addresses = self._async_current_ids(include_ignore=False) | ||||
|         devices: list[BluetoothServiceInfoBleak] = [] | ||||
|         for discovery_info in async_discovered_service_info(self.hass): | ||||
|             address = discovery_info.address | ||||
|             if address in current_addresses or address in self._discovered_devices: | ||||
|                 continue | ||||
|  | ||||
|             if MFCT_ID not in discovery_info.manufacturer_data: | ||||
|                 continue | ||||
|             if not any(uuid in SERVICE_UUIDS for uuid in discovery_info.service_uuids): | ||||
|                 _LOGGER.debug( | ||||
|                     "Skipping unsupported device: %s (%s)", discovery_info.name, address | ||||
|                 ) | ||||
|                 continue | ||||
|             devices.append(discovery_info) | ||||
|  | ||||
|         for discovery_info in devices: | ||||
|             address = discovery_info.address | ||||
|             data = AirthingsBluetoothDeviceData(logger=_LOGGER) | ||||
|             if not any(uuid in SERVICE_UUIDS for uuid in discovery_info.service_uuids): | ||||
|                 continue | ||||
|  | ||||
|             try: | ||||
|                 device = await self._get_device(data, discovery_info) | ||||
|                 device = await self._get_device_data(discovery_info) | ||||
|             except AirthingsDeviceUpdateError: | ||||
|                 _LOGGER.error( | ||||
|                     "Error connecting to and getting data from %s (%s)", | ||||
|                     discovery_info.name, | ||||
|                     discovery_info.address, | ||||
|                 ) | ||||
|                 continue | ||||
|             except UnsupportedDeviceError: | ||||
|                 _LOGGER.debug( | ||||
|                     "Skipping unsupported device: %s (%s)", | ||||
|                     discovery_info.name, | ||||
|                     discovery_info.address, | ||||
|                 ) | ||||
|                 continue | ||||
|                 return self.async_abort(reason="cannot_connect") | ||||
|             except Exception: | ||||
|                 _LOGGER.exception("Unknown error occurred") | ||||
|                 return self.async_abort(reason="unknown") | ||||
|             name = get_name(device) | ||||
|             _LOGGER.debug("Discovered Airthings device: %s (%s)", name, address) | ||||
|             self._discovered_devices[address] = Discovery( | ||||
|                 name, discovery_info, device, data | ||||
|             ) | ||||
|             self._discovered_devices[address] = Discovery(name, discovery_info, device) | ||||
|  | ||||
|         if not self._discovered_devices: | ||||
|             return self.async_abort(reason="no_devices_found") | ||||
|  | ||||
|         titles = { | ||||
|             address: get_name(discovery.device) | ||||
|             address: discovery.device.name | ||||
|             for (address, discovery) in self._discovered_devices.items() | ||||
|         } | ||||
|         return self.async_show_form( | ||||
|   | ||||
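
One side of the Airthings BLE config-flow hunk above filters discovered advertisements by manufacturer ID and known service UUIDs before polling them, skipping devices that do not match instead of aborting the whole flow. A standalone sketch of that filter; the dataclass stands in for `BluetoothServiceInfoBleak`, while `MFCT_ID` (820) and the UUIDs follow the hunks in this compare:

```python
from dataclasses import dataclass, field

MFCT_ID = 820
SERVICE_UUIDS = [
    "b42e1c08-ade7-11e4-89d3-123b93f75cba",
    "b42e3882-ade7-11e4-89d3-123b93f75cba",
    "b42e4a8e-ade7-11e4-89d3-123b93f75cba",
    # A fourth UUID ("b42e90a2-...") appears on only one side of this compare.
]


@dataclass
class Advertisement:
    address: str
    manufacturer_data: dict[int, bytes] = field(default_factory=dict)
    service_uuids: list[str] = field(default_factory=list)


def is_supported(info: Advertisement) -> bool:
    if MFCT_ID not in info.manufacturer_data:
        return False
    return any(uuid in SERVICE_UUIDS for uuid in info.service_uuids)


print(is_supported(Advertisement("AA:BB", {820: b""}, [SERVICE_UUIDS[0]])))  # True
print(is_supported(Advertisement("CC:DD", {99: b""}, [])))                   # False
```
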
| @@ -17,10 +17,6 @@ | ||||
|     { | ||||
|       "manufacturer_id": 820, | ||||
|       "service_uuid": "b42e3882-ade7-11e4-89d3-123b93f75cba" | ||||
|     }, | ||||
|     { | ||||
|       "manufacturer_id": 820, | ||||
|       "service_uuid": "b42e90a2-ade7-11e4-89d3-123b93f75cba" | ||||
|     } | ||||
|   ], | ||||
|   "codeowners": ["@vincegio", "@LaStrada"], | ||||
| @@ -28,5 +24,5 @@ | ||||
|   "dependencies": ["bluetooth_adapters"], | ||||
|   "documentation": "https://www.home-assistant.io/integrations/airthings_ble", | ||||
|   "iot_class": "local_polling", | ||||
|   "requirements": ["airthings-ble==1.1.1"] | ||||
|   "requirements": ["airthings-ble==0.9.2"] | ||||
| } | ||||
|   | ||||
| @@ -114,8 +114,6 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = { | ||||
|     ), | ||||
| } | ||||
|  | ||||
| PARALLEL_UPDATES = 0 | ||||
|  | ||||
|  | ||||
| @callback | ||||
| def async_migrate(hass: HomeAssistant, address: str, sensor_name: str) -> None: | ||||
|   | ||||
| @@ -6,9 +6,6 @@ | ||||
|         "description": "[%key:component::bluetooth::config::step::user::description%]", | ||||
|         "data": { | ||||
|           "address": "[%key:common::config_flow::data::device%]" | ||||
|         }, | ||||
|         "data_description": { | ||||
|           "address": "The Airthings devices discovered via Bluetooth." | ||||
|         } | ||||
|       }, | ||||
|       "bluetooth_confirm": { | ||||
| @@ -20,8 +17,6 @@ | ||||
|       "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", | ||||
|       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", | ||||
|       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", | ||||
|       "firmware_upgrade_required": "Your device requires a firmware upgrade. Please use the Airthings app (Android/iOS) to upgrade it.", | ||||
|       "unsupported_device": "Unsupported device", | ||||
|       "unknown": "[%key:common::config_flow::error::unknown%]" | ||||
|     } | ||||
|   }, | ||||
|   | ||||
| @@ -11,5 +11,5 @@ | ||||
|   "documentation": "https://www.home-assistant.io/integrations/airzone", | ||||
|   "iot_class": "local_polling", | ||||
|   "loggers": ["aioairzone"], | ||||
|   "requirements": ["aioairzone==1.0.1"] | ||||
|   "requirements": ["aioairzone==1.0.2"] | ||||
| } | ||||
|   | ||||
| @@ -8,5 +8,5 @@ | ||||
|   "iot_class": "cloud_polling", | ||||
|   "loggers": ["aioamazondevices"], | ||||
|   "quality_scale": "platinum", | ||||
|   "requirements": ["aioamazondevices==6.2.9"] | ||||
|   "requirements": ["aioamazondevices==6.4.6"] | ||||
| } | ||||
|   | ||||
| @@ -65,31 +65,6 @@ SENSOR_DESCRIPTIONS = [ | ||||
|         suggested_display_precision=2, | ||||
|         translation_placeholders={"sensor_name": "BME280"}, | ||||
|     ), | ||||
|     AltruistSensorEntityDescription( | ||||
|         device_class=SensorDeviceClass.HUMIDITY, | ||||
|         key="BME680_humidity", | ||||
|         translation_key="humidity", | ||||
|         native_unit_of_measurement=PERCENTAGE, | ||||
|         suggested_display_precision=2, | ||||
|         translation_placeholders={"sensor_name": "BME680"}, | ||||
|     ), | ||||
|     AltruistSensorEntityDescription( | ||||
|         device_class=SensorDeviceClass.PRESSURE, | ||||
|         key="BME680_pressure", | ||||
|         translation_key="pressure", | ||||
|         native_unit_of_measurement=UnitOfPressure.PA, | ||||
|         suggested_unit_of_measurement=UnitOfPressure.MMHG, | ||||
|         suggested_display_precision=0, | ||||
|         translation_placeholders={"sensor_name": "BME680"}, | ||||
|     ), | ||||
|     AltruistSensorEntityDescription( | ||||
|         device_class=SensorDeviceClass.TEMPERATURE, | ||||
|         key="BME680_temperature", | ||||
|         translation_key="temperature", | ||||
|         native_unit_of_measurement=UnitOfTemperature.CELSIUS, | ||||
|         suggested_display_precision=2, | ||||
|         translation_placeholders={"sensor_name": "BME680"}, | ||||
|     ), | ||||
|     AltruistSensorEntityDescription( | ||||
|         device_class=SensorDeviceClass.PRESSURE, | ||||
|         key="BMP_pressure", | ||||
|   | ||||
| @@ -629,6 +629,7 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901 | ||||
|  | ||||
|             devices_info.append( | ||||
|                 { | ||||
|                     "entities": [], | ||||
|                     "entry_type": device_entry.entry_type, | ||||
|                     "has_configuration_url": device_entry.configuration_url is not None, | ||||
|                     "hw_version": device_entry.hw_version, | ||||
| @@ -637,7 +638,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901 | ||||
|                     "model_id": device_entry.model_id, | ||||
|                     "sw_version": device_entry.sw_version, | ||||
|                     "via_device": device_entry.via_device_id, | ||||
|                     "entities": [], | ||||
|                 } | ||||
|             ) | ||||
|  | ||||
|   | ||||
| @@ -19,8 +19,9 @@ CONF_THINKING_BUDGET = "thinking_budget" | ||||
| RECOMMENDED_THINKING_BUDGET = 0 | ||||
| MIN_THINKING_BUDGET = 1024 | ||||
|  | ||||
| NON_THINKING_MODELS = [ | ||||
|     "claude-3-5",  # Both sonnet and haiku | ||||
|     "claude-3-opus", | ||||
|     "claude-3-haiku", | ||||
| THINKING_MODELS = [ | ||||
|     "claude-3-7-sonnet", | ||||
|     "claude-sonnet-4-0", | ||||
|     "claude-opus-4-0", | ||||
|     "claude-opus-4-1", | ||||
| ] | ||||
|   | ||||
| @@ -51,11 +51,11 @@ from .const import ( | ||||
|     DOMAIN, | ||||
|     LOGGER, | ||||
|     MIN_THINKING_BUDGET, | ||||
|     NON_THINKING_MODELS, | ||||
|     RECOMMENDED_CHAT_MODEL, | ||||
|     RECOMMENDED_MAX_TOKENS, | ||||
|     RECOMMENDED_TEMPERATURE, | ||||
|     RECOMMENDED_THINKING_BUDGET, | ||||
|     THINKING_MODELS, | ||||
| ) | ||||
|  | ||||
| # Max number of back and forth with the LLM to generate a response | ||||
| @@ -364,7 +364,7 @@ class AnthropicBaseLLMEntity(Entity): | ||||
|         if tools: | ||||
|             model_args["tools"] = tools | ||||
|         if ( | ||||
|             not model.startswith(tuple(NON_THINKING_MODELS)) | ||||
|             model.startswith(tuple(THINKING_MODELS)) | ||||
|             and thinking_budget >= MIN_THINKING_BUDGET | ||||
|         ): | ||||
|             model_args["thinking"] = ThinkingConfigEnabledParam( | ||||
|   | ||||
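
The Anthropic hunks above differ in how extended thinking is gated: one side keeps an allow-list (`THINKING_MODELS` with `model.startswith(...)`), the other a deny-list (`NON_THINKING_MODELS` with `not model.startswith(...)`). A runnable comparison of the two checks; the model name at the end is only an illustration of a prefix outside both lists:

```python
# Model prefixes copied from the hunk above.
THINKING_MODELS = [
    "claude-3-7-sonnet",
    "claude-sonnet-4-0",
    "claude-opus-4-0",
    "claude-opus-4-1",
]
NON_THINKING_MODELS = ["claude-3-5", "claude-3-opus", "claude-3-haiku"]


def allows_thinking_allowlist(model: str) -> bool:
    return model.startswith(tuple(THINKING_MODELS))


def allows_thinking_denylist(model: str) -> bool:
    return not model.startswith(tuple(NON_THINKING_MODELS))


model = "claude-sonnet-4-5"                # not covered by either list
print(allows_thinking_allowlist(model))    # False
print(allows_thinking_denylist(model))     # True
```
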
| @@ -8,5 +8,5 @@ | ||||
|   "documentation": "https://www.home-assistant.io/integrations/anthropic", | ||||
|   "integration_type": "service", | ||||
|   "iot_class": "cloud_polling", | ||||
|   "requirements": ["anthropic==0.69.0"] | ||||
|   "requirements": ["anthropic==0.62.0"] | ||||
| } | ||||
|   | ||||
| @@ -7,13 +7,13 @@ from collections import namedtuple | ||||
| from collections.abc import Awaitable, Callable, Coroutine | ||||
| import functools | ||||
| import logging | ||||
| from typing import Any, cast | ||||
| from typing import Any | ||||
|  | ||||
| from aioasuswrt.asuswrt import AsusWrt as AsusWrtLegacy | ||||
| from aiohttp import ClientSession | ||||
| from asusrouter import AsusRouter, AsusRouterError | ||||
| from asusrouter.config import ARConfigKey | ||||
| from asusrouter.modules.client import AsusClient | ||||
| from asusrouter.modules.client import AsusClient, ConnectionState | ||||
| from asusrouter.modules.data import AsusData | ||||
| from asusrouter.modules.homeassistant import convert_to_ha_data, convert_to_ha_sensors | ||||
| from asusrouter.tools.connection import get_cookie_jar | ||||
| @@ -219,7 +219,7 @@ class AsusWrtLegacyBridge(AsusWrtBridge): | ||||
|     @property | ||||
|     def is_connected(self) -> bool: | ||||
|         """Get connected status.""" | ||||
|         return cast(bool, self._api.is_connected) | ||||
|         return self._api.is_connected | ||||
|  | ||||
|     async def async_connect(self) -> None: | ||||
|         """Connect to the device.""" | ||||
| @@ -235,8 +235,7 @@ class AsusWrtLegacyBridge(AsusWrtBridge): | ||||
|  | ||||
|     async def async_disconnect(self) -> None: | ||||
|         """Disconnect to the device.""" | ||||
|         if self._api is not None and self._protocol == PROTOCOL_TELNET: | ||||
|             self._api.connection.disconnect() | ||||
|         await self._api.async_disconnect() | ||||
|  | ||||
|     async def async_get_connected_devices(self) -> dict[str, WrtDevice]: | ||||
|         """Get list of connected devices.""" | ||||
| @@ -437,6 +436,7 @@ class AsusWrtHttpBridge(AsusWrtBridge): | ||||
|             if dev.connection is not None | ||||
|             and dev.description is not None | ||||
|             and dev.connection.ip_address is not None | ||||
|             and dev.state is ConnectionState.CONNECTED | ||||
|         } | ||||
|  | ||||
|     async def async_get_available_sensors(self) -> dict[str, dict[str, Any]]: | ||||
|   | ||||
| @@ -2,7 +2,9 @@ | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| from typing import Any | ||||
| from typing import Any, TypeVar | ||||
|  | ||||
| T = TypeVar("T", dict[str, Any], list[Any], None) | ||||
|  | ||||
| TRANSLATION_MAP = { | ||||
|     "wan_rx": "sensor_rx_bytes", | ||||
| @@ -34,7 +36,7 @@ def clean_dict(raw: dict[str, Any]) -> dict[str, Any]: | ||||
|     return {k: v for k, v in raw.items() if v is not None or k.endswith("state")} | ||||
|  | ||||
|  | ||||
| def translate_to_legacy[T: (dict[str, Any], list[Any], None)](raw: T) -> T: | ||||
| def translate_to_legacy(raw: T) -> T: | ||||
|     """Translate raw data to legacy format for dicts and lists.""" | ||||
|  | ||||
|     if raw is None: | ||||
|   | ||||
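Editorial note: the helpers hunk above swaps the PEP 695 inline type-parameter syntax (`def translate_to_legacy[T: (...)](raw: T) -> T`) for a module-level constrained `TypeVar` with the same meaning. A standalone sketch of the two equivalent spellings; the function body is a stand-in, not the real key-translation logic.

```python
from typing import Any, TypeVar

# Constrained TypeVar: raw is a dict, a list, or None, and the return type
# matches whichever of the three was passed in.
T = TypeVar("T", dict[str, Any], list[Any], None)


def translate_to_legacy(raw: T) -> T:
    """Return the input unchanged; stand-in for the real translation logic."""
    return raw


# PEP 695 equivalent (Python 3.12+), as used on the other side of the diff:
# def translate_to_legacy[T: (dict[str, Any], list[Any], None)](raw: T) -> T: ...

if __name__ == "__main__":
    print(translate_to_legacy({"wan_rx": 1}))
    print(translate_to_legacy(None))
```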
| @@ -7,5 +7,5 @@ | ||||
|   "integration_type": "hub", | ||||
|   "iot_class": "local_polling", | ||||
|   "loggers": ["aioasuswrt", "asusrouter", "asyncssh"], | ||||
|   "requirements": ["aioasuswrt==1.4.0", "asusrouter==1.21.0"] | ||||
|   "requirements": ["aioasuswrt==1.5.1", "asusrouter==1.21.0"] | ||||
| } | ||||
|   | ||||
| @@ -36,11 +36,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> bo | ||||
|         raise ConfigEntryAuthFailed("Migration to OAuth required") | ||||
|  | ||||
|     session = async_create_august_clientsession(hass) | ||||
|     implementation = ( | ||||
|         await config_entry_oauth2_flow.async_get_config_entry_implementation( | ||||
|             hass, entry | ||||
|     try: | ||||
|         implementation = ( | ||||
|             await config_entry_oauth2_flow.async_get_config_entry_implementation( | ||||
|                 hass, entry | ||||
|             ) | ||||
|         ) | ||||
|     ) | ||||
|     except ValueError as err: | ||||
|         raise ConfigEntryNotReady("OAuth implementation not available") from err | ||||
|     oauth_session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation) | ||||
|     august_gateway = AugustGateway(Path(hass.config.config_dir), session, oauth_session) | ||||
|     try: | ||||
|   | ||||
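Editorial note: the August hunk wraps the OAuth implementation lookup so that a `ValueError` becomes a retry-later condition instead of a hard setup failure. A framework-free sketch of that "translate an expected error into a retry signal" pattern; the exception and function names below are stand-ins, not Home Assistant classes.

```python
class NotReadyError(Exception):
    """Stand-in for a 'set up again later' signal (akin to ConfigEntryNotReady)."""


def get_implementation(available: dict[str, str], key: str) -> str:
    """Stand-in lookup; raises ValueError when no implementation is registered."""
    if key not in available:
        raise ValueError(f"No implementation registered for {key!r}")
    return available[key]


def setup(available: dict[str, str], key: str) -> str:
    try:
        implementation = get_implementation(available, key)
    except ValueError as err:
        # Chain the original error so the root cause stays visible in logs.
        raise NotReadyError("OAuth implementation not available") from err
    return implementation


if __name__ == "__main__":
    print(setup({"august": "oauth-impl"}, "august"))
    try:
        setup({}, "august")
    except NotReadyError as err:
        print(f"retry later: {err} (caused by {err.__cause__!r})")
```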
| @@ -5,7 +5,7 @@ | ||||
|       "step": { | ||||
|         "init": { | ||||
|           "title": "Set up two-factor authentication using TOTP", | ||||
|           "description": "To activate two-factor authentication using time-based one-time passwords, scan the QR code with your authentication app. If you don't have one, we recommend either [Google Authenticator](https://support.google.com/accounts/answer/1066447) or [Authy](https://authy.com/).\n\n{qr_code}\n\nAfter scanning the code, enter the six-digit code from your app to verify the setup. If you have problems scanning the QR code, do a manual setup with code **`{code}`**." | ||||
|           "description": "To activate two-factor authentication using time-based one-time passwords, scan the QR code with your authentication app. If you don't have one, we recommend either [Google Authenticator]({google_authenticator_url}) or [Authy]({authy_url}).\n\n{qr_code}\n\nAfter scanning the code, enter the six-digit code from your app to verify the setup. If you have problems scanning the QR code, do a manual setup with code **`{code}`**." | ||||
|         } | ||||
|       }, | ||||
|       "error": { | ||||
|   | ||||
| @@ -26,6 +26,9 @@ async def async_setup_entry( | ||||
|  | ||||
|     if CONF_HOST in config_entry.data: | ||||
|         coordinator = AwairLocalDataUpdateCoordinator(hass, config_entry, session) | ||||
|         config_entry.async_on_unload( | ||||
|             config_entry.add_update_listener(_async_update_listener) | ||||
|         ) | ||||
|     else: | ||||
|         coordinator = AwairCloudDataUpdateCoordinator(hass, config_entry, session) | ||||
|  | ||||
| @@ -33,11 +36,6 @@ async def async_setup_entry( | ||||
|  | ||||
|     config_entry.runtime_data = coordinator | ||||
|  | ||||
|     if CONF_HOST in config_entry.data: | ||||
|         config_entry.async_on_unload( | ||||
|             config_entry.add_update_listener(_async_update_listener) | ||||
|         ) | ||||
|  | ||||
|     await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) | ||||
|  | ||||
|     return True | ||||
|   | ||||
| @@ -17,7 +17,6 @@ from homeassistant.core import HomeAssistant, callback | ||||
| from homeassistant.exceptions import HomeAssistantError | ||||
| from homeassistant.helpers import frame | ||||
| from homeassistant.util import slugify | ||||
| from homeassistant.util.async_iterator import AsyncIteratorReader, AsyncIteratorWriter | ||||
|  | ||||
| from . import util | ||||
| from .agent import BackupAgent | ||||
| @@ -145,7 +144,7 @@ class DownloadBackupView(HomeAssistantView): | ||||
|                 return Response(status=HTTPStatus.NOT_FOUND) | ||||
|         else: | ||||
|             stream = await agent.async_download_backup(backup_id) | ||||
|             reader = cast(IO[bytes], AsyncIteratorReader(hass.loop, stream)) | ||||
|             reader = cast(IO[bytes], util.AsyncIteratorReader(hass, stream)) | ||||
|  | ||||
|         worker_done_event = asyncio.Event() | ||||
|  | ||||
| @@ -153,7 +152,7 @@ class DownloadBackupView(HomeAssistantView): | ||||
|             """Call by the worker thread when it's done.""" | ||||
|             hass.loop.call_soon_threadsafe(worker_done_event.set) | ||||
|  | ||||
|         stream = AsyncIteratorWriter(hass.loop) | ||||
|         stream = util.AsyncIteratorWriter(hass) | ||||
|         worker = threading.Thread( | ||||
|             target=util.decrypt_backup, | ||||
|             args=[backup, reader, stream, password, on_done, 0, []], | ||||
|   | ||||
| @@ -38,7 +38,6 @@ from homeassistant.helpers import ( | ||||
| ) | ||||
| from homeassistant.helpers.json import json_bytes | ||||
| from homeassistant.util import dt as dt_util, json as json_util | ||||
| from homeassistant.util.async_iterator import AsyncIteratorReader | ||||
|  | ||||
| from . import util as backup_util | ||||
| from .agent import ( | ||||
| @@ -73,6 +72,7 @@ from .models import ( | ||||
| ) | ||||
| from .store import BackupStore | ||||
| from .util import ( | ||||
|     AsyncIteratorReader, | ||||
|     DecryptedBackupStreamer, | ||||
|     EncryptedBackupStreamer, | ||||
|     make_backup_dir, | ||||
| @@ -1525,7 +1525,7 @@ class BackupManager: | ||||
|             reader = await self.hass.async_add_executor_job(open, path.as_posix(), "rb") | ||||
|         else: | ||||
|             backup_stream = await agent.async_download_backup(backup_id) | ||||
|             reader = cast(IO[bytes], AsyncIteratorReader(self.hass.loop, backup_stream)) | ||||
|             reader = cast(IO[bytes], AsyncIteratorReader(self.hass, backup_stream)) | ||||
|         try: | ||||
|             await self.hass.async_add_executor_job( | ||||
|                 validate_password_stream, reader, password | ||||
|   | ||||
| @@ -4,6 +4,7 @@ from __future__ import annotations | ||||
|  | ||||
| import asyncio | ||||
| from collections.abc import AsyncIterator, Callable, Coroutine | ||||
| from concurrent.futures import CancelledError, Future | ||||
| import copy | ||||
| from dataclasses import dataclass, replace | ||||
| from io import BytesIO | ||||
| @@ -13,7 +14,7 @@ from pathlib import Path, PurePath | ||||
| from queue import SimpleQueue | ||||
| import tarfile | ||||
| import threading | ||||
| from typing import IO, Any, cast | ||||
| from typing import IO, Any, Self, cast | ||||
|  | ||||
| import aiohttp | ||||
| from securetar import SecureTarError, SecureTarFile, SecureTarReadError | ||||
| @@ -22,11 +23,6 @@ from homeassistant.backup_restore import password_to_key | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.exceptions import HomeAssistantError | ||||
| from homeassistant.util import dt as dt_util | ||||
| from homeassistant.util.async_iterator import ( | ||||
|     Abort, | ||||
|     AsyncIteratorReader, | ||||
|     AsyncIteratorWriter, | ||||
| ) | ||||
| from homeassistant.util.json import JsonObjectType, json_loads_object | ||||
|  | ||||
| from .const import BUF_SIZE, LOGGER | ||||
| @@ -63,6 +59,12 @@ class BackupEmpty(DecryptError): | ||||
|     _message = "No tar files found in the backup." | ||||
|  | ||||
|  | ||||
| class AbortCipher(HomeAssistantError): | ||||
|     """Abort the cipher operation.""" | ||||
|  | ||||
|     _message = "Abort cipher operation." | ||||
|  | ||||
|  | ||||
| def make_backup_dir(path: Path) -> None: | ||||
|     """Create a backup directory if it does not exist.""" | ||||
|     path.mkdir(exist_ok=True) | ||||
| @@ -164,6 +166,106 @@ def validate_password(path: Path, password: str | None) -> bool: | ||||
|     return False | ||||
|  | ||||
|  | ||||
| class AsyncIteratorReader: | ||||
|     """Wrap an AsyncIterator.""" | ||||
|  | ||||
|     def __init__(self, hass: HomeAssistant, stream: AsyncIterator[bytes]) -> None: | ||||
|         """Initialize the wrapper.""" | ||||
|         self._aborted = False | ||||
|         self._hass = hass | ||||
|         self._stream = stream | ||||
|         self._buffer: bytes | None = None | ||||
|         self._next_future: Future[bytes | None] | None = None | ||||
|         self._pos: int = 0 | ||||
|  | ||||
|     async def _next(self) -> bytes | None: | ||||
|         """Get the next chunk from the iterator.""" | ||||
|         return await anext(self._stream, None) | ||||
|  | ||||
|     def abort(self) -> None: | ||||
|         """Abort the reader.""" | ||||
|         self._aborted = True | ||||
|         if self._next_future is not None: | ||||
|             self._next_future.cancel() | ||||
|  | ||||
|     def read(self, n: int = -1, /) -> bytes: | ||||
|         """Read data from the iterator.""" | ||||
|         result = bytearray() | ||||
|         while n < 0 or len(result) < n: | ||||
|             if not self._buffer: | ||||
|                 self._next_future = asyncio.run_coroutine_threadsafe( | ||||
|                     self._next(), self._hass.loop | ||||
|                 ) | ||||
|                 if self._aborted: | ||||
|                     self._next_future.cancel() | ||||
|                     raise AbortCipher | ||||
|                 try: | ||||
|                     self._buffer = self._next_future.result() | ||||
|                 except CancelledError as err: | ||||
|                     raise AbortCipher from err | ||||
|                 self._pos = 0 | ||||
|             if not self._buffer: | ||||
|                 # The stream is exhausted | ||||
|                 break | ||||
|             chunk = self._buffer[self._pos : self._pos + n] | ||||
|             result.extend(chunk) | ||||
|             n -= len(chunk) | ||||
|             self._pos += len(chunk) | ||||
|             if self._pos == len(self._buffer): | ||||
|                 self._buffer = None | ||||
|         return bytes(result) | ||||
|  | ||||
|     def close(self) -> None: | ||||
|         """Close the iterator.""" | ||||
|  | ||||
|  | ||||
| class AsyncIteratorWriter: | ||||
|     """Wrap an AsyncIterator.""" | ||||
|  | ||||
|     def __init__(self, hass: HomeAssistant) -> None: | ||||
|         """Initialize the wrapper.""" | ||||
|         self._aborted = False | ||||
|         self._hass = hass | ||||
|         self._pos: int = 0 | ||||
|         self._queue: asyncio.Queue[bytes | None] = asyncio.Queue(maxsize=1) | ||||
|         self._write_future: Future[bytes | None] | None = None | ||||
|  | ||||
|     def __aiter__(self) -> Self: | ||||
|         """Return the iterator.""" | ||||
|         return self | ||||
|  | ||||
|     async def __anext__(self) -> bytes: | ||||
|         """Get the next chunk from the iterator.""" | ||||
|         if data := await self._queue.get(): | ||||
|             return data | ||||
|         raise StopAsyncIteration | ||||
|  | ||||
|     def abort(self) -> None: | ||||
|         """Abort the writer.""" | ||||
|         self._aborted = True | ||||
|         if self._write_future is not None: | ||||
|             self._write_future.cancel() | ||||
|  | ||||
|     def tell(self) -> int: | ||||
|         """Return the current position in the iterator.""" | ||||
|         return self._pos | ||||
|  | ||||
|     def write(self, s: bytes, /) -> int: | ||||
|         """Write data to the iterator.""" | ||||
|         self._write_future = asyncio.run_coroutine_threadsafe( | ||||
|             self._queue.put(s), self._hass.loop | ||||
|         ) | ||||
|         if self._aborted: | ||||
|             self._write_future.cancel() | ||||
|             raise AbortCipher | ||||
|         try: | ||||
|             self._write_future.result() | ||||
|         except CancelledError as err: | ||||
|             raise AbortCipher from err | ||||
|         self._pos += len(s) | ||||
|         return len(s) | ||||
|  | ||||
|  | ||||
| def validate_password_stream( | ||||
|     input_stream: IO[bytes], | ||||
|     password: str | None, | ||||
| @@ -240,7 +342,7 @@ def decrypt_backup( | ||||
|         finally: | ||||
|             # Write an empty chunk to signal the end of the stream | ||||
|             output_stream.write(b"") | ||||
|     except Abort: | ||||
|     except AbortCipher: | ||||
|         LOGGER.debug("Cipher operation aborted") | ||||
|     finally: | ||||
|         on_done(error) | ||||
| @@ -328,7 +430,7 @@ def encrypt_backup( | ||||
|         finally: | ||||
|             # Write an empty chunk to signal the end of the stream | ||||
|             output_stream.write(b"") | ||||
|     except Abort: | ||||
|     except AbortCipher: | ||||
|         LOGGER.debug("Cipher operation aborted") | ||||
|     finally: | ||||
|         on_done(error) | ||||
| @@ -455,8 +557,8 @@ class _CipherBackupStreamer: | ||||
|             self._hass.loop.call_soon_threadsafe(worker_status.done.set) | ||||
|  | ||||
|         stream = await self._open_stream() | ||||
|         reader = AsyncIteratorReader(self._hass.loop, stream) | ||||
|         writer = AsyncIteratorWriter(self._hass.loop) | ||||
|         reader = AsyncIteratorReader(self._hass, stream) | ||||
|         writer = AsyncIteratorWriter(self._hass) | ||||
|         worker = threading.Thread( | ||||
|             target=self._cipher_func, | ||||
|             args=[ | ||||
|   | ||||
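Editorial note: the backup hunks move `AsyncIteratorReader`/`AsyncIteratorWriter` back into `backup/util.py`, take `hass` instead of a bare loop, and use a local `AbortCipher` exception. The underlying trick is unchanged: a worker thread consumes an async stream by scheduling the next-chunk coroutine on the event loop with `asyncio.run_coroutine_threadsafe()` and blocking on the returned future. A minimal, dependency-free sketch of that bridge; class and variable names are illustrative, not the integration's code.

```python
import asyncio
from collections.abc import AsyncIterator
from concurrent.futures import Future


class AsyncChunkReader:
    """Blocking, file-like reader over an async byte iterator (simplified sketch)."""

    def __init__(self, loop: asyncio.AbstractEventLoop, stream: AsyncIterator[bytes]) -> None:
        self._loop = loop
        self._stream = stream
        self._buffer = b""

    async def _next(self) -> bytes | None:
        # run_coroutine_threadsafe needs a real coroutine, so wrap anext() here.
        return await anext(self._stream, None)

    def read(self, n: int) -> bytes:
        while len(self._buffer) < n:
            # Schedule the next chunk on the event loop and block this thread on it.
            fut: Future[bytes | None] = asyncio.run_coroutine_threadsafe(self._next(), self._loop)
            chunk = fut.result()
            if chunk is None:  # stream exhausted
                break
            self._buffer += chunk
        data, self._buffer = self._buffer[:n], self._buffer[n:]
        return data


async def main() -> None:
    async def produce() -> AsyncIterator[bytes]:
        for part in (b"backup-", b"payload-", b"bytes"):
            yield part

    reader = AsyncChunkReader(asyncio.get_running_loop(), produce())
    # Run the blocking consumer in a worker thread so the event loop stays free.
    result = await asyncio.to_thread(lambda: reader.read(5) + reader.read(100))
    print(result)  # b'backup-payload-bytes'


if __name__ == "__main__":
    asyncio.run(main())
```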
| @@ -73,12 +73,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: BangOlufsenConfigEntry) | ||||
|     # Add the websocket and API client | ||||
|     entry.runtime_data = BangOlufsenData(websocket, client) | ||||
|  | ||||
|     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) | ||||
|  | ||||
|     # Start WebSocket connection once the platforms have been loaded. | ||||
|     # This ensures that the initial WebSocket notifications are dispatched to entities | ||||
|     # Start WebSocket connection | ||||
|     await client.connect_notifications(remote_control=True, reconnect=True) | ||||
|  | ||||
|     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) | ||||
|  | ||||
|     return True | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -125,8 +125,7 @@ async def async_setup_entry( | ||||
|     async_add_entities( | ||||
|         new_entities=[ | ||||
|             BangOlufsenMediaPlayer(config_entry, config_entry.runtime_data.client) | ||||
|         ], | ||||
|         update_before_add=True, | ||||
|         ] | ||||
|     ) | ||||
|  | ||||
|     # Register actions. | ||||
| @@ -267,8 +266,34 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): | ||||
|             self._software_status.software_version, | ||||
|         ) | ||||
|  | ||||
|         # Get overall device state once. This is handled by WebSocket events the rest of the time. | ||||
|         product_state = await self._client.get_product_state() | ||||
|  | ||||
|         # Get volume information. | ||||
|         if product_state.volume: | ||||
|             self._volume = product_state.volume | ||||
|  | ||||
|         # Get all playback information. | ||||
|         # Ensure that the metadata is not None upon startup | ||||
|         if product_state.playback: | ||||
|             if product_state.playback.metadata: | ||||
|                 self._playback_metadata = product_state.playback.metadata | ||||
|                 self._remote_leader = product_state.playback.metadata.remote_leader | ||||
|             if product_state.playback.progress: | ||||
|                 self._playback_progress = product_state.playback.progress | ||||
|             if product_state.playback.source: | ||||
|                 self._source_change = product_state.playback.source | ||||
|             if product_state.playback.state: | ||||
|                 self._playback_state = product_state.playback.state | ||||
|                 # Set initial state | ||||
|                 if self._playback_state.value: | ||||
|                     self._state = self._playback_state.value | ||||
|  | ||||
|         self._attr_media_position_updated_at = utcnow() | ||||
|  | ||||
|         # Get the highest resolution available of the given images. | ||||
|         self._media_image = get_highest_resolution_artwork(self._playback_metadata) | ||||
|  | ||||
|         # If the device has been updated with new sources, then the API will fail here. | ||||
|         await self._async_update_sources() | ||||
|  | ||||
|   | ||||
| @@ -3,12 +3,16 @@ beolink_allstandby: | ||||
|     entity: | ||||
|       integration: bang_olufsen | ||||
|       domain: media_player | ||||
|     device: | ||||
|       integration: bang_olufsen | ||||
|  | ||||
| beolink_expand: | ||||
|   target: | ||||
|     entity: | ||||
|       integration: bang_olufsen | ||||
|       domain: media_player | ||||
|     device: | ||||
|       integration: bang_olufsen | ||||
|   fields: | ||||
|     all_discovered: | ||||
|       required: false | ||||
| @@ -33,6 +37,8 @@ beolink_join: | ||||
|     entity: | ||||
|       integration: bang_olufsen | ||||
|       domain: media_player | ||||
|     device: | ||||
|       integration: bang_olufsen | ||||
|   fields: | ||||
|     jid_options: | ||||
|       collapsed: false | ||||
| @@ -65,12 +71,16 @@ beolink_leave: | ||||
|     entity: | ||||
|       integration: bang_olufsen | ||||
|       domain: media_player | ||||
|     device: | ||||
|       integration: bang_olufsen | ||||
|  | ||||
| beolink_unexpand: | ||||
|   target: | ||||
|     entity: | ||||
|       integration: bang_olufsen | ||||
|       domain: media_player | ||||
|     device: | ||||
|       integration: bang_olufsen | ||||
|   fields: | ||||
|     jid_options: | ||||
|       collapsed: false | ||||
|   | ||||
| @@ -19,8 +19,8 @@ | ||||
|     "bleak-retry-connector==4.4.3", | ||||
|     "bluetooth-adapters==2.1.0", | ||||
|     "bluetooth-auto-recovery==1.5.3", | ||||
|     "bluetooth-data-tools==1.28.3", | ||||
|     "dbus-fast==2.44.5", | ||||
|     "habluetooth==5.7.0" | ||||
|     "bluetooth-data-tools==1.28.2", | ||||
|     "dbus-fast==2.44.3", | ||||
|     "habluetooth==5.6.4" | ||||
|   ] | ||||
| } | ||||
|   | ||||
| @@ -68,12 +68,17 @@ class IntegrationMatchHistory: | ||||
|     manufacturer_data: bool | ||||
|     service_data: set[str] | ||||
|     service_uuids: set[str] | ||||
|     name: str | ||||
|  | ||||
|  | ||||
| def seen_all_fields( | ||||
|     previous_match: IntegrationMatchHistory, advertisement_data: AdvertisementData | ||||
|     previous_match: IntegrationMatchHistory, | ||||
|     advertisement_data: AdvertisementData, | ||||
|     name: str, | ||||
| ) -> bool: | ||||
|     """Return if we have seen all fields.""" | ||||
|     if previous_match.name != name: | ||||
|         return False | ||||
|     if not previous_match.manufacturer_data and advertisement_data.manufacturer_data: | ||||
|         return False | ||||
|     if advertisement_data.service_data and ( | ||||
| @@ -122,10 +127,11 @@ class IntegrationMatcher: | ||||
|         device = service_info.device | ||||
|         advertisement_data = service_info.advertisement | ||||
|         connectable = service_info.connectable | ||||
|         name = service_info.name | ||||
|         matched = self._matched_connectable if connectable else self._matched | ||||
|         matched_domains: set[str] = set() | ||||
|         if (previous_match := matched.get(device.address)) and seen_all_fields( | ||||
|             previous_match, advertisement_data | ||||
|             previous_match, advertisement_data, name | ||||
|         ): | ||||
|             # We have seen all fields so we can skip the rest of the matchers | ||||
|             return matched_domains | ||||
| @@ -140,11 +146,13 @@ class IntegrationMatcher: | ||||
|             ) | ||||
|             previous_match.service_data |= set(advertisement_data.service_data) | ||||
|             previous_match.service_uuids |= set(advertisement_data.service_uuids) | ||||
|             previous_match.name = name | ||||
|         else: | ||||
|             matched[device.address] = IntegrationMatchHistory( | ||||
|                 manufacturer_data=bool(advertisement_data.manufacturer_data), | ||||
|                 service_data=set(advertisement_data.service_data), | ||||
|                 service_uuids=set(advertisement_data.service_uuids), | ||||
|                 name=name, | ||||
|             ) | ||||
|         return matched_domains | ||||
|  | ||||
|   | ||||
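Editorial note: the Bluetooth matcher hunk adds the advertised local name to `IntegrationMatchHistory`, so a device that starts advertising under a different name is run through the matchers again instead of being skipped. A standalone sketch of that "skip only if nothing new was seen" bookkeeping; the advertisement type is simplified and only loosely mirrors the real `AdvertisementData`.

```python
from dataclasses import dataclass, field


@dataclass
class Advertisement:
    """Simplified stand-in: only the fields the history tracks."""
    manufacturer_data: bool
    service_uuids: set[str]
    name: str


@dataclass
class MatchHistory:
    manufacturer_data: bool
    service_uuids: set[str] = field(default_factory=set)
    name: str = ""


def seen_all_fields(previous: MatchHistory, adv: Advertisement) -> bool:
    """Return True only when the new advertisement adds nothing we haven't matched on."""
    if previous.name != adv.name:
        return False
    if not previous.manufacturer_data and adv.manufacturer_data:
        return False
    if adv.service_uuids - previous.service_uuids:
        return False
    return True


if __name__ == "__main__":
    history = MatchHistory(manufacturer_data=True, service_uuids={"180f"}, name="Sensor")
    same = Advertisement(True, {"180f"}, "Sensor")
    renamed = Advertisement(True, {"180f"}, "Sensor Pro")
    print(seen_all_fields(history, same))     # True: nothing new, matchers can be skipped
    print(seen_all_fields(history, renamed))  # False: new name, run the matchers again
```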
| @@ -8,5 +8,5 @@ | ||||
|   "iot_class": "cloud_polling", | ||||
|   "loggers": ["bring_api"], | ||||
|   "quality_scale": "platinum", | ||||
|   "requirements": ["bring-api==1.1.0"] | ||||
|   "requirements": ["bring-api==1.1.1"] | ||||
| } | ||||
|   | ||||
| @@ -8,7 +8,7 @@ | ||||
|   "integration_type": "device", | ||||
|   "iot_class": "local_polling", | ||||
|   "loggers": ["brother", "pyasn1", "pysmi", "pysnmp"], | ||||
|   "requirements": ["brother==5.1.0"], | ||||
|   "requirements": ["brother==5.1.1"], | ||||
|   "zeroconf": [ | ||||
|     { | ||||
|       "type": "_printer._tcp.local.", | ||||
|   | ||||
| @@ -315,7 +315,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: | ||||
|     hass.http.register_view(CalendarListView(component)) | ||||
|     hass.http.register_view(CalendarEventView(component)) | ||||
|  | ||||
|     frontend.async_register_built_in_panel(hass, "calendar", "calendar", "mdi:calendar") | ||||
|     frontend.async_register_built_in_panel( | ||||
|         hass, "calendar", "calendar", "hass:calendar" | ||||
|     ) | ||||
|  | ||||
|     websocket_api.async_register_command(hass, handle_calendar_event_create) | ||||
|     websocket_api.async_register_command(hass, handle_calendar_event_delete) | ||||
|   | ||||
| @@ -51,6 +51,12 @@ from homeassistant.const import ( | ||||
| from homeassistant.core import Event, HomeAssistant, ServiceCall, callback | ||||
| from homeassistant.exceptions import HomeAssistantError | ||||
| from homeassistant.helpers import config_validation as cv, issue_registry as ir | ||||
| from homeassistant.helpers.deprecation import ( | ||||
|     DeprecatedConstantEnum, | ||||
|     all_with_deprecated_constants, | ||||
|     check_if_deprecated_constant, | ||||
|     dir_with_deprecated_constants, | ||||
| ) | ||||
| from homeassistant.helpers.entity import Entity, EntityDescription | ||||
| from homeassistant.helpers.entity_component import EntityComponent | ||||
| from homeassistant.helpers.event import async_track_time_interval | ||||
| @@ -112,6 +118,12 @@ ATTR_FILENAME: Final = "filename" | ||||
| ATTR_MEDIA_PLAYER: Final = "media_player" | ||||
| ATTR_FORMAT: Final = "format" | ||||
|  | ||||
| # These constants are deprecated as of Home Assistant 2024.10 | ||||
| # Please use the StreamType enum instead. | ||||
| _DEPRECATED_STATE_RECORDING = DeprecatedConstantEnum(CameraState.RECORDING, "2025.10") | ||||
| _DEPRECATED_STATE_STREAMING = DeprecatedConstantEnum(CameraState.STREAMING, "2025.10") | ||||
| _DEPRECATED_STATE_IDLE = DeprecatedConstantEnum(CameraState.IDLE, "2025.10") | ||||
|  | ||||
|  | ||||
| class CameraEntityFeature(IntFlag): | ||||
|     """Supported features of the camera entity.""" | ||||
| @@ -1105,3 +1117,11 @@ async def async_handle_record_service( | ||||
|         duration=service_call.data[CONF_DURATION], | ||||
|         lookback=service_call.data[CONF_LOOKBACK], | ||||
|     ) | ||||
|  | ||||
|  | ||||
| # These can be removed if no deprecated constant are in this module anymore | ||||
| __getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) | ||||
| __dir__ = partial( | ||||
|     dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] | ||||
| ) | ||||
| __all__ = all_with_deprecated_constants(globals()) | ||||
|   | ||||
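Editorial note: the camera hunk restores the deprecated `STATE_*` constants together with module-level `__getattr__`/`__dir__` hooks that warn when the old names are accessed. A self-contained sketch of that deprecation-shim pattern in plain Python (PEP 562); the module name, enum, and constant names below are invented for the example and do not use Home Assistant's `homeassistant.helpers.deprecation` helpers.

```python
"""media_state.py - sketch of a module-level deprecation shim (hypothetical module)."""
from enum import StrEnum
import warnings


class MediaState(StrEnum):
    RECORDING = "recording"
    IDLE = "idle"


# Old constant names mapped to their enum replacements.
_DEPRECATED = {
    "STATE_RECORDING": MediaState.RECORDING,
    "STATE_IDLE": MediaState.IDLE,
}


def __getattr__(name: str):
    """Resolve deprecated names on access and emit a DeprecationWarning."""
    if name in _DEPRECATED:
        warnings.warn(
            f"{name} is deprecated, use MediaState.{name.removeprefix('STATE_')} instead",
            DeprecationWarning,
            stacklevel=2,
        )
        return _DEPRECATED[name]
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")


def __dir__() -> list[str]:
    """Advertise the deprecated names alongside the real module globals."""
    return [*globals(), *_DEPRECATED]
```

Importing `STATE_RECORDING` from such a module then still returns `MediaState.RECORDING`, but with a `DeprecationWarning` pointing callers at the enum.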
| @@ -53,6 +53,7 @@ from .const import ( | ||||
|     CONF_ACME_SERVER, | ||||
|     CONF_ALEXA, | ||||
|     CONF_ALIASES, | ||||
|     CONF_CLOUDHOOK_SERVER, | ||||
|     CONF_COGNITO_CLIENT_ID, | ||||
|     CONF_ENTITY_CONFIG, | ||||
|     CONF_FILTER, | ||||
| @@ -129,6 +130,7 @@ CONFIG_SCHEMA = vol.Schema( | ||||
|                 vol.Optional(CONF_ACCOUNT_LINK_SERVER): str, | ||||
|                 vol.Optional(CONF_ACCOUNTS_SERVER): str, | ||||
|                 vol.Optional(CONF_ACME_SERVER): str, | ||||
|                 vol.Optional(CONF_CLOUDHOOK_SERVER): str, | ||||
|                 vol.Optional(CONF_RELAYER_SERVER): str, | ||||
|                 vol.Optional(CONF_REMOTESTATE_SERVER): str, | ||||
|                 vol.Optional(CONF_SERVICEHANDLERS_SERVER): str, | ||||
|   | ||||
| @@ -78,6 +78,7 @@ CONF_USER_POOL_ID = "user_pool_id" | ||||
| CONF_ACCOUNT_LINK_SERVER = "account_link_server" | ||||
| CONF_ACCOUNTS_SERVER = "accounts_server" | ||||
| CONF_ACME_SERVER = "acme_server" | ||||
| CONF_CLOUDHOOK_SERVER = "cloudhook_server" | ||||
| CONF_RELAYER_SERVER = "relayer_server" | ||||
| CONF_REMOTESTATE_SERVER = "remotestate_server" | ||||
| CONF_SERVICEHANDLERS_SERVER = "servicehandlers_server" | ||||
|   | ||||
| @@ -13,6 +13,6 @@ | ||||
|   "integration_type": "system", | ||||
|   "iot_class": "cloud_push", | ||||
|   "loggers": ["acme", "hass_nabucasa", "snitun"], | ||||
|   "requirements": ["hass-nabucasa==1.2.0"], | ||||
|   "requirements": ["hass-nabucasa==1.1.1"], | ||||
|   "single_config_entry": true | ||||
| } | ||||
|   | ||||
| @@ -38,6 +38,10 @@ TYPE_SPECIFY_COUNTRY = "specify_country_code" | ||||
|  | ||||
| _LOGGER = logging.getLogger(__name__) | ||||
|  | ||||
| DESCRIPTION_PLACEHOLDER = { | ||||
|     "register_link": "https://electricitymaps.com/free-tier", | ||||
| } | ||||
|  | ||||
|  | ||||
| class ElectricityMapsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|     """Handle a config flow for Co2signal.""" | ||||
| @@ -70,6 +74,7 @@ class ElectricityMapsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|             return self.async_show_form( | ||||
|                 step_id="user", | ||||
|                 data_schema=data_schema, | ||||
|                 description_placeholders=DESCRIPTION_PLACEHOLDER, | ||||
|             ) | ||||
|  | ||||
|         data = {CONF_API_KEY: user_input[CONF_API_KEY]} | ||||
| @@ -179,4 +184,5 @@ class ElectricityMapsConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|             step_id=step_id, | ||||
|             data_schema=data_schema, | ||||
|             errors=errors, | ||||
|             description_placeholders=DESCRIPTION_PLACEHOLDER, | ||||
|         ) | ||||
|   | ||||
| @@ -1,106 +0,0 @@ | ||||
| rules: | ||||
|   # Bronze | ||||
|   action-setup: | ||||
|     status: exempt | ||||
|     comment: | | ||||
|       The integration does not provide any actions. | ||||
|   appropriate-polling: done | ||||
|   brands: done | ||||
|   common-modules: done | ||||
|   config-flow-test-coverage: | ||||
|     status: todo | ||||
|     comment: | | ||||
|       Stale docstring and test name: `test_form_home` and reusing result. | ||||
|       Extract `async_setup_entry` into own fixture. | ||||
|       Avoid importing `config_flow` in tests. | ||||
|       Test reauth with errors | ||||
|   config-flow: | ||||
|     status: todo | ||||
|     comment: | | ||||
|       The config flow misses data descriptions. | ||||
|       Remove URLs from data descriptions, they should be replaced with placeholders. | ||||
|       Make use of Electricity Maps zone keys in country code as dropdown. | ||||
|       Make use of location selector for coordinates. | ||||
|   dependency-transparency: done | ||||
|   docs-actions: | ||||
|     status: exempt | ||||
|     comment: | | ||||
|       The integration does not provide any actions. | ||||
|   docs-high-level-description: done | ||||
|   docs-installation-instructions: done | ||||
|   docs-removal-instructions: done | ||||
|   entity-event-setup: | ||||
|     status: exempt | ||||
|     comment: | | ||||
|       Entities of this integration do not explicitly subscribe to events. | ||||
|   entity-unique-id: done | ||||
|   has-entity-name: done | ||||
|   runtime-data: done | ||||
|   test-before-configure: done | ||||
|   test-before-setup: done | ||||
|   unique-config-entry: todo | ||||
|  | ||||
|   # Silver | ||||
|   action-exceptions: | ||||
|     status: exempt | ||||
|     comment: | | ||||
|       The integration does not provide any actions. | ||||
|   config-entry-unloading: done | ||||
|   docs-configuration-parameters: | ||||
|     status: exempt | ||||
|     comment: | | ||||
|       The integration does not provide any additional options. | ||||
|   docs-installation-parameters: done | ||||
|   entity-unavailable: done | ||||
|   integration-owner: done | ||||
|   log-when-unavailable: done | ||||
|   parallel-updates: todo | ||||
|   reauthentication-flow: done | ||||
|   test-coverage: | ||||
|     status: todo | ||||
|     comment: | | ||||
|       Use `hass.config_entries.async_setup` instead of assert await `async_setup_component(hass, DOMAIN, {})` | ||||
|       `test_sensor` could use `snapshot_platform` | ||||
|  | ||||
|   # Gold | ||||
|   devices: done | ||||
|   diagnostics: done | ||||
|   discovery-update-info: | ||||
|     status: exempt | ||||
|     comment: | | ||||
|       This integration cannot be discovered, it is a connecting to a cloud service. | ||||
|   discovery: | ||||
|     status: exempt | ||||
|     comment: | | ||||
|       This integration cannot be discovered, it is a connecting to a cloud service. | ||||
|   docs-data-update: done | ||||
|   docs-examples: done | ||||
|   docs-known-limitations: done | ||||
|   docs-supported-devices: done | ||||
|   docs-supported-functions: done | ||||
|   docs-troubleshooting: done | ||||
|   docs-use-cases: done | ||||
|   dynamic-devices: | ||||
|     status: exempt | ||||
|     comment: | | ||||
|       The integration connects to a single service per configuration entry. | ||||
|   entity-category: done | ||||
|   entity-device-class: done | ||||
|   entity-disabled-by-default: done | ||||
|   entity-translations: done | ||||
|   exception-translations: todo | ||||
|   icon-translations: todo | ||||
|   reconfiguration-flow: todo | ||||
|   repair-issues: | ||||
|     status: exempt | ||||
|     comment: | | ||||
|       This integration does not raise any repairable issues. | ||||
|   stale-devices: | ||||
|     status: exempt | ||||
|     comment: | | ||||
|       This integration connect to a single device per configuration entry. | ||||
|  | ||||
|   # Platinum | ||||
|   async-dependency: done | ||||
|   inject-websession: done | ||||
|   strict-typing: done | ||||
|   | ||||
| @@ -6,7 +6,7 @@ | ||||
|           "location": "[%key:common::config_flow::data::location%]", | ||||
|           "api_key": "[%key:common::config_flow::data::access_token%]" | ||||
|         }, | ||||
|         "description": "Visit https://electricitymaps.com/free-tier to request a token." | ||||
|         "description": "Visit the [Electricity Maps page]({register_link}) to request a token." | ||||
|       }, | ||||
|       "coordinates": { | ||||
|         "data": { | ||||
|   | ||||
| @@ -166,6 +166,7 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|                 data_schema=STEP_USER_DATA_SCHEMA, | ||||
|                 description_placeholders={ | ||||
|                     "account_name": self.reauth_entry.title, | ||||
|                     "developer_url": "https://www.coinbase.com/developer-platform", | ||||
|                 }, | ||||
|                 errors=errors, | ||||
|             ) | ||||
| @@ -195,6 +196,7 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|             data_schema=STEP_USER_DATA_SCHEMA, | ||||
|             description_placeholders={ | ||||
|                 "account_name": self.reauth_entry.title, | ||||
|                 "developer_url": "https://www.coinbase.com/developer-platform", | ||||
|             }, | ||||
|             errors=errors, | ||||
|         ) | ||||
|   | ||||
| @@ -11,7 +11,7 @@ | ||||
|       }, | ||||
|       "reauth_confirm": { | ||||
|         "title": "Update Coinbase API credentials", | ||||
|         "description": "Your current Coinbase API key appears to be for the deprecated v2 API. Please reconfigure with a new API key created for the v3 API. Visit https://www.coinbase.com/developer-platform  to create new credentials for {account_name}.", | ||||
|         "description": "Your current Coinbase API key appears to be for the deprecated v2 API. Please reconfigure with a new API key created for the v3 API. Visit the [Developer Platform]({developer_url}) to create new credentials for {account_name}.", | ||||
|         "data": { | ||||
|           "api_key": "[%key:common::config_flow::data::api_key%]", | ||||
|           "api_token": "API secret" | ||||
|   | ||||
| @@ -7,7 +7,14 @@ from typing import Any, cast | ||||
| from aiocomelit import ComelitSerialBridgeObject | ||||
| from aiocomelit.const import COVER, STATE_COVER, STATE_OFF, STATE_ON | ||||
|  | ||||
| from homeassistant.components.cover import CoverDeviceClass, CoverEntity | ||||
| from homeassistant.components.cover import ( | ||||
|     STATE_CLOSED, | ||||
|     STATE_CLOSING, | ||||
|     STATE_OPEN, | ||||
|     STATE_OPENING, | ||||
|     CoverDeviceClass, | ||||
|     CoverEntity, | ||||
| ) | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback | ||||
| from homeassistant.helpers.restore_state import RestoreEntity | ||||
| @@ -62,7 +69,6 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity): | ||||
|         super().__init__(coordinator, device, config_entry_entry_id) | ||||
|         # Device doesn't provide a status so we assume UNKNOWN at first startup | ||||
|         self._last_action: int | None = None | ||||
|         self._last_state: str | None = None | ||||
|  | ||||
|     def _current_action(self, action: str) -> bool: | ||||
|         """Return the current cover action.""" | ||||
| @@ -98,7 +104,6 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity): | ||||
|     @bridge_api_call | ||||
|     async def _cover_set_state(self, action: int, state: int) -> None: | ||||
|         """Set desired cover state.""" | ||||
|         self._last_state = self.state | ||||
|         await self.coordinator.api.set_device_status(COVER, self._device.index, action) | ||||
|         self.coordinator.data[COVER][self._device.index].status = state | ||||
|         self.async_write_ha_state() | ||||
| @@ -124,5 +129,10 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity): | ||||
|  | ||||
|         await super().async_added_to_hass() | ||||
|  | ||||
|         if last_state := await self.async_get_last_state(): | ||||
|             self._last_state = last_state.state | ||||
|         if (state := await self.async_get_last_state()) is not None: | ||||
|             if state.state == STATE_CLOSED: | ||||
|                 self._last_action = STATE_COVER.index(STATE_CLOSING) | ||||
|             if state.state == STATE_OPEN: | ||||
|                 self._last_action = STATE_COVER.index(STATE_OPENING) | ||||
|  | ||||
|             self._attr_is_closed = state.state == STATE_CLOSED | ||||
|   | ||||
| @@ -8,5 +8,5 @@ | ||||
|   "iot_class": "local_polling", | ||||
|   "loggers": ["aiocomelit"], | ||||
|   "quality_scale": "platinum", | ||||
|   "requirements": ["aiocomelit==1.1.1"] | ||||
|   "requirements": ["aiocomelit==1.1.2"] | ||||
| } | ||||
|   | ||||
| @@ -49,7 +49,7 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) | ||||
| async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: | ||||
|     """Set up the config component.""" | ||||
|     frontend.async_register_built_in_panel( | ||||
|         hass, "config", "config", "mdi:cog", require_admin=True | ||||
|         hass, "config", "config", "hass:cog", require_admin=True | ||||
|     ) | ||||
|  | ||||
|     for panel in SECTIONS: | ||||
|   | ||||
| @@ -4,7 +4,6 @@ from __future__ import annotations | ||||
|  | ||||
| from collections.abc import Callable | ||||
| from http import HTTPStatus | ||||
| import logging | ||||
| from typing import Any, NoReturn | ||||
|  | ||||
| from aiohttp import web | ||||
| @@ -24,12 +23,7 @@ from homeassistant.helpers.data_entry_flow import ( | ||||
|     FlowManagerResourceView, | ||||
| ) | ||||
| from homeassistant.helpers.dispatcher import async_dispatcher_connect | ||||
| from homeassistant.helpers.json import ( | ||||
|     JSON_DUMP, | ||||
|     find_paths_unserializable_data, | ||||
|     json_bytes, | ||||
|     json_fragment, | ||||
| ) | ||||
| from homeassistant.helpers.json import json_fragment | ||||
| from homeassistant.loader import ( | ||||
|     Integration, | ||||
|     IntegrationNotFound, | ||||
| @@ -37,9 +31,6 @@ from homeassistant.loader import ( | ||||
|     async_get_integrations, | ||||
|     async_get_loaded_integration, | ||||
| ) | ||||
| from homeassistant.util.json import format_unserializable_data | ||||
|  | ||||
| _LOGGER = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
| @callback | ||||
| @@ -411,40 +402,18 @@ def config_entries_flow_subscribe( | ||||
|     connection.subscriptions[msg["id"]] = hass.config_entries.flow.async_subscribe_flow( | ||||
|         async_on_flow_init_remove | ||||
|     ) | ||||
|     try: | ||||
|         serialized_flows = [ | ||||
|             json_bytes({"type": None, "flow_id": flw["flow_id"], "flow": flw}) | ||||
|             for flw in hass.config_entries.flow.async_progress() | ||||
|             if flw["context"]["source"] | ||||
|             not in ( | ||||
|                 config_entries.SOURCE_RECONFIGURE, | ||||
|                 config_entries.SOURCE_USER, | ||||
|             ) | ||||
|         ] | ||||
|     except (ValueError, TypeError): | ||||
|         # If we can't serialize, we'll filter out unserializable flows | ||||
|         serialized_flows = [] | ||||
|         for flw in hass.config_entries.flow.async_progress(): | ||||
|             if flw["context"]["source"] in ( | ||||
|                 config_entries.SOURCE_RECONFIGURE, | ||||
|                 config_entries.SOURCE_USER, | ||||
|             ): | ||||
|                 continue | ||||
|             try: | ||||
|                 serialized_flows.append( | ||||
|                     json_bytes({"type": None, "flow_id": flw["flow_id"], "flow": flw}) | ||||
|                 ) | ||||
|             except (ValueError, TypeError): | ||||
|                 _LOGGER.error( | ||||
|                     "Unable to serialize to JSON. Bad data found at %s", | ||||
|                     format_unserializable_data( | ||||
|                         find_paths_unserializable_data(flw, dump=JSON_DUMP) | ||||
|                     ), | ||||
|                 ) | ||||
|                 continue | ||||
|     connection.send_message( | ||||
|         websocket_api.messages.construct_event_message( | ||||
|             msg["id"], b"".join((b"[", b",".join(serialized_flows), b"]")) | ||||
|         websocket_api.event_message( | ||||
|             msg["id"], | ||||
|             [ | ||||
|                 {"type": None, "flow_id": flw["flow_id"], "flow": flw} | ||||
|                 for flw in hass.config_entries.flow.async_progress() | ||||
|                 if flw["context"]["source"] | ||||
|                 not in ( | ||||
|                     config_entries.SOURCE_RECONFIGURE, | ||||
|                     config_entries.SOURCE_USER, | ||||
|                 ) | ||||
|             ], | ||||
|         ) | ||||
|     ) | ||||
|     connection.send_result(msg["id"]) | ||||
|   | ||||
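Editorial note: the hunk above replaces per-flow pre-serialization (with a fallback that logged and dropped unserializable flows) with a plain `event_message` call. The removed defensive pattern, serializing each item separately so one bad payload cannot take down the whole message, can be sketched without any Home Assistant imports; the function name and payloads are illustrative only.

```python
import json
import logging

_LOGGER = logging.getLogger(__name__)


def serialize_items(items: list[dict]) -> bytes:
    """Serialize each item on its own so one unserializable item is skipped, not fatal."""
    chunks: list[bytes] = []
    for item in items:
        try:
            chunks.append(json.dumps(item).encode())
        except (TypeError, ValueError):
            _LOGGER.error("Unable to serialize to JSON, skipping: %r", item)
    return b"[" + b",".join(chunks) + b"]"


if __name__ == "__main__":
    logging.basicConfig()
    flows = [
        {"flow_id": "1", "handler": "demo"},
        {"flow_id": "2", "handler": object()},  # not JSON serializable -> skipped
    ]
    print(serialize_items(flows))  # b'[{"flow_id": "1", "handler": "demo"}]'
```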
| @@ -514,7 +514,7 @@ class ChatLog: | ||||
|         """Set the LLM system prompt.""" | ||||
|         llm_api: llm.APIInstance | None = None | ||||
|  | ||||
|         if user_llm_hass_api is None: | ||||
|         if not user_llm_hass_api: | ||||
|             pass | ||||
|         elif isinstance(user_llm_hass_api, llm.API): | ||||
|             llm_api = await user_llm_hass_api.async_get_api_instance(llm_context) | ||||
|   | ||||
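Editorial note: the one-line `ChatLog` change broadens the guard from `is None` to a general falsiness check, so an empty list of selected LLM APIs is treated the same as "no API configured". The difference in plain Python, with a stand-in function name:

```python
def wants_llm_api(user_llm_hass_api: list[str] | None) -> tuple[bool, bool]:
    """Compare the two guards: identity check vs. general falsiness."""
    strict = user_llm_hass_api is not None   # only None counts as "no API"
    lenient = bool(user_llm_hass_api)        # None, [] and "" all count as "no API"
    return strict, lenient


if __name__ == "__main__":
    print(wants_llm_api(None))        # (False, False)
    print(wants_llm_api([]))          # (True, False)  <- the case the new check targets
    print(wants_llm_api(["assist"]))  # (True, True)
```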
| @@ -7,5 +7,5 @@ | ||||
|   "integration_type": "hub", | ||||
|   "iot_class": "cloud_push", | ||||
|   "quality_scale": "bronze", | ||||
|   "requirements": ["pycync==0.4.1"] | ||||
|   "requirements": ["pycync==0.4.2"] | ||||
| } | ||||
|   | ||||
| @@ -23,7 +23,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession | ||||
| from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC | ||||
| from homeassistant.util.ssl import client_context_no_verify | ||||
|  | ||||
| from .const import KEY_MAC, TIMEOUT_SEC | ||||
| from .const import KEY_MAC, TIMEOUT | ||||
| from .coordinator import DaikinConfigEntry, DaikinCoordinator | ||||
|  | ||||
| _LOGGER = logging.getLogger(__name__) | ||||
| @@ -42,7 +42,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: DaikinConfigEntry) -> bo | ||||
|     session = async_get_clientsession(hass) | ||||
|     host = conf[CONF_HOST] | ||||
|     try: | ||||
|         async with asyncio.timeout(TIMEOUT_SEC): | ||||
|         async with asyncio.timeout(TIMEOUT): | ||||
|             device: Appliance = await DaikinFactory( | ||||
|                 host, | ||||
|                 session, | ||||
| @@ -53,7 +53,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: DaikinConfigEntry) -> bo | ||||
|             ) | ||||
|         _LOGGER.debug("Connection to %s successful", host) | ||||
|     except TimeoutError as err: | ||||
|         _LOGGER.debug("Connection to %s timed out in %s seconds", host, TIMEOUT_SEC) | ||||
|         _LOGGER.debug("Connection to %s timed out in 60 seconds", host) | ||||
|         raise ConfigEntryNotReady from err | ||||
|     except ClientConnectionError as err: | ||||
|         _LOGGER.debug("ClientConnectionError to %s", host) | ||||
|   | ||||
| @@ -20,7 +20,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession | ||||
| from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo | ||||
| from homeassistant.util.ssl import client_context_no_verify | ||||
|  | ||||
| from .const import DOMAIN, KEY_MAC, TIMEOUT_SEC | ||||
| from .const import DOMAIN, KEY_MAC, TIMEOUT | ||||
|  | ||||
| _LOGGER = logging.getLogger(__name__) | ||||
|  | ||||
| @@ -84,7 +84,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): | ||||
|             password = None | ||||
|  | ||||
|         try: | ||||
|             async with asyncio.timeout(TIMEOUT_SEC): | ||||
|             async with asyncio.timeout(TIMEOUT): | ||||
|                 device: Appliance = await DaikinFactory( | ||||
|                     host, | ||||
|                     async_get_clientsession(self.hass), | ||||
|   | ||||
| @@ -24,4 +24,4 @@ ATTR_STATE_OFF = "off" | ||||
| KEY_MAC = "mac" | ||||
| KEY_IP = "ip" | ||||
|  | ||||
| TIMEOUT_SEC = 120 | ||||
| TIMEOUT = 60 | ||||
|   | ||||
| @@ -9,7 +9,7 @@ from homeassistant.config_entries import ConfigEntry | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.helpers.update_coordinator import DataUpdateCoordinator | ||||
|  | ||||
| from .const import DOMAIN, TIMEOUT_SEC | ||||
| from .const import DOMAIN | ||||
|  | ||||
| _LOGGER = logging.getLogger(__name__) | ||||
|  | ||||
| @@ -28,7 +28,7 @@ class DaikinCoordinator(DataUpdateCoordinator[None]): | ||||
|             _LOGGER, | ||||
|             config_entry=entry, | ||||
|             name=device.values.get("name", DOMAIN), | ||||
|             update_interval=timedelta(seconds=TIMEOUT_SEC), | ||||
|             update_interval=timedelta(seconds=60), | ||||
|         ) | ||||
|         self.device = device | ||||
|  | ||||
|   | ||||
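Editorial note: the Daikin hunks rename and resize the connection timeout constant and feed it to `asyncio.timeout()`. A minimal sketch of guarding a slow connection with a shared timeout constant; the 60-second value follows one side of the diff, and the connect coroutine is a stand-in rather than the real `DaikinFactory` call.

```python
import asyncio
import logging

_LOGGER = logging.getLogger(__name__)
TIMEOUT = 60  # seconds; one constant reused by the guard and the log message


async def connect(host: str, delay: float) -> str:
    """Stand-in for the device handshake; sleeps to simulate network latency."""
    await asyncio.sleep(delay)
    return f"connected to {host}"


async def setup(host: str, delay: float) -> str | None:
    try:
        async with asyncio.timeout(TIMEOUT):
            return await connect(host, delay)
    except TimeoutError:
        _LOGGER.debug("Connection to %s timed out in %s seconds", host, TIMEOUT)
        return None


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    print(asyncio.run(setup("192.0.2.10", delay=0.1)))
```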
| @@ -32,7 +32,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: | ||||
|             entry, | ||||
|             options={**entry.options, CONF_SOURCE: source_entity_id}, | ||||
|         ) | ||||
|         hass.config_entries.async_schedule_reload(entry.entry_id) | ||||
|  | ||||
|     entry.async_on_unload( | ||||
|         async_handle_source_entity_changes( | ||||
| @@ -47,9 +46,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: | ||||
|         ) | ||||
|     ) | ||||
|     await hass.config_entries.async_forward_entry_setups(entry, (Platform.SENSOR,)) | ||||
|     entry.async_on_unload(entry.add_update_listener(config_entry_update_listener)) | ||||
|     return True | ||||
|  | ||||
|  | ||||
| async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: | ||||
|     """Update listener, called when the config entry options are changed.""" | ||||
|     await hass.config_entries.async_reload(entry.entry_id) | ||||
|  | ||||
|  | ||||
| async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: | ||||
|     """Unload a config entry.""" | ||||
|     return await hass.config_entries.async_unload_platforms(entry, (Platform.SENSOR,)) | ||||
|   | ||||
| @@ -140,7 +140,6 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN): | ||||
|  | ||||
|     config_flow = CONFIG_FLOW | ||||
|     options_flow = OPTIONS_FLOW | ||||
|     options_flow_reloads = True | ||||
|  | ||||
|     VERSION = 1 | ||||
|     MINOR_VERSION = 4 | ||||
|   | ||||
| @@ -6,13 +6,12 @@ from typing import TYPE_CHECKING, Any, Protocol | ||||
|  | ||||
| import voluptuous as vol | ||||
|  | ||||
| from homeassistant.const import CONF_DOMAIN, CONF_OPTIONS | ||||
| from homeassistant.const import CONF_DOMAIN | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.helpers import config_validation as cv | ||||
| from homeassistant.helpers.condition import ( | ||||
|     Condition, | ||||
|     ConditionCheckerType, | ||||
|     ConditionConfig, | ||||
|     trace_condition_function, | ||||
| ) | ||||
| from homeassistant.helpers.typing import ConfigType | ||||
| @@ -56,40 +55,19 @@ class DeviceAutomationConditionProtocol(Protocol): | ||||
| class DeviceCondition(Condition): | ||||
|     """Device condition.""" | ||||
|  | ||||
|     _hass: HomeAssistant | ||||
|     _config: ConfigType | ||||
|  | ||||
|     @classmethod | ||||
|     async def async_validate_complete_config( | ||||
|         cls, hass: HomeAssistant, complete_config: ConfigType | ||||
|     ) -> ConfigType: | ||||
|         """Validate complete config.""" | ||||
|         complete_config = await async_validate_device_automation_config( | ||||
|             hass, | ||||
|             complete_config, | ||||
|             cv.DEVICE_CONDITION_SCHEMA, | ||||
|             DeviceAutomationType.CONDITION, | ||||
|         ) | ||||
|         # Since we don't want to migrate device conditions to a new format | ||||
|         # we just pass the entire config as options. | ||||
|         complete_config[CONF_OPTIONS] = complete_config.copy() | ||||
|         return complete_config | ||||
|     def __init__(self, hass: HomeAssistant, config: ConfigType) -> None: | ||||
|         """Initialize condition.""" | ||||
|         self._config = config | ||||
|         self._hass = hass | ||||
|  | ||||
|     @classmethod | ||||
|     async def async_validate_config( | ||||
|         cls, hass: HomeAssistant, config: ConfigType | ||||
|     ) -> ConfigType: | ||||
|         """Validate config. | ||||
|  | ||||
|         This is here just to satisfy the abstract class interface. It is never called. | ||||
|         """ | ||||
|         raise NotImplementedError | ||||
|  | ||||
|     def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None: | ||||
|         """Initialize condition.""" | ||||
|         self._hass = hass | ||||
|         assert config.options is not None | ||||
|         self._config = config.options | ||||
|         """Validate device condition config.""" | ||||
|         return await async_validate_device_automation_config( | ||||
|             hass, config, cv.DEVICE_CONDITION_SCHEMA, DeviceAutomationType.CONDITION | ||||
|         ) | ||||
|  | ||||
|     async def async_get_checker(self) -> condition.ConditionCheckerType: | ||||
|         """Test a device condition.""" | ||||
|   | ||||
| @@ -126,7 +126,7 @@ class DevoloRemoteControl(DevoloDeviceEntity, BinarySensorEntity): | ||||
|         self._attr_translation_key = "button" | ||||
|         self._attr_translation_placeholders = {"key": str(key)} | ||||
|  | ||||
|     def sync_callback(self, message: tuple) -> None: | ||||
|     def _sync(self, message: tuple) -> None: | ||||
|         """Update the binary sensor state.""" | ||||
|         if ( | ||||
|             message[0] == self._remote_control_property.element_uid | ||||
|   | ||||
| @@ -48,6 +48,7 @@ class DevoloDeviceEntity(Entity): | ||||
|         ) | ||||
|  | ||||
|         self.subscriber: Subscriber | None = None | ||||
|         self.sync_callback = self._sync | ||||
|  | ||||
|         self._value: float | ||||
|  | ||||
| @@ -68,7 +69,7 @@ class DevoloDeviceEntity(Entity): | ||||
|             self._device_instance.uid, self.subscriber | ||||
|         ) | ||||
|  | ||||
|     def sync_callback(self, message: tuple) -> None: | ||||
|     def _sync(self, message: tuple) -> None: | ||||
|         """Update the state.""" | ||||
|         if message[0] == self._attr_unique_id: | ||||
|             self._value = message[1] | ||||
|   | ||||
| @@ -185,7 +185,7 @@ class DevoloConsumptionEntity(DevoloMultiLevelDeviceEntity): | ||||
|         """ | ||||
|         return f"{self._attr_unique_id}_{self._sensor_type}" | ||||
|  | ||||
|     def sync_callback(self, message: tuple) -> None: | ||||
|     def _sync(self, message: tuple) -> None: | ||||
|         """Update the consumption sensor state.""" | ||||
|         if message[0] == self._attr_unique_id: | ||||
|             self._value = getattr( | ||||
|   | ||||
| @@ -13,3 +13,8 @@ class Subscriber: | ||||
|         """Initiate the subscriber.""" | ||||
|         self.name = name | ||||
|         self.callback = callback | ||||
|  | ||||
|     def update(self, message: str) -> None: | ||||
|         """Trigger hass to update the device.""" | ||||
|         _LOGGER.debug('%s got message "%s"', self.name, message) | ||||
|         self.callback(message) | ||||
|   | ||||
| @@ -64,7 +64,7 @@ class DevoloSwitch(DevoloDeviceEntity, SwitchEntity): | ||||
|         """Switch off the device.""" | ||||
|         self._binary_switch_property.set(state=False) | ||||
|  | ||||
|     def sync_callback(self, message: tuple) -> None: | ||||
|     def _sync(self, message: tuple) -> None: | ||||
|         """Update the binary switch state and consumption.""" | ||||
|         if message[0].startswith("devolo.BinarySwitch"): | ||||
|             self._attr_is_on = self._device_instance.binary_switch_property[ | ||||
|   | ||||
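Editorial note: the devolo hunks rename the entity callback to `_sync` and re-expose it as `self.sync_callback`, while the `Subscriber` helper regains its `update()` method that simply forwards publisher messages to that callback. The delegation chain in a dependency-free sketch; the class names, UID format, and message shape are simplified stand-ins.

```python
import logging

_LOGGER = logging.getLogger(__name__)


class Subscriber:
    """Forward publisher messages to a callback (simplified)."""

    def __init__(self, name: str, callback) -> None:
        self.name = name
        self.callback = callback

    def update(self, message) -> None:
        _LOGGER.debug('%s got message "%s"', self.name, message)
        self.callback(message)


class Entity:
    """Stand-in entity: the integration aliases sync_callback to the private _sync."""

    def __init__(self, uid: str) -> None:
        self.uid = uid
        self.value = None
        self.sync_callback = self._sync

    def _sync(self, message: tuple) -> None:
        if message[0] == self.uid:
            self.value = message[1]


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    entity = Entity("devolo.Meter:hdm:ZWave:1")
    Subscriber(entity.uid, entity.sync_callback).update((entity.uid, 42.0))
    print(entity.value)  # 42.0
```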
| @@ -17,6 +17,6 @@ | ||||
|   "requirements": [ | ||||
|     "aiodhcpwatcher==1.2.1", | ||||
|     "aiodiscover==2.7.1", | ||||
|     "cached-ipaddress==1.0.1" | ||||
|     "cached-ipaddress==0.10.0" | ||||
|   ] | ||||
| } | ||||
|   | ||||
| @@ -56,16 +56,16 @@ async def async_setup_entry( | ||||
|     hostname = entry.data[CONF_HOSTNAME] | ||||
|     name = entry.data[CONF_NAME] | ||||
|  | ||||
|     nameserver_ipv4 = entry.options[CONF_RESOLVER] | ||||
|     nameserver_ipv6 = entry.options[CONF_RESOLVER_IPV6] | ||||
|     resolver_ipv4 = entry.options[CONF_RESOLVER] | ||||
|     resolver_ipv6 = entry.options[CONF_RESOLVER_IPV6] | ||||
|     port_ipv4 = entry.options[CONF_PORT] | ||||
|     port_ipv6 = entry.options[CONF_PORT_IPV6] | ||||
|  | ||||
|     entities = [] | ||||
|     if entry.data[CONF_IPV4]: | ||||
|         entities.append(WanIpSensor(name, hostname, nameserver_ipv4, False, port_ipv4)) | ||||
|         entities.append(WanIpSensor(name, hostname, resolver_ipv4, False, port_ipv4)) | ||||
|     if entry.data[CONF_IPV6]: | ||||
|         entities.append(WanIpSensor(name, hostname, nameserver_ipv6, True, port_ipv6)) | ||||
|         entities.append(WanIpSensor(name, hostname, resolver_ipv6, True, port_ipv6)) | ||||
|  | ||||
|     async_add_entities(entities, update_before_add=True) | ||||
|  | ||||
| @@ -77,13 +77,11 @@ class WanIpSensor(SensorEntity): | ||||
|     _attr_translation_key = "dnsip" | ||||
|     _unrecorded_attributes = frozenset({"resolver", "querytype", "ip_addresses"}) | ||||
|  | ||||
|     resolver: aiodns.DNSResolver | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         name: str, | ||||
|         hostname: str, | ||||
|         nameserver: str, | ||||
|         resolver: str, | ||||
|         ipv6: bool, | ||||
|         port: int, | ||||
|     ) -> None: | ||||
| @@ -92,11 +90,11 @@ class WanIpSensor(SensorEntity): | ||||
|         self._attr_unique_id = f"{hostname}_{ipv6}" | ||||
|         self.hostname = hostname | ||||
|         self.port = port | ||||
|         self.nameserver = nameserver | ||||
|         self._resolver = resolver | ||||
|         self.querytype: Literal["A", "AAAA"] = "AAAA" if ipv6 else "A" | ||||
|         self._retries = DEFAULT_RETRIES | ||||
|         self._attr_extra_state_attributes = { | ||||
|             "resolver": nameserver, | ||||
|             "resolver": resolver, | ||||
|             "querytype": self.querytype, | ||||
|         } | ||||
|         self._attr_device_info = DeviceInfo( | ||||
| @@ -106,13 +104,13 @@ class WanIpSensor(SensorEntity): | ||||
|             model=aiodns.__version__, | ||||
|             name=name, | ||||
|         ) | ||||
|         self.resolver: aiodns.DNSResolver | ||||
|         self.create_dns_resolver() | ||||
|  | ||||
|     def create_dns_resolver(self) -> None: | ||||
|         """Create the DNS resolver.""" | ||||
|         self.resolver = aiodns.DNSResolver( | ||||
|             nameservers=[self.nameserver], tcp_port=self.port, udp_port=self.port | ||||
|         ) | ||||
|         self.resolver = aiodns.DNSResolver(tcp_port=self.port, udp_port=self.port) | ||||
|         self.resolver.nameservers = [self._resolver] | ||||
|  | ||||
|     async def async_update(self) -> None: | ||||
|         """Get the current DNS IP address for hostname.""" | ||||
|   | ||||
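Editorial note: the dnsip hunks go back to storing the resolver address on the entity and assigning `resolver.nameservers` after construction instead of passing `nameservers=` to the constructor; both spellings appear in the diff. A minimal query sketch under the assumption that `aiodns` is installed; the nameserver and hostname are examples, and the exact query/result API is taken from common aiodns usage rather than from this diff.

```python
import asyncio

import aiodns


async def lookup(hostname: str, nameserver: str, port: int = 53) -> list[str]:
    # Equivalent to aiodns.DNSResolver(nameservers=[nameserver], tcp_port=port, udp_port=port)
    resolver = aiodns.DNSResolver(tcp_port=port, udp_port=port)
    resolver.nameservers = [nameserver]
    response = await resolver.query(hostname, "A")
    return [entry.host for entry in response]


if __name__ == "__main__":
    # Classic "what is my public IP" lookup against an OpenDNS resolver.
    print(asyncio.run(lookup("myip.opendns.com", "208.67.222.222")))
```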
| @@ -6,6 +6,6 @@ | ||||
|   "documentation": "https://www.home-assistant.io/integrations/droplet", | ||||
|   "iot_class": "local_push", | ||||
|   "quality_scale": "bronze", | ||||
|   "requirements": ["pydroplet==2.3.3"], | ||||
|   "requirements": ["pydroplet==2.3.4"], | ||||
|   "zeroconf": ["_droplet._tcp.local."] | ||||
| } | ||||
|   | ||||
| @@ -61,5 +61,8 @@ class EcobeeFlowHandler(ConfigFlow, domain=DOMAIN): | ||||
|         return self.async_show_form( | ||||
|             step_id="authorize", | ||||
|             errors=errors, | ||||
|             description_placeholders={"pin": self._ecobee.pin}, | ||||
|             description_placeholders={ | ||||
|                 "pin": self._ecobee.pin, | ||||
|                 "auth_url": "https://www.ecobee.com/consumerportal/index.html", | ||||
|             }, | ||||
|         ) | ||||
|   | ||||
| @@ -8,7 +8,7 @@ | ||||
|         } | ||||
|       }, | ||||
|       "authorize": { | ||||
|         "description": "Please authorize this app at https://www.ecobee.com/consumerportal/index.html with PIN code:\n\n{pin}\n\nThen, select **Submit**." | ||||
|         "description": "Please authorize this app at {auth_url} with PIN code:\n\n{pin}\n\nThen, select **Submit**." | ||||
|       } | ||||
|     }, | ||||
|     "error": { | ||||
|   | ||||
| @@ -116,9 +116,6 @@ | ||||
|       } | ||||
|     }, | ||||
|     "select": { | ||||
|       "active_map": { | ||||
|         "default": "mdi:floor-plan" | ||||
|       }, | ||||
|       "water_amount": { | ||||
|         "default": "mdi:water" | ||||
|       }, | ||||
|   | ||||
| @@ -6,5 +6,5 @@ | ||||
|   "documentation": "https://www.home-assistant.io/integrations/ecovacs", | ||||
|   "iot_class": "cloud_push", | ||||
|   "loggers": ["sleekxmppfs", "sucks", "deebot_client"], | ||||
|   "requirements": ["py-sucks==0.9.11", "deebot-client==15.0.0"] | ||||
|   "requirements": ["py-sucks==0.9.11", "deebot-client==15.1.0"] | ||||
| } | ||||
|   | ||||
| @@ -2,13 +2,12 @@ | ||||
|  | ||||
| from collections.abc import Callable | ||||
| from dataclasses import dataclass | ||||
| from typing import TYPE_CHECKING, Any | ||||
| from typing import Any | ||||
|  | ||||
| from deebot_client.capabilities import CapabilityMap, CapabilitySet, CapabilitySetTypes | ||||
| from deebot_client.capabilities import CapabilitySetTypes | ||||
| from deebot_client.device import Device | ||||
| from deebot_client.events import WorkModeEvent | ||||
| from deebot_client.events.base import Event | ||||
| from deebot_client.events.map import CachedMapInfoEvent, MajorMapEvent | ||||
| from deebot_client.events.water_info import WaterAmountEvent | ||||
|  | ||||
| from homeassistant.components.select import SelectEntity, SelectEntityDescription | ||||
| @@ -17,11 +16,7 @@ from homeassistant.core import HomeAssistant | ||||
| from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback | ||||
|  | ||||
| from . import EcovacsConfigEntry | ||||
| from .entity import ( | ||||
|     EcovacsCapabilityEntityDescription, | ||||
|     EcovacsDescriptionEntity, | ||||
|     EcovacsEntity, | ||||
| ) | ||||
| from .entity import EcovacsCapabilityEntityDescription, EcovacsDescriptionEntity | ||||
| from .util import get_name_key, get_supported_entities | ||||
|  | ||||
|  | ||||
| @@ -71,12 +66,6 @@ async def async_setup_entry( | ||||
|     entities = get_supported_entities( | ||||
|         controller, EcovacsSelectEntity, ENTITY_DESCRIPTIONS | ||||
|     ) | ||||
|     entities.extend( | ||||
|         EcovacsActiveMapSelectEntity(device, device.capabilities.map) | ||||
|         for device in controller.devices | ||||
|         if (map_cap := device.capabilities.map) | ||||
|         and isinstance(map_cap.major, CapabilitySet) | ||||
|     ) | ||||
|     if entities: | ||||
|         async_add_entities(entities) | ||||
|  | ||||
| @@ -114,76 +103,3 @@ class EcovacsSelectEntity[EventT: Event]( | ||||
|     async def async_select_option(self, option: str) -> None: | ||||
|         """Change the selected option.""" | ||||
|         await self._device.execute_command(self._capability.set(option)) | ||||
|  | ||||
|  | ||||
| class EcovacsActiveMapSelectEntity( | ||||
|     EcovacsEntity[CapabilityMap], | ||||
|     SelectEntity, | ||||
| ): | ||||
|     """Ecovacs active map select entity.""" | ||||
|  | ||||
|     entity_description = SelectEntityDescription( | ||||
|         key="active_map", | ||||
|         translation_key="active_map", | ||||
|         entity_category=EntityCategory.CONFIG, | ||||
|     ) | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         device: Device, | ||||
|         capability: CapabilityMap, | ||||
|         **kwargs: Any, | ||||
|     ) -> None: | ||||
|         """Initialize entity.""" | ||||
|         super().__init__(device, capability, **kwargs) | ||||
|         self._option_to_id: dict[str, str] = {} | ||||
|         self._id_to_option: dict[str, str] = {} | ||||
|  | ||||
|         self._handle_on_cached_map( | ||||
|             device.events.get_last_event(CachedMapInfoEvent) | ||||
|             or CachedMapInfoEvent(set()) | ||||
|         ) | ||||
|  | ||||
|     def _handle_on_cached_map(self, event: CachedMapInfoEvent) -> None: | ||||
|         self._id_to_option.clear() | ||||
|         self._option_to_id.clear() | ||||
|  | ||||
|         for map_info in event.maps: | ||||
|             name = map_info.name if map_info.name else map_info.id | ||||
|             self._id_to_option[map_info.id] = name | ||||
|             self._option_to_id[name] = map_info.id | ||||
|  | ||||
|             if map_info.using: | ||||
|                 self._attr_current_option = name | ||||
|  | ||||
|         if self._attr_current_option not in self._option_to_id: | ||||
|             self._attr_current_option = None | ||||
|  | ||||
|         # Sort named maps first, then numeric IDs (unnamed maps during building) in ascending order. | ||||
|         self._attr_options = sorted( | ||||
|             self._option_to_id.keys(), key=lambda x: (x.isdigit(), x.lower()) | ||||
|         ) | ||||
|  | ||||
|     async def async_added_to_hass(self) -> None: | ||||
|         """Set up the event listeners now that hass is ready.""" | ||||
|         await super().async_added_to_hass() | ||||
|  | ||||
|         async def on_cached_map(event: CachedMapInfoEvent) -> None: | ||||
|             self._handle_on_cached_map(event) | ||||
|             self.async_write_ha_state() | ||||
|  | ||||
|         self._subscribe(self._capability.cached_info.event, on_cached_map) | ||||
|  | ||||
|         async def on_major_map(event: MajorMapEvent) -> None: | ||||
|             self._attr_current_option = self._id_to_option.get(event.map_id) | ||||
|             self.async_write_ha_state() | ||||
|  | ||||
|         self._subscribe(self._capability.major.event, on_major_map) | ||||
|  | ||||
|     async def async_select_option(self, option: str) -> None: | ||||
|         """Change the selected option.""" | ||||
|         if TYPE_CHECKING: | ||||
|             assert isinstance(self._capability.major, CapabilitySet) | ||||
|         await self._device.execute_command( | ||||
|             self._capability.major.set(self._option_to_id[option]) | ||||
|         ) | ||||
|   | ||||
| @@ -2,4 +2,3 @@ raw_get_positions: | ||||
|   target: | ||||
|     entity: | ||||
|       domain: vacuum | ||||
|       integration: ecovacs | ||||
|   | ||||
| @@ -178,9 +178,6 @@ | ||||
|       } | ||||
|     }, | ||||
|     "select": { | ||||
|       "active_map": { | ||||
|         "name": "Active map" | ||||
|       }, | ||||
|       "water_amount": { | ||||
|         "name": "[%key:component::ecovacs::entity::number::water_amount::name%]", | ||||
|         "state": { | ||||
|   | ||||
| @@ -7,7 +7,7 @@ | ||||
|   "iot_class": "local_polling", | ||||
|   "loggers": ["pyenphase"], | ||||
|   "quality_scale": "platinum", | ||||
|   "requirements": ["pyenphase==2.4.0"], | ||||
|   "requirements": ["pyenphase==2.3.0"], | ||||
|   "zeroconf": [ | ||||
|     { | ||||
|       "type": "_enphase-envoy._tcp.local." | ||||
|   | ||||
| @@ -396,7 +396,6 @@ class EnvoyCTSensorEntityDescription(SensorEntityDescription): | ||||
|         int | float | str | CtType | CtMeterStatus | CtStatusFlags | CtState | None, | ||||
|     ] | ||||
|     on_phase: str | None | ||||
|     cttype: str | None = None | ||||
|  | ||||
|  | ||||
| CT_NET_CONSUMPTION_SENSORS = ( | ||||
| @@ -410,7 +409,6 @@ CT_NET_CONSUMPTION_SENSORS = ( | ||||
|         suggested_display_precision=3, | ||||
|         value_fn=attrgetter("energy_delivered"), | ||||
|         on_phase=None, | ||||
|         cttype=CtType.NET_CONSUMPTION, | ||||
|     ), | ||||
|     EnvoyCTSensorEntityDescription( | ||||
|         key="lifetime_net_production", | ||||
| @@ -422,7 +420,6 @@ CT_NET_CONSUMPTION_SENSORS = ( | ||||
|         suggested_display_precision=3, | ||||
|         value_fn=attrgetter("energy_received"), | ||||
|         on_phase=None, | ||||
|         cttype=CtType.NET_CONSUMPTION, | ||||
|     ), | ||||
|     EnvoyCTSensorEntityDescription( | ||||
|         key="net_consumption", | ||||
| @@ -434,7 +431,6 @@ CT_NET_CONSUMPTION_SENSORS = ( | ||||
|         suggested_display_precision=3, | ||||
|         value_fn=attrgetter("active_power"), | ||||
|         on_phase=None, | ||||
|         cttype=CtType.NET_CONSUMPTION, | ||||
|     ), | ||||
|     EnvoyCTSensorEntityDescription( | ||||
|         key="frequency", | ||||
| @@ -446,7 +442,6 @@ CT_NET_CONSUMPTION_SENSORS = ( | ||||
|         entity_registry_enabled_default=False, | ||||
|         value_fn=attrgetter("frequency"), | ||||
|         on_phase=None, | ||||
|         cttype=CtType.NET_CONSUMPTION, | ||||
|     ), | ||||
|     EnvoyCTSensorEntityDescription( | ||||
|         key="voltage", | ||||
| @@ -459,7 +454,6 @@ CT_NET_CONSUMPTION_SENSORS = ( | ||||
|         entity_registry_enabled_default=False, | ||||
|         value_fn=attrgetter("voltage"), | ||||
|         on_phase=None, | ||||
|         cttype=CtType.NET_CONSUMPTION, | ||||
|     ), | ||||
|     EnvoyCTSensorEntityDescription( | ||||
|         key="net_ct_current", | ||||
| @@ -472,7 +466,6 @@ CT_NET_CONSUMPTION_SENSORS = ( | ||||
|         entity_registry_enabled_default=False, | ||||
|         value_fn=attrgetter("current"), | ||||
|         on_phase=None, | ||||
|         cttype=CtType.NET_CONSUMPTION, | ||||
|     ), | ||||
|     EnvoyCTSensorEntityDescription( | ||||
|         key="net_ct_powerfactor", | ||||
| @@ -483,7 +476,6 @@ CT_NET_CONSUMPTION_SENSORS = ( | ||||
|         entity_registry_enabled_default=False, | ||||
|         value_fn=attrgetter("power_factor"), | ||||
|         on_phase=None, | ||||
|         cttype=CtType.NET_CONSUMPTION, | ||||
|     ), | ||||
|     EnvoyCTSensorEntityDescription( | ||||
|         key="net_consumption_ct_metering_status", | ||||
| @@ -494,7 +486,6 @@ CT_NET_CONSUMPTION_SENSORS = ( | ||||
|         entity_registry_enabled_default=False, | ||||
|         value_fn=attrgetter("metering_status"), | ||||
|         on_phase=None, | ||||
|         cttype=CtType.NET_CONSUMPTION, | ||||
|     ), | ||||
|     EnvoyCTSensorEntityDescription( | ||||
|         key="net_consumption_ct_status_flags", | ||||
| @@ -504,7 +495,6 @@ CT_NET_CONSUMPTION_SENSORS = ( | ||||
|         entity_registry_enabled_default=False, | ||||
|         value_fn=lambda ct: 0 if ct.status_flags is None else len(ct.status_flags), | ||||
|         on_phase=None, | ||||
|         cttype=CtType.NET_CONSUMPTION, | ||||
|     ), | ||||
| ) | ||||
|  | ||||
| @@ -535,7 +525,6 @@ CT_PRODUCTION_SENSORS = ( | ||||
|         entity_registry_enabled_default=False, | ||||
|         value_fn=attrgetter("frequency"), | ||||
|         on_phase=None, | ||||
|         cttype=CtType.PRODUCTION, | ||||
|     ), | ||||
|     EnvoyCTSensorEntityDescription( | ||||
|         key="production_ct_voltage", | ||||
| @@ -548,7 +537,6 @@ CT_PRODUCTION_SENSORS = ( | ||||
|         entity_registry_enabled_default=False, | ||||
|         value_fn=attrgetter("voltage"), | ||||
|         on_phase=None, | ||||
|         cttype=CtType.PRODUCTION, | ||||
|     ), | ||||
|     EnvoyCTSensorEntityDescription( | ||||
|         key="production_ct_current", | ||||
| @@ -561,7 +549,6 @@ CT_PRODUCTION_SENSORS = ( | ||||
|         entity_registry_enabled_default=False, | ||||
|         value_fn=attrgetter("current"), | ||||
|         on_phase=None, | ||||
|         cttype=CtType.PRODUCTION, | ||||
|     ), | ||||
|     EnvoyCTSensorEntityDescription( | ||||
|         key="production_ct_powerfactor", | ||||
| @@ -572,7 +559,6 @@ CT_PRODUCTION_SENSORS = ( | ||||
|         entity_registry_enabled_default=False, | ||||
|         value_fn=attrgetter("power_factor"), | ||||
|         on_phase=None, | ||||
|         cttype=CtType.PRODUCTION, | ||||
|     ), | ||||
|     EnvoyCTSensorEntityDescription( | ||||
|         key="production_ct_metering_status", | ||||
| @@ -583,7 +569,6 @@ CT_PRODUCTION_SENSORS = ( | ||||
|         entity_registry_enabled_default=False, | ||||
|         value_fn=attrgetter("metering_status"), | ||||
|         on_phase=None, | ||||
|         cttype=CtType.PRODUCTION, | ||||
|     ), | ||||
|     EnvoyCTSensorEntityDescription( | ||||
|         key="production_ct_status_flags", | ||||
| @@ -593,7 +578,6 @@ CT_PRODUCTION_SENSORS = ( | ||||
|         entity_registry_enabled_default=False, | ||||
|         value_fn=lambda ct: 0 if ct.status_flags is None else len(ct.status_flags), | ||||
|         on_phase=None, | ||||
|         cttype=CtType.PRODUCTION, | ||||
|     ), | ||||
| ) | ||||
|  | ||||
| @@ -623,7 +607,6 @@ CT_STORAGE_SENSORS = ( | ||||
|         suggested_display_precision=3, | ||||
|         value_fn=attrgetter("energy_delivered"), | ||||
|         on_phase=None, | ||||
|         cttype=CtType.STORAGE, | ||||
|     ), | ||||
|     EnvoyCTSensorEntityDescription( | ||||
|         key="lifetime_battery_charged", | ||||
| @@ -635,7 +618,6 @@ CT_STORAGE_SENSORS = ( | ||||
|         suggested_display_precision=3, | ||||
|         value_fn=attrgetter("energy_received"), | ||||
|         on_phase=None, | ||||
|         cttype=CtType.STORAGE, | ||||
|     ), | ||||
|     EnvoyCTSensorEntityDescription( | ||||
|         key="battery_discharge", | ||||
| @@ -647,7 +629,6 @@ CT_STORAGE_SENSORS = ( | ||||
|         suggested_display_precision=3, | ||||
|         value_fn=attrgetter("active_power"), | ||||
|         on_phase=None, | ||||
|         cttype=CtType.STORAGE, | ||||
|     ), | ||||
|     EnvoyCTSensorEntityDescription( | ||||
|         key="storage_ct_frequency", | ||||
| @@ -659,7 +640,6 @@ CT_STORAGE_SENSORS = ( | ||||
|         entity_registry_enabled_default=False, | ||||
|         value_fn=attrgetter("frequency"), | ||||
|         on_phase=None, | ||||
|         cttype=CtType.STORAGE, | ||||
|     ), | ||||
|     EnvoyCTSensorEntityDescription( | ||||
|         key="storage_voltage", | ||||
| @@ -672,7 +652,6 @@ CT_STORAGE_SENSORS = ( | ||||
|         entity_registry_enabled_default=False, | ||||
|         value_fn=attrgetter("voltage"), | ||||
|         on_phase=None, | ||||
|         cttype=CtType.STORAGE, | ||||
|     ), | ||||
|     EnvoyCTSensorEntityDescription( | ||||
|         key="storage_ct_current", | ||||
| @@ -685,7 +664,6 @@ CT_STORAGE_SENSORS = ( | ||||
|         entity_registry_enabled_default=False, | ||||
|         value_fn=attrgetter("current"), | ||||
|         on_phase=None, | ||||
|         cttype=CtType.STORAGE, | ||||
|     ), | ||||
|     EnvoyCTSensorEntityDescription( | ||||
|         key="storage_ct_powerfactor", | ||||
| @@ -696,7 +674,6 @@ CT_STORAGE_SENSORS = ( | ||||
|         entity_registry_enabled_default=False, | ||||
|         value_fn=attrgetter("power_factor"), | ||||
|         on_phase=None, | ||||
|         cttype=CtType.STORAGE, | ||||
|     ), | ||||
|     EnvoyCTSensorEntityDescription( | ||||
|         key="storage_ct_metering_status", | ||||
| @@ -707,7 +684,6 @@ CT_STORAGE_SENSORS = ( | ||||
|         entity_registry_enabled_default=False, | ||||
|         value_fn=attrgetter("metering_status"), | ||||
|         on_phase=None, | ||||
|         cttype=CtType.STORAGE, | ||||
|     ), | ||||
|     EnvoyCTSensorEntityDescription( | ||||
|         key="storage_ct_status_flags", | ||||
| @@ -717,7 +693,6 @@ CT_STORAGE_SENSORS = ( | ||||
|         entity_registry_enabled_default=False, | ||||
|         value_fn=lambda ct: 0 if ct.status_flags is None else len(ct.status_flags), | ||||
|         on_phase=None, | ||||
|         cttype=CtType.STORAGE, | ||||
|     ), | ||||
| ) | ||||
|  | ||||
| @@ -1040,31 +1015,50 @@ async def async_setup_entry( | ||||
|             for description in NET_CONSUMPTION_PHASE_SENSORS[use_phase] | ||||
|             if phase is not None | ||||
|         ) | ||||
|     # Add Current Transformer entities | ||||
|     if envoy_data.ctmeters: | ||||
|     # Add net consumption CT entities | ||||
|     if ctmeter := envoy_data.ctmeter_consumption: | ||||
|         entities.extend( | ||||
|             EnvoyCTEntity(coordinator, description) | ||||
|             for sensors in ( | ||||
|                 CT_NET_CONSUMPTION_SENSORS, | ||||
|                 CT_PRODUCTION_SENSORS, | ||||
|                 CT_STORAGE_SENSORS, | ||||
|             ) | ||||
|             for description in sensors | ||||
|             if description.cttype in envoy_data.ctmeters | ||||
|             EnvoyConsumptionCTEntity(coordinator, description) | ||||
|             for description in CT_NET_CONSUMPTION_SENSORS | ||||
|             if ctmeter.measurement_type == CtType.NET_CONSUMPTION | ||||
|         ) | ||||
|     # Add Current Transformer phase entities | ||||
|     if ctmeters_phases := envoy_data.ctmeters_phases: | ||||
|     # For each net consumption ct phase reported add net consumption entities | ||||
|     if phase_data := envoy_data.ctmeter_consumption_phases: | ||||
|         entities.extend( | ||||
|             EnvoyCTPhaseEntity(coordinator, description) | ||||
|             for sensors in ( | ||||
|                 CT_NET_CONSUMPTION_PHASE_SENSORS, | ||||
|                 CT_PRODUCTION_PHASE_SENSORS, | ||||
|                 CT_STORAGE_PHASE_SENSORS, | ||||
|             ) | ||||
|             for phase, descriptions in sensors.items() | ||||
|             for description in descriptions | ||||
|             if (cttype := description.cttype) in ctmeters_phases | ||||
|             and phase in ctmeters_phases[cttype] | ||||
|             EnvoyConsumptionCTPhaseEntity(coordinator, description) | ||||
|             for use_phase, phase in phase_data.items() | ||||
|             for description in CT_NET_CONSUMPTION_PHASE_SENSORS[use_phase] | ||||
|             if phase.measurement_type == CtType.NET_CONSUMPTION | ||||
|         ) | ||||
|     # Add production CT entities | ||||
|     if ctmeter := envoy_data.ctmeter_production: | ||||
|         entities.extend( | ||||
|             EnvoyProductionCTEntity(coordinator, description) | ||||
|             for description in CT_PRODUCTION_SENSORS | ||||
|             if ctmeter.measurement_type == CtType.PRODUCTION | ||||
|         ) | ||||
|     # For each production ct phase reported add production ct entities | ||||
|     if phase_data := envoy_data.ctmeter_production_phases: | ||||
|         entities.extend( | ||||
|             EnvoyProductionCTPhaseEntity(coordinator, description) | ||||
|             for use_phase, phase in phase_data.items() | ||||
|             for description in CT_PRODUCTION_PHASE_SENSORS[use_phase] | ||||
|             if phase.measurement_type == CtType.PRODUCTION | ||||
|         ) | ||||
|     # Add storage CT entities | ||||
|     if ctmeter := envoy_data.ctmeter_storage: | ||||
|         entities.extend( | ||||
|             EnvoyStorageCTEntity(coordinator, description) | ||||
|             for description in CT_STORAGE_SENSORS | ||||
|             if ctmeter.measurement_type == CtType.STORAGE | ||||
|         ) | ||||
|     # For each storage ct phase reported add storage ct entities | ||||
|     if phase_data := envoy_data.ctmeter_storage_phases: | ||||
|         entities.extend( | ||||
|             EnvoyStorageCTPhaseEntity(coordinator, description) | ||||
|             for use_phase, phase in phase_data.items() | ||||
|             for description in CT_STORAGE_PHASE_SENSORS[use_phase] | ||||
|             if phase.measurement_type == CtType.STORAGE | ||||
|         ) | ||||
|  | ||||
|     if envoy_data.inverters: | ||||
| @@ -1251,8 +1245,8 @@ class EnvoyNetConsumptionPhaseEntity(EnvoySystemSensorEntity): | ||||
|         return self.entity_description.value_fn(system_net_consumption) | ||||
|  | ||||
|  | ||||
| class EnvoyCTEntity(EnvoySystemSensorEntity): | ||||
|     """Envoy CT entity.""" | ||||
| class EnvoyConsumptionCTEntity(EnvoySystemSensorEntity): | ||||
|     """Envoy net consumption CT entity.""" | ||||
|  | ||||
|     entity_description: EnvoyCTSensorEntityDescription | ||||
|  | ||||
| @@ -1261,13 +1255,13 @@ class EnvoyCTEntity(EnvoySystemSensorEntity): | ||||
|         self, | ||||
|     ) -> int | float | str | CtType | CtMeterStatus | CtStatusFlags | None: | ||||
|         """Return the state of the CT sensor.""" | ||||
|         if (cttype := self.entity_description.cttype) not in self.data.ctmeters: | ||||
|         if (ctmeter := self.data.ctmeter_consumption) is None: | ||||
|             return None | ||||
|         return self.entity_description.value_fn(self.data.ctmeters[cttype]) | ||||
|         return self.entity_description.value_fn(ctmeter) | ||||
|  | ||||
|  | ||||
| class EnvoyCTPhaseEntity(EnvoySystemSensorEntity): | ||||
|     """Envoy CT phase entity.""" | ||||
| class EnvoyConsumptionCTPhaseEntity(EnvoySystemSensorEntity): | ||||
|     """Envoy net consumption CT phase entity.""" | ||||
|  | ||||
|     entity_description: EnvoyCTSensorEntityDescription | ||||
|  | ||||
| @@ -1278,14 +1272,78 @@ class EnvoyCTPhaseEntity(EnvoySystemSensorEntity): | ||||
|         """Return the state of the CT phase sensor.""" | ||||
|         if TYPE_CHECKING: | ||||
|             assert self.entity_description.on_phase | ||||
|         if (cttype := self.entity_description.cttype) not in self.data.ctmeters_phases: | ||||
|             return None | ||||
|         if (phase := self.entity_description.on_phase) not in self.data.ctmeters_phases[ | ||||
|             cttype | ||||
|         ]: | ||||
|         if (ctmeter := self.data.ctmeter_consumption_phases) is None: | ||||
|             return None | ||||
|         return self.entity_description.value_fn( | ||||
|             self.data.ctmeters_phases[cttype][phase] | ||||
|             ctmeter[self.entity_description.on_phase] | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class EnvoyProductionCTEntity(EnvoySystemSensorEntity): | ||||
|     """Envoy net consumption CT entity.""" | ||||
|  | ||||
|     entity_description: EnvoyCTSensorEntityDescription | ||||
|  | ||||
|     @property | ||||
|     def native_value( | ||||
|         self, | ||||
|     ) -> int | float | str | CtType | CtMeterStatus | CtStatusFlags | None: | ||||
|         """Return the state of the CT sensor.""" | ||||
|         if (ctmeter := self.data.ctmeter_production) is None: | ||||
|             return None | ||||
|         return self.entity_description.value_fn(ctmeter) | ||||
|  | ||||
|  | ||||
| class EnvoyProductionCTPhaseEntity(EnvoySystemSensorEntity): | ||||
|     """Envoy net consumption CT phase entity.""" | ||||
|  | ||||
|     entity_description: EnvoyCTSensorEntityDescription | ||||
|  | ||||
|     @property | ||||
|     def native_value( | ||||
|         self, | ||||
|     ) -> int | float | str | CtType | CtMeterStatus | CtStatusFlags | None: | ||||
|         """Return the state of the CT phase sensor.""" | ||||
|         if TYPE_CHECKING: | ||||
|             assert self.entity_description.on_phase | ||||
|         if (ctmeter := self.data.ctmeter_production_phases) is None: | ||||
|             return None | ||||
|         return self.entity_description.value_fn( | ||||
|             ctmeter[self.entity_description.on_phase] | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class EnvoyStorageCTEntity(EnvoySystemSensorEntity): | ||||
|     """Envoy net storage CT entity.""" | ||||
|  | ||||
|     entity_description: EnvoyCTSensorEntityDescription | ||||
|  | ||||
|     @property | ||||
|     def native_value( | ||||
|         self, | ||||
|     ) -> int | float | str | CtType | CtMeterStatus | CtStatusFlags | None: | ||||
|         """Return the state of the CT sensor.""" | ||||
|         if (ctmeter := self.data.ctmeter_storage) is None: | ||||
|             return None | ||||
|         return self.entity_description.value_fn(ctmeter) | ||||
|  | ||||
|  | ||||
| class EnvoyStorageCTPhaseEntity(EnvoySystemSensorEntity): | ||||
|     """Envoy net storage CT phase entity.""" | ||||
|  | ||||
|     entity_description: EnvoyCTSensorEntityDescription | ||||
|  | ||||
|     @property | ||||
|     def native_value( | ||||
|         self, | ||||
|     ) -> int | float | str | CtType | CtMeterStatus | CtStatusFlags | None: | ||||
|         """Return the state of the CT phase sensor.""" | ||||
|         if TYPE_CHECKING: | ||||
|             assert self.entity_description.on_phase | ||||
|         if (ctmeter := self.data.ctmeter_storage_phases) is None: | ||||
|             return None | ||||
|         return self.entity_description.value_fn( | ||||
|             ctmeter[self.entity_description.on_phase] | ||||
|         ) | ||||
|  | ||||
|  | ||||
|   | ||||
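Each restored CT entity class above has the same shape: `native_value` returns `None` while its meter (or phase mapping) has not reported, and otherwise applies the description's `value_fn` to the meter data. A stripped-down, library-independent sketch of that guard-then-`value_fn` pattern (the data object and description class are hypothetical stand-ins, not the pyenphase types):

```python
from collections.abc import Callable
from dataclasses import dataclass
from typing import Any


@dataclass(frozen=True)
class CTDescriptionSketch:
    """Minimal stand-in for EnvoyCTSensorEntityDescription."""

    key: str
    value_fn: Callable[[Any], Any]


class CTEntitySketch:
    """Illustrates the guard used by every per-type CT entity in the diff."""

    def __init__(self, data: Any, description: CTDescriptionSketch) -> None:
        self.data = data
        self.entity_description = description

    @property
    def native_value(self) -> Any:
        """Return None until the meter reports, then apply value_fn."""
        if (ctmeter := getattr(self.data, "ctmeter_production", None)) is None:
            return None
        return self.entity_description.value_fn(ctmeter)
```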
| @@ -6,5 +6,5 @@ | ||||
|   "documentation": "https://www.home-assistant.io/integrations/environment_canada", | ||||
|   "iot_class": "cloud_polling", | ||||
|   "loggers": ["env_canada"], | ||||
|   "requirements": ["env-canada==0.11.2"] | ||||
|   "requirements": ["env-canada==0.11.3"] | ||||
| } | ||||
|   | ||||
| @@ -22,23 +22,19 @@ import voluptuous as vol | ||||
|  | ||||
| from homeassistant.components import zeroconf | ||||
| from homeassistant.config_entries import ( | ||||
|     SOURCE_ESPHOME, | ||||
|     SOURCE_IGNORE, | ||||
|     SOURCE_REAUTH, | ||||
|     SOURCE_RECONFIGURE, | ||||
|     ConfigEntry, | ||||
|     ConfigFlow, | ||||
|     ConfigFlowResult, | ||||
|     FlowType, | ||||
|     OptionsFlow, | ||||
| ) | ||||
| from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT | ||||
| from homeassistant.core import callback | ||||
| from homeassistant.data_entry_flow import AbortFlow, FlowResultType | ||||
| from homeassistant.helpers import discovery_flow | ||||
| from homeassistant.data_entry_flow import AbortFlow | ||||
| from homeassistant.helpers.device_registry import format_mac | ||||
| from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo | ||||
| from homeassistant.helpers.service_info.esphome import ESPHomeServiceInfo | ||||
| from homeassistant.helpers.service_info.hassio import HassioServiceInfo | ||||
| from homeassistant.helpers.service_info.mqtt import MqttServiceInfo | ||||
| from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo | ||||
| @@ -79,7 +75,6 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN): | ||||
|     def __init__(self) -> None: | ||||
|         """Initialize flow.""" | ||||
|         self._host: str | None = None | ||||
|         self._connected_address: str | None = None | ||||
|         self.__name: str | None = None | ||||
|         self._port: int | None = None | ||||
|         self._password: str | None = None | ||||
| @@ -503,55 +498,18 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN): | ||||
|         await self.hass.config_entries.async_remove( | ||||
|             self._entry_with_name_conflict.entry_id | ||||
|         ) | ||||
|         return await self._async_create_entry() | ||||
|         return self._async_create_entry() | ||||
|  | ||||
|     async def _async_create_entry(self) -> ConfigFlowResult: | ||||
|     @callback | ||||
|     def _async_create_entry(self) -> ConfigFlowResult: | ||||
|         """Create the config entry.""" | ||||
|         assert self._name is not None | ||||
|         assert self._device_info is not None | ||||
|  | ||||
|         # Check if Z-Wave capabilities are present and start discovery flow | ||||
|         next_flow_id: str | None = None | ||||
|         if self._device_info.zwave_proxy_feature_flags: | ||||
|             assert self._connected_address is not None | ||||
|             assert self._port is not None | ||||
|  | ||||
|             # Start Z-Wave discovery flow and get the flow ID | ||||
|             zwave_result = await self.hass.config_entries.flow.async_init( | ||||
|                 "zwave_js", | ||||
|                 context={ | ||||
|                     "source": SOURCE_ESPHOME, | ||||
|                     "discovery_key": discovery_flow.DiscoveryKey( | ||||
|                         domain=DOMAIN, | ||||
|                         key=self._device_info.mac_address, | ||||
|                         version=1, | ||||
|                     ), | ||||
|                 }, | ||||
|                 data=ESPHomeServiceInfo( | ||||
|                     name=self._device_info.name, | ||||
|                     zwave_home_id=self._device_info.zwave_home_id or None, | ||||
|                     ip_address=self._connected_address, | ||||
|                     port=self._port, | ||||
|                     noise_psk=self._noise_psk, | ||||
|                 ), | ||||
|             ) | ||||
|             if zwave_result["type"] in ( | ||||
|                 FlowResultType.ABORT, | ||||
|                 FlowResultType.CREATE_ENTRY, | ||||
|             ): | ||||
|                 _LOGGER.debug( | ||||
|                     "Unable to continue created Z-Wave JS config flow: %s", zwave_result | ||||
|                 ) | ||||
|             else: | ||||
|                 next_flow_id = zwave_result["flow_id"] | ||||
|  | ||||
|         return self.async_create_entry( | ||||
|             title=self._name, | ||||
|             data=self._async_make_config_data(), | ||||
|             options={ | ||||
|                 CONF_ALLOW_SERVICE_CALLS: DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS, | ||||
|             }, | ||||
|             next_flow=(FlowType.CONFIG_FLOW, next_flow_id) if next_flow_id else None, | ||||
|         ) | ||||
|  | ||||
|     @callback | ||||
| @@ -598,7 +556,7 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN): | ||||
|             if entry.data.get(CONF_DEVICE_NAME) == self._device_name: | ||||
|                 self._entry_with_name_conflict = entry | ||||
|                 return await self.async_step_name_conflict() | ||||
|         return await self._async_create_entry() | ||||
|         return self._async_create_entry() | ||||
|  | ||||
|     async def _async_reauth_validated_connection(self) -> ConfigFlowResult: | ||||
|         """Handle reauth validated connection.""" | ||||
| @@ -745,7 +703,6 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN): | ||||
|         try: | ||||
|             await cli.connect() | ||||
|             self._device_info = await cli.device_info() | ||||
|             self._connected_address = cli.connected_address | ||||
|         except InvalidAuthAPIError: | ||||
|             return ERROR_INVALID_PASSWORD_AUTH | ||||
|         except RequiresEncryptionAPIError: | ||||
|   | ||||
| @@ -17,9 +17,9 @@ | ||||
|   "mqtt": ["esphome/discover/#"], | ||||
|   "quality_scale": "platinum", | ||||
|   "requirements": [ | ||||
|     "aioesphomeapi==41.12.0", | ||||
|     "aioesphomeapi==41.11.0", | ||||
|     "esphome-dashboard-api==1.3.0", | ||||
|     "bleak-esphome==3.4.0" | ||||
|     "bleak-esphome==3.3.0" | ||||
|   ], | ||||
|   "zeroconf": ["_esphomelib._tcp.local."] | ||||
| } | ||||
|   | ||||
| @@ -10,6 +10,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: | ||||
|     """Set up Filter from a config entry.""" | ||||
|  | ||||
|     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) | ||||
|     entry.async_on_unload(entry.add_update_listener(update_listener)) | ||||
|  | ||||
|     return True | ||||
|  | ||||
| @@ -17,3 +18,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: | ||||
| async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: | ||||
|     """Unload Filter config entry.""" | ||||
|     return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) | ||||
|  | ||||
|  | ||||
| async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: | ||||
|     """Handle options update.""" | ||||
|     await hass.config_entries.async_reload(entry.entry_id) | ||||
|   | ||||
| @@ -246,7 +246,6 @@ class FilterConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN): | ||||
|  | ||||
|     config_flow = CONFIG_FLOW | ||||
|     options_flow = OPTIONS_FLOW | ||||
|     options_flow_reloads = True | ||||
|  | ||||
|     def async_config_entry_title(self, options: Mapping[str, Any]) -> str: | ||||
|         """Return config entry title.""" | ||||
|   | ||||
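Taken together, the two filter hunks replace the declarative `options_flow_reloads` flag with an explicit update listener, so the entry is reloaded whenever its options change. A condensed sketch of that wiring with a hypothetical platform list (the API calls match the hunk above; only the module framing is added to make the sketch self-contained):

```python
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

# Assumption for the sketch: the entry forwards to a single sensor platform.
PLATFORMS: list[Platform] = [Platform.SENSOR]


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up the entry and reload it whenever its options change."""
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    # add_update_listener returns an unsubscribe callable; passing it to
    # async_on_unload removes the listener when the entry is unloaded.
    entry.async_on_unload(entry.add_update_listener(_update_listener))
    return True


async def _update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """Reload the config entry so changed options take effect."""
    await hass.config_entries.async_reload(entry.entry_id)


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload the entry's platforms."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
```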
| @@ -1,27 +0,0 @@ | ||||
| """The Firefly III integration.""" | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| from homeassistant.const import Platform | ||||
| from homeassistant.core import HomeAssistant | ||||
|  | ||||
| from .coordinator import FireflyConfigEntry, FireflyDataUpdateCoordinator | ||||
|  | ||||
| _PLATFORMS: list[Platform] = [Platform.SENSOR] | ||||
|  | ||||
|  | ||||
| async def async_setup_entry(hass: HomeAssistant, entry: FireflyConfigEntry) -> bool: | ||||
|     """Set up Firefly III from a config entry.""" | ||||
|  | ||||
|     coordinator = FireflyDataUpdateCoordinator(hass, entry) | ||||
|     await coordinator.async_config_entry_first_refresh() | ||||
|  | ||||
|     entry.runtime_data = coordinator | ||||
|     await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS) | ||||
|  | ||||
|     return True | ||||
|  | ||||
|  | ||||
| async def async_unload_entry(hass: HomeAssistant, entry: FireflyConfigEntry) -> bool: | ||||
|     """Unload a config entry.""" | ||||
|     return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS) | ||||
| @@ -1,140 +0,0 @@ | ||||
| """Config flow for the Firefly III integration.""" | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| from collections.abc import Mapping | ||||
| import logging | ||||
| from typing import Any | ||||
|  | ||||
| from pyfirefly import ( | ||||
|     Firefly, | ||||
|     FireflyAuthenticationError, | ||||
|     FireflyConnectionError, | ||||
|     FireflyTimeoutError, | ||||
| ) | ||||
| import voluptuous as vol | ||||
|  | ||||
| from homeassistant.config_entries import ConfigFlow, ConfigFlowResult | ||||
| from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.exceptions import HomeAssistantError | ||||
| from homeassistant.helpers.aiohttp_client import async_get_clientsession | ||||
|  | ||||
| from .const import DOMAIN | ||||
|  | ||||
| _LOGGER = logging.getLogger(__name__) | ||||
|  | ||||
| STEP_USER_DATA_SCHEMA = vol.Schema( | ||||
|     { | ||||
|         vol.Required(CONF_URL): str, | ||||
|         vol.Optional(CONF_VERIFY_SSL, default=True): bool, | ||||
|         vol.Required(CONF_API_KEY): str, | ||||
|     } | ||||
| ) | ||||
|  | ||||
|  | ||||
| async def _validate_input(hass: HomeAssistant, data: dict[str, Any]) -> bool: | ||||
|     """Validate the user input allows us to connect.""" | ||||
|  | ||||
|     try: | ||||
|         client = Firefly( | ||||
|             api_url=data[CONF_URL], | ||||
|             api_key=data[CONF_API_KEY], | ||||
|             session=async_get_clientsession(hass), | ||||
|         ) | ||||
|         await client.get_about() | ||||
|     except FireflyAuthenticationError: | ||||
|         raise InvalidAuth from None | ||||
|     except FireflyConnectionError as err: | ||||
|         raise CannotConnect from err | ||||
|     except FireflyTimeoutError as err: | ||||
|         raise FireflyClientTimeout from err | ||||
|  | ||||
|     return True | ||||
|  | ||||
|  | ||||
| class FireflyConfigFlow(ConfigFlow, domain=DOMAIN): | ||||
|     """Handle a config flow for Firefly III.""" | ||||
|  | ||||
|     VERSION = 1 | ||||
|  | ||||
|     async def async_step_user( | ||||
|         self, user_input: dict[str, Any] | None = None | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Handle the initial step.""" | ||||
|         errors: dict[str, str] = {} | ||||
|         if user_input is not None: | ||||
|             self._async_abort_entries_match({CONF_URL: user_input[CONF_URL]}) | ||||
|             try: | ||||
|                 await _validate_input(self.hass, user_input) | ||||
|             except CannotConnect: | ||||
|                 errors["base"] = "cannot_connect" | ||||
|             except InvalidAuth: | ||||
|                 errors["base"] = "invalid_auth" | ||||
|             except FireflyClientTimeout: | ||||
|                 errors["base"] = "timeout_connect" | ||||
|             except Exception: | ||||
|                 _LOGGER.exception("Unexpected exception") | ||||
|                 errors["base"] = "unknown" | ||||
|             else: | ||||
|                 return self.async_create_entry( | ||||
|                     title=user_input[CONF_URL], data=user_input | ||||
|                 ) | ||||
|  | ||||
|         return self.async_show_form( | ||||
|             step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors | ||||
|         ) | ||||
|  | ||||
|     async def async_step_reauth( | ||||
|         self, entry_data: Mapping[str, Any] | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Perform reauth when Firefly III API authentication fails.""" | ||||
|         return await self.async_step_reauth_confirm() | ||||
|  | ||||
|     async def async_step_reauth_confirm( | ||||
|         self, user_input: dict[str, Any] | None = None | ||||
|     ) -> ConfigFlowResult: | ||||
|         """Handle reauth: ask for a new API key and validate.""" | ||||
|         errors: dict[str, str] = {} | ||||
|         reauth_entry = self._get_reauth_entry() | ||||
|         if user_input is not None: | ||||
|             try: | ||||
|                 await _validate_input( | ||||
|                     self.hass, | ||||
|                     data={ | ||||
|                         **reauth_entry.data, | ||||
|                         CONF_API_KEY: user_input[CONF_API_KEY], | ||||
|                     }, | ||||
|                 ) | ||||
|             except CannotConnect: | ||||
|                 errors["base"] = "cannot_connect" | ||||
|             except InvalidAuth: | ||||
|                 errors["base"] = "invalid_auth" | ||||
|             except FireflyClientTimeout: | ||||
|                 errors["base"] = "timeout_connect" | ||||
|             except Exception: | ||||
|                 _LOGGER.exception("Unexpected exception") | ||||
|                 errors["base"] = "unknown" | ||||
|             else: | ||||
|                 return self.async_update_reload_and_abort( | ||||
|                     reauth_entry, | ||||
|                     data_updates={CONF_API_KEY: user_input[CONF_API_KEY]}, | ||||
|                 ) | ||||
|  | ||||
|         return self.async_show_form( | ||||
|             step_id="reauth_confirm", | ||||
|             data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}), | ||||
|             errors=errors, | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class CannotConnect(HomeAssistantError): | ||||
|     """Error to indicate we cannot connect.""" | ||||
|  | ||||
|  | ||||
| class InvalidAuth(HomeAssistantError): | ||||
|     """Error to indicate there is invalid auth.""" | ||||
|  | ||||
|  | ||||
| class FireflyClientTimeout(HomeAssistantError): | ||||
|     """Error to indicate a timeout occurred.""" | ||||
| @@ -1,6 +0,0 @@ | ||||
| """Constants for the Firefly III integration.""" | ||||
|  | ||||
| DOMAIN = "firefly_iii" | ||||
|  | ||||
| MANUFACTURER = "Firefly III" | ||||
| NAME = "Firefly III" | ||||
| @@ -1,137 +0,0 @@ | ||||
| """Data Update Coordinator for Firefly III integration.""" | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| from dataclasses import dataclass | ||||
| from datetime import datetime, timedelta | ||||
| import logging | ||||
|  | ||||
| from aiohttp import CookieJar | ||||
| from pyfirefly import ( | ||||
|     Firefly, | ||||
|     FireflyAuthenticationError, | ||||
|     FireflyConnectionError, | ||||
|     FireflyTimeoutError, | ||||
| ) | ||||
| from pyfirefly.models import Account, Bill, Budget, Category, Currency | ||||
|  | ||||
| from homeassistant.config_entries import ConfigEntry | ||||
| from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL | ||||
| from homeassistant.core import HomeAssistant | ||||
| from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady | ||||
| from homeassistant.helpers.aiohttp_client import async_create_clientsession | ||||
| from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed | ||||
|  | ||||
| from .const import DOMAIN | ||||
|  | ||||
| _LOGGER = logging.getLogger(__name__) | ||||
|  | ||||
| type FireflyConfigEntry = ConfigEntry[FireflyDataUpdateCoordinator] | ||||
|  | ||||
| DEFAULT_SCAN_INTERVAL = timedelta(minutes=5) | ||||
|  | ||||
|  | ||||
| @dataclass | ||||
| class FireflyCoordinatorData: | ||||
|     """Data structure for Firefly III coordinator data.""" | ||||
|  | ||||
|     accounts: list[Account] | ||||
|     categories: list[Category] | ||||
|     category_details: list[Category] | ||||
|     budgets: list[Budget] | ||||
|     bills: list[Bill] | ||||
|     primary_currency: Currency | ||||
|  | ||||
|  | ||||
| class FireflyDataUpdateCoordinator(DataUpdateCoordinator[FireflyCoordinatorData]): | ||||
|     """Coordinator to manage data updates for Firefly III integration.""" | ||||
|  | ||||
|     config_entry: FireflyConfigEntry | ||||
|  | ||||
|     def __init__(self, hass: HomeAssistant, config_entry: FireflyConfigEntry) -> None: | ||||
|         """Initialize the coordinator.""" | ||||
|         super().__init__( | ||||
|             hass, | ||||
|             _LOGGER, | ||||
|             config_entry=config_entry, | ||||
|             name=DOMAIN, | ||||
|             update_interval=DEFAULT_SCAN_INTERVAL, | ||||
|         ) | ||||
|         self.firefly = Firefly( | ||||
|             api_url=self.config_entry.data[CONF_URL], | ||||
|             api_key=self.config_entry.data[CONF_API_KEY], | ||||
|             session=async_create_clientsession( | ||||
|                 self.hass, | ||||
|                 self.config_entry.data[CONF_VERIFY_SSL], | ||||
|                 cookie_jar=CookieJar(unsafe=True), | ||||
|             ), | ||||
|         ) | ||||
|  | ||||
|     async def _async_setup(self) -> None: | ||||
|         """Set up the coordinator.""" | ||||
|         try: | ||||
|             await self.firefly.get_about() | ||||
|         except FireflyAuthenticationError as err: | ||||
|             raise ConfigEntryAuthFailed( | ||||
|                 translation_domain=DOMAIN, | ||||
|                 translation_key="invalid_auth", | ||||
|                 translation_placeholders={"error": repr(err)}, | ||||
|             ) from err | ||||
|         except FireflyConnectionError as err: | ||||
|             raise ConfigEntryNotReady( | ||||
|                 translation_domain=DOMAIN, | ||||
|                 translation_key="cannot_connect", | ||||
|                 translation_placeholders={"error": repr(err)}, | ||||
|             ) from err | ||||
|         except FireflyTimeoutError as err: | ||||
|             raise ConfigEntryNotReady( | ||||
|                 translation_domain=DOMAIN, | ||||
|                 translation_key="timeout_connect", | ||||
|                 translation_placeholders={"error": repr(err)}, | ||||
|             ) from err | ||||
|  | ||||
|     async def _async_update_data(self) -> FireflyCoordinatorData: | ||||
|         """Fetch data from Firefly III API.""" | ||||
|         now = datetime.now() | ||||
|         start_date = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0) | ||||
|         end_date = now | ||||
|  | ||||
|         try: | ||||
|             accounts = await self.firefly.get_accounts() | ||||
|             categories = await self.firefly.get_categories() | ||||
|             category_details = [ | ||||
|                 await self.firefly.get_category( | ||||
|                     category_id=int(category.id), start=start_date, end=end_date | ||||
|                 ) | ||||
|                 for category in categories | ||||
|             ] | ||||
|             primary_currency = await self.firefly.get_currency_primary() | ||||
|             budgets = await self.firefly.get_budgets() | ||||
|             bills = await self.firefly.get_bills() | ||||
|         except FireflyAuthenticationError as err: | ||||
|             raise ConfigEntryAuthFailed( | ||||
|                 translation_domain=DOMAIN, | ||||
|                 translation_key="invalid_auth", | ||||
|                 translation_placeholders={"error": repr(err)}, | ||||
|             ) from err | ||||
|         except FireflyConnectionError as err: | ||||
|             raise UpdateFailed( | ||||
|                 translation_domain=DOMAIN, | ||||
|                 translation_key="cannot_connect", | ||||
|                 translation_placeholders={"error": repr(err)}, | ||||
|             ) from err | ||||
|         except FireflyTimeoutError as err: | ||||
|             raise UpdateFailed( | ||||
|                 translation_domain=DOMAIN, | ||||
|                 translation_key="timeout_connect", | ||||
|                 translation_placeholders={"error": repr(err)}, | ||||
|             ) from err | ||||
|  | ||||
|         return FireflyCoordinatorData( | ||||
|             accounts=accounts, | ||||
|             categories=categories, | ||||
|             category_details=category_details, | ||||
|             budgets=budgets, | ||||
|             bills=bills, | ||||
|             primary_currency=primary_currency, | ||||
|         ) | ||||
| @@ -1,40 +0,0 @@ | ||||
| """Base entity for Firefly III integration.""" | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| from yarl import URL | ||||
|  | ||||
| from homeassistant.const import CONF_URL | ||||
| from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo | ||||
| from homeassistant.helpers.entity import EntityDescription | ||||
| from homeassistant.helpers.update_coordinator import CoordinatorEntity | ||||
|  | ||||
| from .const import DOMAIN, MANUFACTURER | ||||
| from .coordinator import FireflyDataUpdateCoordinator | ||||
|  | ||||
|  | ||||
| class FireflyBaseEntity(CoordinatorEntity[FireflyDataUpdateCoordinator]): | ||||
|     """Base class for Firefly III entity.""" | ||||
|  | ||||
|     _attr_has_entity_name = True | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         coordinator: FireflyDataUpdateCoordinator, | ||||
|         entity_description: EntityDescription, | ||||
|     ) -> None: | ||||
|         """Initialize a Firefly entity.""" | ||||
|         super().__init__(coordinator) | ||||
|  | ||||
|         self.entity_description = entity_description | ||||
|         self._attr_device_info = DeviceInfo( | ||||
|             entry_type=DeviceEntryType.SERVICE, | ||||
|             manufacturer=MANUFACTURER, | ||||
|             configuration_url=URL(coordinator.config_entry.data[CONF_URL]), | ||||
|             identifiers={ | ||||
|                 ( | ||||
|                     DOMAIN, | ||||
|                     f"{coordinator.config_entry.entry_id}_{self.entity_description.key}", | ||||
|                 ) | ||||
|             }, | ||||
|         ) | ||||
| @@ -1,18 +0,0 @@ | ||||
| { | ||||
|   "entity": { | ||||
|     "sensor": { | ||||
|       "account_type": { | ||||
|         "default": "mdi:bank", | ||||
|         "state": { | ||||
|           "expense": "mdi:cash-minus", | ||||
|           "revenue": "mdi:cash-plus", | ||||
|           "asset": "mdi:account-cash", | ||||
|           "liability": "mdi:hand-coin" | ||||
|         } | ||||
|       }, | ||||
|       "category": { | ||||
|         "default": "mdi:label" | ||||
|       } | ||||
|     } | ||||
|   } | ||||
| } | ||||
Some files were not shown because too many files have changed in this diff.