mirror of
https://github.com/home-assistant/core.git
synced 2026-05-13 10:03:57 +00:00
Compare commits
555 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 7d7738303a | |||
| dd0cdc4fc4 | |||
| 18ea40c46d | |||
| a23131efc8 | |||
| 4940a0abae | |||
| 5f98d5ae52 | |||
| ba18cded30 | |||
| fb7504e9df | |||
| 106f815a1e | |||
| 167757762b | |||
| 3a902e1a16 | |||
| 85c11672d8 | |||
| 89649df20d | |||
| 7b749b95ce | |||
| cc140be85c | |||
| e1ad765414 | |||
| 44b1fea745 | |||
| 5dd04363b2 | |||
| 03aa979309 | |||
| 6fabbb354b | |||
| f644448d0f | |||
| 4e61581cd8 | |||
| 6f87d02b72 | |||
| 348f6149b4 | |||
| a4227ef1bc | |||
| aac49a567f | |||
| 76b878b136 | |||
| 2d05931683 | |||
| b10582b0a9 | |||
| b193d951d7 | |||
| 4cd0d9dcec | |||
| 32f65b2e11 | |||
| 8c79d1e44b | |||
| 8d53f7a520 | |||
| cc83ee88fb | |||
| 0c5b02eff3 | |||
| 9da9f8fd50 | |||
| d70ffcd3e9 | |||
| 3e26d0dfe3 | |||
| eab9747b32 | |||
| 9e955d8294 | |||
| f08cd01ff8 | |||
| eabaf3b0fe | |||
| 65ca790d15 | |||
| d177944f7a | |||
| 7f186f4430 | |||
| 4f4f4642a7 | |||
| 12e443cd31 | |||
| 22a7daabe7 | |||
| c139e99abd | |||
| 2bfdb96a3f | |||
| 4b24ca924b | |||
| 1d3d714e4f | |||
| ffae6eda8a | |||
| 4dd996b728 | |||
| afad1e8dac | |||
| 8e41933251 | |||
| c581eaad53 | |||
| 3050e79d06 | |||
| 0e8ecd1065 | |||
| 94732139f4 | |||
| c5e08b2409 | |||
| c12e1b5f4a | |||
| 6cfedb55e6 | |||
| af4cb9530b | |||
| 58e97e7d5f | |||
| 2945b51617 | |||
| 9d0e2df627 | |||
| 643ae080db | |||
| a7eaa51179 | |||
| e15852ff38 | |||
| f6dec34136 | |||
| 53905fbc49 | |||
| 8218ff0fe8 | |||
| 663f7e3e6b | |||
| 4dfa2b8b88 | |||
| f828b165b1 | |||
| c56c506648 | |||
| 8e5bf2a35f | |||
| 4d575e69a4 | |||
| 4f78bbccc0 | |||
| 2d66ebe54a | |||
| a3e1209778 | |||
| 7c44a0b88d | |||
| 126058e0fa | |||
| 28742822cb | |||
| 179d370c2a | |||
| 2d8f3691cf | |||
| ce4fc9e880 | |||
| 9e357e7e5a | |||
| ed35b23e62 | |||
| 191d2d1f12 | |||
| b165d8251f | |||
| 5e8886aeb7 | |||
| bdb66635f8 | |||
| 5ba6e348da | |||
| ed52b0ce80 | |||
| 33ee3d6967 | |||
| f36676c32c | |||
| 77beddb1e7 | |||
| 1677e410b3 | |||
| 1be09347cd | |||
| c30ac2c0f3 | |||
| 145c7435a5 | |||
| 60f3b3bcc0 | |||
| 03e6d3bd30 | |||
| ee4d150e13 | |||
| 148603a10e | |||
| 1dbd933d3c | |||
| f7ee7423fe | |||
| 6322f1e37a | |||
| 0d8c7fbb9d | |||
| 70e30b02a4 | |||
| ebd21ea9b2 | |||
| 9aa092cd34 | |||
| b274fe85b7 | |||
| 777c36998c | |||
| a3977428f9 | |||
| 2d626c263c | |||
| d1461f2e68 | |||
| 3b778d2cc7 | |||
| 67b7d17a2f | |||
| 1afeadc342 | |||
| f6aa4e2092 | |||
| 3b00c5bb96 | |||
| ef7eed579b | |||
| 568a0085fe | |||
| f5363db26f | |||
| 3be1aa5441 | |||
| 7dbffb7375 | |||
| 07c4025d47 | |||
| 3e3e425aa5 | |||
| 162a4fc385 | |||
| ef6fd92079 | |||
| 4ad71a070a | |||
| f33ad12f5e | |||
| da7fbb0dd6 | |||
| 81137345a3 | |||
| d3e77d4195 | |||
| ce977e90a5 | |||
| 2871b87344 | |||
| d82ce1e22d | |||
| b8bb2e0090 | |||
| 1b81cfe3ca | |||
| 0a3f0d90c3 | |||
| 84d566a02c | |||
| 0e0d54e4b6 | |||
| 5b05061def | |||
| e0bf76769a | |||
| 63868bc169 | |||
| b8b7169371 | |||
| 1cc778954f | |||
| 3ba3ecdef3 | |||
| 5c57fc6e14 | |||
| 2da440043a | |||
| 4f34725e53 | |||
| d03bec2f44 | |||
| 57c37fc10c | |||
| fd98594143 | |||
| 894547abed | |||
| b48060674c | |||
| 6f2aa7852a | |||
| 9d53645468 | |||
| a3f1c067f7 | |||
| cef97973d0 | |||
| 7bb297a3fc | |||
| 7e2b8e1a48 | |||
| 013c5e7f7c | |||
| 7cb1d5b8ab | |||
| 57d9e8ea6f | |||
| 32743fcf8d | |||
| f4637db26d | |||
| b4bfe6b80b | |||
| 278f25ec6e | |||
| 39d3bc3e53 | |||
| bb41a2df9f | |||
| 284242b90e | |||
| a95c216983 | |||
| d41a3ae0cd | |||
| 0dfbe3ef84 | |||
| 71fc725d75 | |||
| d41c9aee52 | |||
| 8091f511b8 | |||
| a7baedc22b | |||
| 05bfb3a52e | |||
| 2a5b95ba4d | |||
| 3dd972cc7a | |||
| acd9dd218a | |||
| 6552cf8f7a | |||
| e4e4785225 | |||
| d531ce8d1d | |||
| 0224928655 | |||
| 05121b89c6 | |||
| 326895f0a1 | |||
| 45121eddf1 | |||
| 5e4f8f8bff | |||
| b9bbe36af0 | |||
| b56cdb9106 | |||
| e975496145 | |||
| cdeb550b87 | |||
| 62082bdf14 | |||
| 891efeb9cb | |||
| dc8abff6b9 | |||
| aa7474839b | |||
| 06a96712f6 | |||
| 97be8f485a | |||
| a9c23ff445 | |||
| cd92cb1258 | |||
| c3f01b3a23 | |||
| 4b232be04a | |||
| cd5e21d3ac | |||
| 84d5085f3b | |||
| 44e94a82f1 | |||
| fe0da5c34f | |||
| c0200084ec | |||
| ef63ab5def | |||
| 3683607820 | |||
| 4c70fef2da | |||
| d956af095e | |||
| ea34fe4107 | |||
| e1c81c9b9e | |||
| 4ea0e6b240 | |||
| 0ae5a19602 | |||
| 80c7e47c42 | |||
| dfe4085189 | |||
| 65a12b48e7 | |||
| cd639b829c | |||
| ea5b633574 | |||
| 2f2413c979 | |||
| 799bcb0f88 | |||
| d3cf5d9aab | |||
| d2fddf129d | |||
| d19c2506bf | |||
| 8fd3d0bb44 | |||
| d62f136c58 | |||
| 86e8b9df9b | |||
| aa5e942528 | |||
| 6636e67af6 | |||
| 30f310fc24 | |||
| de4e1c444e | |||
| eaf72106f8 | |||
| b90a074fb4 | |||
| 3aea7f0695 | |||
| ba8b1b2daf | |||
| 5ff1c15df3 | |||
| 0280d921e5 | |||
| 955e8362e4 | |||
| 1fc0b620c0 | |||
| ab08153d62 | |||
| b47b7fa58c | |||
| c50676dee9 | |||
| 96bd991bb8 | |||
| 7e2a7b9393 | |||
| eb2217cfa6 | |||
| b2269b3dba | |||
| eb85d7cd98 | |||
| 6663717d59 | |||
| 33e5a96a57 | |||
| 7cb4d5ca9c | |||
| 7dacd0080b | |||
| 7f44fe031c | |||
| 9656aaa6bd | |||
| bf4b865e83 | |||
| 73dcc2f5a8 | |||
| fa0cf37e2c | |||
| fa6c6ee4fc | |||
| 4eb000d863 | |||
| d3809dd4cb | |||
| 2f3a6243f7 | |||
| f36799d139 | |||
| 8d5f83e5f1 | |||
| 308cb686d2 | |||
| 63d4f4d03d | |||
| d8a4b36381 | |||
| c048af2e4e | |||
| 1a25864890 | |||
| d1922189aa | |||
| 7594ead857 | |||
| 8ce14877a4 | |||
| 2fb0de3cdb | |||
| 3e77a4bfb2 | |||
| 4b9dd68fe7 | |||
| f21ed9054b | |||
| 53e4d6c8fc | |||
| 6a67c0faf7 | |||
| d590f4f0b5 | |||
| 45a6134209 | |||
| 6893d2b13d | |||
| 370babf542 | |||
| 6c89ecb98b | |||
| cc1eaa72a6 | |||
| 21a3c5b0ed | |||
| 080eb6af84 | |||
| 663538c492 | |||
| d119bbe4ef | |||
| 0eb204508c | |||
| ad836b48b0 | |||
| 9cc9f240e7 | |||
| 91d5c080de | |||
| 9390bf3414 | |||
| a5b65766db | |||
| 9321ff504c | |||
| 8a22e84db0 | |||
| 642206699d | |||
| 5d98f467fb | |||
| 54727a6f20 | |||
| ed371bc644 | |||
| 3cc6cc9519 | |||
| 2053e61a80 | |||
| 3673a80a37 | |||
| 0cc531e333 | |||
| 12280dbe63 | |||
| 84a5ba26d3 | |||
| 6ec4466ad7 | |||
| cb62562f5b | |||
| a381a3a741 | |||
| 6902504087 | |||
| 64f2fa42fc | |||
| 64c9a76fc8 | |||
| e9fc6b3e74 | |||
| 605eea6274 | |||
| 86af61d7b5 | |||
| 45978f41cd | |||
| 4d4e45854f | |||
| 43fa4f2646 | |||
| 5cedb0b726 | |||
| e3de695b99 | |||
| e684490219 | |||
| 70947c612c | |||
| 07c144841f | |||
| 392c46c028 | |||
| 5af3b361c8 | |||
| 040c960ced | |||
| f2787115d0 | |||
| 2dd1632fc3 | |||
| ed1aefc643 | |||
| f1bbe4204b | |||
| 929379799c | |||
| d20d1df382 | |||
| b5a1b592e9 | |||
| 6384e6b38d | |||
| cd98577eb7 | |||
| fa9a336725 | |||
| aa0199b442 | |||
| a506be4be0 | |||
| e78a79a29e | |||
| eed4acc745 | |||
| f1fcca2c75 | |||
| 8673694f6f | |||
| e8e9914ef5 | |||
| 77c7225750 | |||
| 595f041143 | |||
| 2c4f598c06 | |||
| 1bf77e095d | |||
| d832abc5fc | |||
| e7dae028ba | |||
| e19d0e75c3 | |||
| 306fc529f2 | |||
| c1894eda83 | |||
| e9ca9254df | |||
| 9ccc2e7473 | |||
| d1bdd6eeeb | |||
| 8e3070afe1 | |||
| c48502afda | |||
| 77df31fa83 | |||
| f06cd25f4a | |||
| 19ebb1da2a | |||
| f225d8162b | |||
| 759ac2eacd | |||
| b474a42844 | |||
| db76773727 | |||
| 48b650c486 | |||
| 9e1c02262e | |||
| 5a79dd9d99 | |||
| c3f66f9e90 | |||
| 77fd120cd5 | |||
| 1978c9772a | |||
| 6862b808ae | |||
| 757deb3a1c | |||
| 54e3c3fc9b | |||
| e509c9b78a | |||
| 2eb9f69d1e | |||
| 2278423758 | |||
| 4625176606 | |||
| e7aa672133 | |||
| 99185bf9a4 | |||
| b5e66bbcd0 | |||
| 2deb364ab0 | |||
| 822b97d096 | |||
| 1e0dc86eea | |||
| ca70abe240 | |||
| f479b0ad6a | |||
| 458b5fe8bf | |||
| 9621307cb0 | |||
| 4507f9a8d8 | |||
| 19dd68b7fc | |||
| 245b9ed4c0 | |||
| 838feef660 | |||
| ca4d36db1a | |||
| 39b690b22c | |||
| ed560f0ba7 | |||
| 0db50acb89 | |||
| 446d89aee2 | |||
| 6fe1862d15 | |||
| 4f5d0a7305 | |||
| f84bf99105 | |||
| 7fad242ad0 | |||
| fcd6f78f35 | |||
| 056ff957e8 | |||
| 9cf95404cf | |||
| 4d8acfa61c | |||
| 9369a5dc93 | |||
| 8b2afb4e66 | |||
| a53d3ea9eb | |||
| e422c08d4e | |||
| 599fe252ef | |||
| aad93fd577 | |||
| a19aebed16 | |||
| c9d8257465 | |||
| 5ee6a2181f | |||
| ec18e0c6d4 | |||
| c4426b9476 | |||
| c4fd458d03 | |||
| 1fec38ef28 | |||
| 9c4b6951ef | |||
| 7d2f303035 | |||
| c61c09fba3 | |||
| 83c807d01c | |||
| 1b0386ddfc | |||
| 0af6a85049 | |||
| 7bd0bc9c8a | |||
| b200930fd4 | |||
| 5c046a3750 | |||
| c1a013d718 | |||
| f1f6cdae2a | |||
| f98de4618a | |||
| 6b2033b060 | |||
| d9dc2bbae4 | |||
| 82a1884085 | |||
| c46f6721bc | |||
| ed3ff38d30 | |||
| fd3e12a85f | |||
| 1e0a0b70f4 | |||
| 2598dde7aa | |||
| 9af7fe22bd | |||
| 546eef2eee | |||
| d65b7ce2f3 | |||
| b09671a409 | |||
| 412771465d | |||
| 3a72bc23b9 | |||
| b3d7ba5ce5 | |||
| 4e7b6838eb | |||
| 437f5ef66c | |||
| f886b03e14 | |||
| 190ee49e3a | |||
| f7c5a51f46 | |||
| e4e9c22016 | |||
| f2df848e3f | |||
| cdce98faaf | |||
| fde103cdfd | |||
| fcd6c6e335 | |||
| 8f2cec26e3 | |||
| 05463cde99 | |||
| a948799a6e | |||
| 624fab064a | |||
| a331cb7199 | |||
| 7d6eaf40a6 | |||
| 1ae9e7c87d | |||
| 6bcfc32d48 | |||
| 0b5f85bdb9 | |||
| d153eee822 | |||
| afcc2113ce | |||
| ae5bd63993 | |||
| 78107c478d | |||
| 84490ef0bb | |||
| 887e14638b | |||
| 818bde1d5e | |||
| 83da18b761 | |||
| bd904caea1 | |||
| 500f030eaa | |||
| ce755f5f8f | |||
| fb766d164b | |||
| 394670e33f | |||
| f79285f9ab | |||
| a422611ada | |||
| 4c34dcd560 | |||
| 1aca993c12 | |||
| a8cc099b66 | |||
| c56d67c02f | |||
| 0ce98cfb34 | |||
| 4a13ab9aff | |||
| dc65646d8b | |||
| 39fbdad775 | |||
| b4f6a43a14 | |||
| e5ff7a9944 | |||
| ca9945f750 | |||
| b028e2a6ae | |||
| 6f4aca495b | |||
| a892b5364d | |||
| f57e682a98 | |||
| 3493517b6d | |||
| b5842b8484 | |||
| 3333b8d019 | |||
| 745860553c | |||
| 7188a09a59 | |||
| 96a9b89412 | |||
| 586d7ab526 | |||
| 5f2fe4ffd4 | |||
| 040192c103 | |||
| e85430105e | |||
| 5c7c0a6e83 | |||
| c7bd673d01 | |||
| 6d3a93df81 | |||
| 6a934b5fe3 | |||
| d644348dc8 | |||
| dbfde9266c | |||
| ed0b68ec4a | |||
| c32d523f63 | |||
| 98a4e27e35 | |||
| fb1365e9a4 | |||
| 850b034a5f | |||
| b880876e0e | |||
| ab601e5717 | |||
| 7eda592c72 | |||
| b981ece163 | |||
| 7ea931fdc8 | |||
| f3038a20af | |||
| de234c7190 | |||
| 399681984f | |||
| 5ca14ca7d7 | |||
| ac53cfa85a | |||
| 02f1a9c3a9 | |||
| f93fdceac9 | |||
| 711a89f7b8 | |||
| 19e58c554e | |||
| feb6c2bfe6 | |||
| 6bb91422ff | |||
| 3bd699285b | |||
| 6d10305197 | |||
| 42a9c8488d | |||
| c6c273559e | |||
| f7394ce302 | |||
| 175dec6f1a | |||
| d137761cb5 | |||
| 8055cbc58d | |||
| c9dff27590 | |||
| c913a858b6 | |||
| 4ed33a804e | |||
| 8bf5674826 | |||
| b8a0b0083b | |||
| a57c101b5e | |||
| 957b8c1c52 | |||
| bb002d051b | |||
| 2b2fd4ac92 | |||
| f4c270629b |
@@ -15,7 +15,6 @@ description: Everything you need to know to build, test and review Home Assistan
|
||||
- For entity actions and entity services, avoid requesting redundant defensive checks for fields already enforced by Home Assistant validation schemas and entity filters; only request extra guards when values bypass validation or are transformed unsafely.
|
||||
- When validation guarantees a key is present, prefer direct dictionary indexing (`data["key"]`) over `.get("key")` so invalid assumptions fail fast.
|
||||
- Integrations should be thin wrappers. Protocol parsing, device state machines, or other domain logic belong in a separate PyPI library, not in the integration itself. If unsure, ask before inlining.
|
||||
- "potato" is a forbidden word for an integration and should never be used.
|
||||
|
||||
The following platforms have extra guidelines:
|
||||
- **Diagnostics**: [`platform-diagnostics.md`](platform-diagnostics.md) for diagnostic data collection
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
# Copilot code review instructions
|
||||
|
||||
- Start review comments with a short, one-sentence summary of the suggested fix.
|
||||
- Do not add comments about code style, formatting or linting issues.
|
||||
- Do not comment on code style, formatting or linting issues.
|
||||
|
||||
# GitHub Copilot & Claude Code Instructions
|
||||
|
||||
@@ -21,7 +21,7 @@ This repository contains the core of Home Assistant, a Python 3 based home autom
|
||||
|
||||
## Python Syntax Notes
|
||||
|
||||
- Python 3.14 explicitly allows `except TypeA, TypeB:` without parentheses.
|
||||
- Python 3.14 explicitly allows `except TypeA, TypeB:` without parentheses. Never flag this as an issue since Home Assistant officially supports Python 3.14.
|
||||
|
||||
## Testing
|
||||
|
||||
@@ -34,8 +34,3 @@ Integrations with Platinum or Gold level in the Integration Quality Scale reflec
|
||||
|
||||
When reviewing entity actions, do not suggest extra defensive checks for input fields that are already validated by Home Assistant's service/action schemas and entity selection filters. Suggest additional guards only when data bypasses those validators or is transformed into a less-safe form.
|
||||
When validation guarantees a dict key exists, prefer direct key access (`data["key"]`) instead of `.get("key")` so contract violations are surfaced instead of silently masked.
|
||||
|
||||
|
||||
# Skills
|
||||
|
||||
- ha-integration-knowledge: .claude/skills/ha-integration-knowledge/SKILL.md
|
||||
|
||||
@@ -0,0 +1,46 @@
|
||||
---
|
||||
applyTo: "homeassistant/components/**, tests/components/**"
|
||||
excludeAgent: "cloud-agent"
|
||||
---
|
||||
|
||||
<!-- Automatically generated by gen_copilot_instructions.py, do not edit -->
|
||||
|
||||
|
||||
## File Locations
|
||||
- **Integration code**: `./homeassistant/components/<integration_domain>/`
|
||||
- **Integration tests**: `./tests/components/<integration_domain>/`
|
||||
|
||||
## General guidelines
|
||||
|
||||
- When looking for examples, prefer integrations with the platinum or gold quality scale level first.
|
||||
- Polling intervals are NOT user-configurable. Never add scan_interval, update_interval, or polling frequency options to config flows or config entries.
|
||||
- Do NOT allow users to set config entry names in config flows. Names are automatically generated or can be customized later in UI. Exception: helper integrations may allow custom names.
|
||||
- For entity actions and entity services, avoid requesting redundant defensive checks for fields already enforced by Home Assistant validation schemas and entity filters; only request extra guards when values bypass validation or are transformed unsafely.
|
||||
- When validation guarantees a key is present, prefer direct dictionary indexing (`data["key"]`) over `.get("key")` so invalid assumptions fail fast.
|
||||
- Integrations should be thin wrappers. Protocol parsing, device state machines, or other domain logic belong in a separate PyPI library, not in the integration itself. If unsure, ask before inlining.
|
||||
|
||||
The following platforms have extra guidelines:
|
||||
- **Diagnostics**: [`platform-diagnostics.md`](platform-diagnostics.md) for diagnostic data collection
|
||||
- **Repairs**: [`platform-repairs.md`](platform-repairs.md) for user-actionable repair issues
|
||||
|
||||
|
||||
## Integration Quality Scale
|
||||
|
||||
- When validating the quality scale rules, check them at https://developers.home-assistant.io/docs/core/integration-quality-scale/rules
|
||||
- When implementing or reviewing an integration, always consider the quality scale rules, since they promote best practices.
|
||||
|
||||
Template scale file: `./script/scaffold/templates/integration/integration/quality_scale.yaml`
|
||||
|
||||
### How Rules Apply
|
||||
1. **Check `manifest.json`**: Look for `"quality_scale"` key to determine integration level
|
||||
2. **Bronze Rules**: Always required for any integration with quality scale
|
||||
3. **Higher Tier Rules**: Only apply if integration targets that tier or higher
|
||||
4. **Rule Status**: Check `quality_scale.yaml` in integration folder for:
|
||||
- `done`: Rule implemented
|
||||
- `exempt`: Rule doesn't apply (with reason in comment)
|
||||
- `todo`: Rule needs implementation
|
||||
|
||||
|
||||
## Testing Requirements
|
||||
|
||||
- Tests should avoid interacting or mocking internal integration details. For more info, see https://developers.home-assistant.io/docs/development_testing/#writing-tests-for-integrations
|
||||
@@ -6,7 +6,7 @@
|
||||
"pep621",
|
||||
"pip_requirements",
|
||||
"pre-commit",
|
||||
"regex",
|
||||
"custom.regex",
|
||||
"homeassistant-manifest"
|
||||
],
|
||||
|
||||
@@ -27,8 +27,9 @@
|
||||
]
|
||||
},
|
||||
|
||||
"regexManagers": [
|
||||
"customManagers": [
|
||||
{
|
||||
"customType": "regex",
|
||||
"description": "Update ruff required-version in pyproject.toml",
|
||||
"managerFilePatterns": ["/^pyproject\\.toml$/"],
|
||||
"matchStrings": ["required-version = \">=(?<currentValue>[\\d.]+)\""],
|
||||
|
||||
@@ -366,7 +366,7 @@ jobs:
|
||||
echo "key=uv-${UV_CACHE_VERSION}-${uv_version}-${HA_SHORT_VERSION}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
|
||||
uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
|
||||
with:
|
||||
path: venv
|
||||
key: >-
|
||||
@@ -374,7 +374,8 @@ jobs:
|
||||
needs.info.outputs.python_cache_key }}
|
||||
- name: Restore uv wheel cache
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
|
||||
id: cache-uv
|
||||
uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
|
||||
with:
|
||||
path: ${{ env.UV_CACHE_DIR }}
|
||||
key: >-
|
||||
@@ -398,6 +399,7 @@ jobs:
|
||||
if: |
|
||||
steps.cache-venv.outputs.cache-hit != 'true'
|
||||
|| steps.cache-apt-check.outputs.cache-hit != 'true'
|
||||
id: install-os-deps
|
||||
timeout-minutes: 10
|
||||
env:
|
||||
APT_CACHE_HIT: ${{ steps.cache-apt-check.outputs.cache-hit }}
|
||||
@@ -431,7 +433,10 @@ jobs:
|
||||
sudo chmod -R 755 ${APT_CACHE_BASE}
|
||||
fi
|
||||
- name: Save apt cache
|
||||
if: steps.cache-apt-check.outputs.cache-hit != 'true'
|
||||
if: |
|
||||
always()
|
||||
&& steps.cache-apt-check.outputs.cache-hit != 'true'
|
||||
&& steps.install-os-deps.outcome == 'success'
|
||||
uses: actions/cache/save@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
|
||||
with:
|
||||
path: |
|
||||
@@ -441,6 +446,7 @@ jobs:
|
||||
${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
|
||||
- name: Create Python virtual environment
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
id: create-venv
|
||||
run: |
|
||||
python -m venv venv
|
||||
. venv/bin/activate
|
||||
@@ -471,6 +477,26 @@ jobs:
|
||||
- name: Check dirty
|
||||
run: |
|
||||
./script/check_dirty
|
||||
- name: Save uv wheel cache
|
||||
if: |
|
||||
(success() && steps.cache-venv.outputs.cache-hit != 'true')
|
||||
|| (always()
|
||||
&& steps.create-venv.outcome == 'success'
|
||||
&& steps.cache-uv.outputs.cache-matched-key == '')
|
||||
uses: actions/cache/save@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
|
||||
with:
|
||||
path: ${{ env.UV_CACHE_DIR }}
|
||||
key: >-
|
||||
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
|
||||
steps.generate-uv-key.outputs.key }}
|
||||
- name: Save base Python virtual environment
|
||||
if: always() && steps.create-venv.outcome == 'success'
|
||||
uses: actions/cache/save@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
|
||||
with:
|
||||
path: venv
|
||||
key: >-
|
||||
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
|
||||
needs.info.outputs.python_cache_key }}
|
||||
|
||||
hassfest:
|
||||
name: Check hassfest
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
repos:
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.15.10
|
||||
rev: v0.15.12
|
||||
hooks:
|
||||
- id: ruff-check
|
||||
args:
|
||||
|
||||
@@ -12,7 +12,7 @@ This repository contains the core of Home Assistant, a Python 3 based home autom
|
||||
|
||||
## Python Syntax Notes
|
||||
|
||||
- Python 3.14 explicitly allows `except TypeA, TypeB:` without parentheses.
|
||||
- Python 3.14 explicitly allows `except TypeA, TypeB:` without parentheses. Never flag this as an issue since Home Assistant officially supports Python 3.14.
|
||||
|
||||
## Testing
|
||||
|
||||
|
||||
Generated
+8
-4
@@ -851,8 +851,8 @@ CLAUDE.md @home-assistant/core
|
||||
/tests/components/input_select/ @home-assistant/core
|
||||
/homeassistant/components/input_text/ @home-assistant/core
|
||||
/tests/components/input_text/ @home-assistant/core
|
||||
/homeassistant/components/insteon/ @teharris1
|
||||
/tests/components/insteon/ @teharris1
|
||||
/homeassistant/components/insteon/ @teharris1 @ssyrell
|
||||
/tests/components/insteon/ @teharris1 @ssyrell
|
||||
/homeassistant/components/integration/ @dgomes
|
||||
/tests/components/integration/ @dgomes
|
||||
/homeassistant/components/intelliclima/ @dvdinth
|
||||
@@ -1203,6 +1203,8 @@ CLAUDE.md @home-assistant/core
|
||||
/tests/components/notify_events/ @matrozov @papajojo
|
||||
/homeassistant/components/notion/ @bachya
|
||||
/tests/components/notion/ @bachya
|
||||
/homeassistant/components/novy_cooker_hood/ @piitaya
|
||||
/tests/components/novy_cooker_hood/ @piitaya
|
||||
/homeassistant/components/nrgkick/ @andijakl
|
||||
/tests/components/nrgkick/ @andijakl
|
||||
/homeassistant/components/nsw_fuel_station/ @nickw444
|
||||
@@ -1239,6 +1241,8 @@ CLAUDE.md @home-assistant/core
|
||||
/homeassistant/components/ollama/ @synesthesiam
|
||||
/tests/components/ollama/ @synesthesiam
|
||||
/homeassistant/components/ombi/ @larssont
|
||||
/homeassistant/components/omie/ @luuuis
|
||||
/tests/components/omie/ @luuuis
|
||||
/homeassistant/components/onboarding/ @home-assistant/core
|
||||
/tests/components/onboarding/ @home-assistant/core
|
||||
/homeassistant/components/ondilo_ico/ @JeromeHXP
|
||||
@@ -1987,8 +1991,8 @@ CLAUDE.md @home-assistant/core
|
||||
/tests/components/wled/ @frenck @mik-laj
|
||||
/homeassistant/components/wmspro/ @mback2k
|
||||
/tests/components/wmspro/ @mback2k
|
||||
/homeassistant/components/wolflink/ @adamkrol93 @mtielen
|
||||
/tests/components/wolflink/ @adamkrol93 @mtielen
|
||||
/homeassistant/components/wolflink/ @adamkrol93 @EnjoyingM
|
||||
/tests/components/wolflink/ @adamkrol93 @EnjoyingM
|
||||
/homeassistant/components/workday/ @fabaff @gjohansson-ST
|
||||
/tests/components/workday/ @fabaff @gjohansson-ST
|
||||
/homeassistant/components/worldclock/ @fabaff
|
||||
|
||||
@@ -2,7 +2,8 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from collections.abc import Callable, Iterable
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -13,6 +14,9 @@ from .models import PermissionLookup
|
||||
from .types import PolicyType
|
||||
from .util import test_all
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..models import User
|
||||
|
||||
POLICY_SCHEMA = vol.Schema({vol.Optional(CAT_ENTITIES): ENTITY_POLICY_SCHEMA})
|
||||
|
||||
__all__ = [
|
||||
@@ -22,10 +26,21 @@ __all__ = [
|
||||
"PermissionLookup",
|
||||
"PolicyPermissions",
|
||||
"PolicyType",
|
||||
"filter_entity_ids_by_permission",
|
||||
"merge_policies",
|
||||
]
|
||||
|
||||
|
||||
def filter_entity_ids_by_permission(
|
||||
user: User, entity_ids: Iterable[str], key: str
|
||||
) -> list[str]:
|
||||
"""Filter entity IDs to those the user can access for the given policy key."""
|
||||
if user.is_admin or user.permissions.access_all_entities(key):
|
||||
return list(entity_ids)
|
||||
check_entity = user.permissions.check_entity
|
||||
return [entity_id for entity_id in entity_ids if check_entity(entity_id, key)]
|
||||
|
||||
|
||||
class AbstractPermissions:
|
||||
"""Default permissions class."""
|
||||
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"domain": "sensereo",
|
||||
"name": "Sensereo",
|
||||
"iot_standards": ["matter"]
|
||||
}
|
||||
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"domain": "zunzunbee",
|
||||
"name": "Zunzunbee",
|
||||
"iot_standards": ["zigbee"]
|
||||
}
|
||||
@@ -143,4 +143,4 @@ class AcaiaRestoreSensor(AcaiaEntity, RestoreSensor):
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if entity is available."""
|
||||
return super().available or self._restored_data is not None
|
||||
return super().available or self.native_value is not None
|
||||
|
||||
@@ -4,7 +4,7 @@ from __future__ import annotations
|
||||
|
||||
from asyncio import timeout
|
||||
from collections.abc import Mapping
|
||||
from typing import Any
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from accuweather import AccuWeather, ApiError, InvalidApiKeyError, RequestsExceededError
|
||||
from aiohttp import ClientError
|
||||
@@ -12,7 +12,7 @@ from aiohttp.client_exceptions import ClientConnectorError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME
|
||||
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
@@ -55,8 +55,11 @@ class AccuWeatherFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
)
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert accuweather.location_name is not None
|
||||
|
||||
return self.async_create_entry(
|
||||
title=user_input[CONF_NAME], data=user_input
|
||||
title=accuweather.location_name, data=user_input
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
@@ -70,9 +73,6 @@ class AccuWeatherFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
vol.Optional(
|
||||
CONF_LONGITUDE, default=self.hass.config.longitude
|
||||
): cv.longitude,
|
||||
vol.Optional(
|
||||
CONF_NAME, default=self.hass.config.location_name
|
||||
): str,
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
|
||||
@@ -64,7 +64,7 @@ class AccuWeatherObservationDataUpdateCoordinator(
|
||||
"""Initialize."""
|
||||
self.accuweather = accuweather
|
||||
self.location_key = accuweather.location_key
|
||||
name = config_entry.data[CONF_NAME]
|
||||
name = config_entry.data.get(CONF_NAME) or config_entry.title
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert self.location_key is not None
|
||||
@@ -122,7 +122,7 @@ class AccuWeatherForecastDataUpdateCoordinator(
|
||||
self.accuweather = accuweather
|
||||
self.location_key = accuweather.location_key
|
||||
self._fetch_method = fetch_method
|
||||
name = config_entry.data[CONF_NAME]
|
||||
name = config_entry.data.get(CONF_NAME) or config_entry.title
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert self.location_key is not None
|
||||
|
||||
@@ -25,8 +25,7 @@
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]",
|
||||
"latitude": "[%key:common::config_flow::data::latitude%]",
|
||||
"longitude": "[%key:common::config_flow::data::longitude%]",
|
||||
"name": "[%key:common::config_flow::data::name%]"
|
||||
"longitude": "[%key:common::config_flow::data::longitude%]"
|
||||
},
|
||||
"data_description": {
|
||||
"api_key": "API key generated in the AccuWeather APIs portal."
|
||||
|
||||
@@ -5,5 +5,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/acer_projector",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["serialx==1.4.1"]
|
||||
"requirements": ["serialx==1.7.1"]
|
||||
}
|
||||
|
||||
@@ -38,6 +38,7 @@ HVAC_MODE_MAPPING_ACTRONAIR_TO_HA = {
|
||||
"HEAT": HVACMode.HEAT,
|
||||
"FAN": HVACMode.FAN_ONLY,
|
||||
"AUTO": HVACMode.AUTO,
|
||||
"DRY": HVACMode.DRY,
|
||||
"OFF": HVACMode.OFF,
|
||||
}
|
||||
HVAC_MODE_MAPPING_HA_TO_ACTRONAIR = {
|
||||
@@ -79,7 +80,6 @@ class ActronAirClimateEntity(ClimateEntity):
|
||||
)
|
||||
_attr_name = None
|
||||
_attr_fan_modes = list(FAN_MODE_MAPPING_ACTRONAIR_TO_HA.values())
|
||||
_attr_hvac_modes = list(HVAC_MODE_MAPPING_ACTRONAIR_TO_HA.values())
|
||||
|
||||
|
||||
class ActronSystemClimate(ActronAirAcEntity, ActronAirClimateEntity):
|
||||
@@ -93,6 +93,17 @@ class ActronSystemClimate(ActronAirAcEntity, ActronAirClimateEntity):
|
||||
super().__init__(coordinator)
|
||||
self._attr_unique_id = self._serial_number
|
||||
|
||||
@property
|
||||
def hvac_modes(self) -> list[HVACMode]:
|
||||
"""Return the list of supported HVAC modes."""
|
||||
modes = [
|
||||
HVAC_MODE_MAPPING_ACTRONAIR_TO_HA[mode]
|
||||
for mode in self._status.user_aircon_settings.supported_modes
|
||||
if mode in HVAC_MODE_MAPPING_ACTRONAIR_TO_HA
|
||||
]
|
||||
modes.append(HVACMode.OFF)
|
||||
return modes
|
||||
|
||||
@property
|
||||
def min_temp(self) -> float:
|
||||
"""Return the minimum temperature that can be set."""
|
||||
@@ -136,7 +147,7 @@ class ActronSystemClimate(ActronAirAcEntity, ActronAirClimateEntity):
|
||||
@property
|
||||
def target_temperature(self) -> float:
|
||||
"""Return the target temperature."""
|
||||
return self._status.user_aircon_settings.temperature_setpoint_cool_c
|
||||
return self._status.user_aircon_settings.current_setpoint
|
||||
|
||||
@actron_air_command
|
||||
async def async_set_fan_mode(self, fan_mode: str) -> None:
|
||||
@@ -179,6 +190,18 @@ class ActronZoneClimate(ActronAirZoneEntity, ActronAirClimateEntity):
|
||||
super().__init__(coordinator, zone)
|
||||
self._attr_unique_id: str = self._zone_identifier
|
||||
|
||||
@property
|
||||
def hvac_modes(self) -> list[HVACMode]:
|
||||
"""Return the list of supported HVAC modes."""
|
||||
status = self.coordinator.data
|
||||
modes = [
|
||||
HVAC_MODE_MAPPING_ACTRONAIR_TO_HA[mode]
|
||||
for mode in status.user_aircon_settings.supported_modes
|
||||
if mode in HVAC_MODE_MAPPING_ACTRONAIR_TO_HA
|
||||
]
|
||||
modes.append(HVACMode.OFF)
|
||||
return modes
|
||||
|
||||
@property
|
||||
def min_temp(self) -> float:
|
||||
"""Return the minimum temperature that can be set."""
|
||||
@@ -216,7 +239,7 @@ class ActronZoneClimate(ActronAirZoneEntity, ActronAirClimateEntity):
|
||||
@property
|
||||
def target_temperature(self) -> float | None:
|
||||
"""Return the target temperature."""
|
||||
return self._zone.temperature_setpoint_cool_c
|
||||
return self._zone.current_setpoint
|
||||
|
||||
@actron_air_command
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
|
||||
@@ -13,5 +13,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["actron-neo-api==0.5.3"]
|
||||
"requirements": ["actron-neo-api==0.5.6"]
|
||||
}
|
||||
|
||||
@@ -36,9 +36,7 @@ def _make_detected_condition(
|
||||
) -> type[Condition]:
|
||||
"""Create a detected condition for a binary sensor device class."""
|
||||
return make_entity_state_condition(
|
||||
{BINARY_SENSOR_DOMAIN: DomainSpec(device_class=device_class)},
|
||||
STATE_ON,
|
||||
support_duration=True,
|
||||
{BINARY_SENSOR_DOMAIN: DomainSpec(device_class=device_class)}, STATE_ON
|
||||
)
|
||||
|
||||
|
||||
@@ -47,9 +45,7 @@ def _make_cleared_condition(
|
||||
) -> type[Condition]:
|
||||
"""Create a cleared condition for a binary sensor device class."""
|
||||
return make_entity_state_condition(
|
||||
{BINARY_SENSOR_DOMAIN: DomainSpec(device_class=device_class)},
|
||||
STATE_OFF,
|
||||
support_duration=True,
|
||||
{BINARY_SENSOR_DOMAIN: DomainSpec(device_class=device_class)}, STATE_OFF
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -4,11 +4,14 @@
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: condition_behavior
|
||||
options:
|
||||
- all
|
||||
- any
|
||||
automation_behavior:
|
||||
mode: condition
|
||||
|
||||
.condition_for: &condition_for
|
||||
required: true
|
||||
default: 00:00:00
|
||||
selector:
|
||||
duration:
|
||||
|
||||
# --- Unit lists for multi-unit pollutants ---
|
||||
|
||||
@@ -249,11 +252,7 @@
|
||||
.condition_binary_common: &condition_binary_common
|
||||
fields:
|
||||
behavior: *condition_behavior
|
||||
for:
|
||||
required: true
|
||||
default: 00:00:00
|
||||
selector:
|
||||
duration:
|
||||
for: *condition_for
|
||||
|
||||
is_gas_detected:
|
||||
<<: *condition_binary_common
|
||||
@@ -285,6 +284,7 @@ is_co_value:
|
||||
target: *target_co_sensor
|
||||
fields:
|
||||
behavior: *condition_behavior
|
||||
for: *condition_for
|
||||
threshold:
|
||||
required: true
|
||||
selector:
|
||||
@@ -299,6 +299,7 @@ is_ozone_value:
|
||||
target: *target_ozone
|
||||
fields:
|
||||
behavior: *condition_behavior
|
||||
for: *condition_for
|
||||
threshold:
|
||||
required: true
|
||||
selector:
|
||||
@@ -313,6 +314,7 @@ is_voc_value:
|
||||
target: *target_voc
|
||||
fields:
|
||||
behavior: *condition_behavior
|
||||
for: *condition_for
|
||||
threshold:
|
||||
required: true
|
||||
selector:
|
||||
@@ -327,6 +329,7 @@ is_voc_ratio_value:
|
||||
target: *target_voc_ratio
|
||||
fields:
|
||||
behavior: *condition_behavior
|
||||
for: *condition_for
|
||||
threshold:
|
||||
required: true
|
||||
selector:
|
||||
@@ -341,6 +344,7 @@ is_no_value:
|
||||
target: *target_no
|
||||
fields:
|
||||
behavior: *condition_behavior
|
||||
for: *condition_for
|
||||
threshold:
|
||||
required: true
|
||||
selector:
|
||||
@@ -355,6 +359,7 @@ is_no2_value:
|
||||
target: *target_no2
|
||||
fields:
|
||||
behavior: *condition_behavior
|
||||
for: *condition_for
|
||||
threshold:
|
||||
required: true
|
||||
selector:
|
||||
@@ -369,6 +374,7 @@ is_so2_value:
|
||||
target: *target_so2
|
||||
fields:
|
||||
behavior: *condition_behavior
|
||||
for: *condition_for
|
||||
threshold:
|
||||
required: true
|
||||
selector:
|
||||
@@ -385,6 +391,7 @@ is_co2_value:
|
||||
target: *target_co2
|
||||
fields:
|
||||
behavior: *condition_behavior
|
||||
for: *condition_for
|
||||
threshold:
|
||||
required: true
|
||||
selector:
|
||||
@@ -397,6 +404,7 @@ is_pm1_value:
|
||||
target: *target_pm1
|
||||
fields:
|
||||
behavior: *condition_behavior
|
||||
for: *condition_for
|
||||
threshold:
|
||||
required: true
|
||||
selector:
|
||||
@@ -409,6 +417,7 @@ is_pm25_value:
|
||||
target: *target_pm25
|
||||
fields:
|
||||
behavior: *condition_behavior
|
||||
for: *condition_for
|
||||
threshold:
|
||||
required: true
|
||||
selector:
|
||||
@@ -421,6 +430,7 @@ is_pm4_value:
|
||||
target: *target_pm4
|
||||
fields:
|
||||
behavior: *condition_behavior
|
||||
for: *condition_for
|
||||
threshold:
|
||||
required: true
|
||||
selector:
|
||||
@@ -433,6 +443,7 @@ is_pm10_value:
|
||||
target: *target_pm10
|
||||
fields:
|
||||
behavior: *condition_behavior
|
||||
for: *condition_for
|
||||
threshold:
|
||||
required: true
|
||||
selector:
|
||||
@@ -445,6 +456,7 @@ is_n2o_value:
|
||||
target: *target_n2o
|
||||
fields:
|
||||
behavior: *condition_behavior
|
||||
for: *condition_for
|
||||
threshold:
|
||||
required: true
|
||||
selector:
|
||||
|
||||
@@ -14,6 +14,9 @@
|
||||
"behavior": {
|
||||
"name": "[%key:component::air_quality::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::air_quality::common::condition_for_name%]"
|
||||
},
|
||||
"threshold": {
|
||||
"name": "[%key:component::air_quality::common::condition_threshold_name%]"
|
||||
}
|
||||
@@ -50,6 +53,9 @@
|
||||
"behavior": {
|
||||
"name": "[%key:component::air_quality::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::air_quality::common::condition_for_name%]"
|
||||
},
|
||||
"threshold": {
|
||||
"name": "[%key:component::air_quality::common::condition_threshold_name%]"
|
||||
}
|
||||
@@ -86,6 +92,9 @@
|
||||
"behavior": {
|
||||
"name": "[%key:component::air_quality::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::air_quality::common::condition_for_name%]"
|
||||
},
|
||||
"threshold": {
|
||||
"name": "[%key:component::air_quality::common::condition_threshold_name%]"
|
||||
}
|
||||
@@ -98,6 +107,9 @@
|
||||
"behavior": {
|
||||
"name": "[%key:component::air_quality::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::air_quality::common::condition_for_name%]"
|
||||
},
|
||||
"threshold": {
|
||||
"name": "[%key:component::air_quality::common::condition_threshold_name%]"
|
||||
}
|
||||
@@ -110,6 +122,9 @@
|
||||
"behavior": {
|
||||
"name": "[%key:component::air_quality::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::air_quality::common::condition_for_name%]"
|
||||
},
|
||||
"threshold": {
|
||||
"name": "[%key:component::air_quality::common::condition_threshold_name%]"
|
||||
}
|
||||
@@ -122,6 +137,9 @@
|
||||
"behavior": {
|
||||
"name": "[%key:component::air_quality::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::air_quality::common::condition_for_name%]"
|
||||
},
|
||||
"threshold": {
|
||||
"name": "[%key:component::air_quality::common::condition_threshold_name%]"
|
||||
}
|
||||
@@ -134,6 +152,9 @@
|
||||
"behavior": {
|
||||
"name": "[%key:component::air_quality::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::air_quality::common::condition_for_name%]"
|
||||
},
|
||||
"threshold": {
|
||||
"name": "[%key:component::air_quality::common::condition_threshold_name%]"
|
||||
}
|
||||
@@ -146,6 +167,9 @@
|
||||
"behavior": {
|
||||
"name": "[%key:component::air_quality::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::air_quality::common::condition_for_name%]"
|
||||
},
|
||||
"threshold": {
|
||||
"name": "[%key:component::air_quality::common::condition_threshold_name%]"
|
||||
}
|
||||
@@ -158,6 +182,9 @@
|
||||
"behavior": {
|
||||
"name": "[%key:component::air_quality::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::air_quality::common::condition_for_name%]"
|
||||
},
|
||||
"threshold": {
|
||||
"name": "[%key:component::air_quality::common::condition_threshold_name%]"
|
||||
}
|
||||
@@ -170,6 +197,9 @@
|
||||
"behavior": {
|
||||
"name": "[%key:component::air_quality::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::air_quality::common::condition_for_name%]"
|
||||
},
|
||||
"threshold": {
|
||||
"name": "[%key:component::air_quality::common::condition_threshold_name%]"
|
||||
}
|
||||
@@ -206,6 +236,9 @@
|
||||
"behavior": {
|
||||
"name": "[%key:component::air_quality::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::air_quality::common::condition_for_name%]"
|
||||
},
|
||||
"threshold": {
|
||||
"name": "[%key:component::air_quality::common::condition_threshold_name%]"
|
||||
}
|
||||
@@ -218,6 +251,9 @@
|
||||
"behavior": {
|
||||
"name": "[%key:component::air_quality::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::air_quality::common::condition_for_name%]"
|
||||
},
|
||||
"threshold": {
|
||||
"name": "[%key:component::air_quality::common::condition_threshold_name%]"
|
||||
}
|
||||
@@ -230,6 +266,9 @@
|
||||
"behavior": {
|
||||
"name": "[%key:component::air_quality::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::air_quality::common::condition_for_name%]"
|
||||
},
|
||||
"threshold": {
|
||||
"name": "[%key:component::air_quality::common::condition_threshold_name%]"
|
||||
}
|
||||
@@ -237,21 +276,6 @@
|
||||
"name": "Volatile organic compounds value"
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"condition_behavior": {
|
||||
"options": {
|
||||
"all": "All",
|
||||
"any": "Any"
|
||||
}
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"title": "Air Quality",
|
||||
"triggers": {
|
||||
"co2_changed": {
|
||||
|
||||
@@ -3,12 +3,8 @@
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: trigger_behavior
|
||||
options:
|
||||
- first
|
||||
- last
|
||||
- any
|
||||
automation_behavior:
|
||||
mode: trigger
|
||||
for: &trigger_for
|
||||
required: true
|
||||
default: 00:00:00
|
||||
|
||||
@@ -12,11 +12,11 @@ from airly.exceptions import AirlyError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME
|
||||
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import CONF_USE_NEAREST, DOMAIN, NO_AIRLY_SENSORS
|
||||
from .const import CONF_USE_NEAREST, DEFAULT_NAME, DOMAIN, NO_AIRLY_SENSORS
|
||||
|
||||
DESCRIPTION_PLACEHOLDERS = {
|
||||
"developer_registration_url": "https://developer.airly.eu/register",
|
||||
@@ -45,16 +45,16 @@ class AirlyFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
try:
|
||||
location_point_valid = await check_location(
|
||||
websession,
|
||||
user_input["api_key"],
|
||||
user_input["latitude"],
|
||||
user_input["longitude"],
|
||||
user_input[CONF_API_KEY],
|
||||
user_input[CONF_LATITUDE],
|
||||
user_input[CONF_LONGITUDE],
|
||||
)
|
||||
if not location_point_valid:
|
||||
location_nearest_valid = await check_location(
|
||||
websession,
|
||||
user_input["api_key"],
|
||||
user_input["latitude"],
|
||||
user_input["longitude"],
|
||||
user_input[CONF_API_KEY],
|
||||
user_input[CONF_LATITUDE],
|
||||
user_input[CONF_LONGITUDE],
|
||||
use_nearest=True,
|
||||
)
|
||||
except AirlyError as err:
|
||||
@@ -68,7 +68,7 @@ class AirlyFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
return self.async_abort(reason="wrong_location")
|
||||
use_nearest = True
|
||||
return self.async_create_entry(
|
||||
title=user_input[CONF_NAME],
|
||||
title=DEFAULT_NAME,
|
||||
data={**user_input, CONF_USE_NEAREST: use_nearest},
|
||||
)
|
||||
|
||||
@@ -83,9 +83,6 @@ class AirlyFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
vol.Optional(
|
||||
CONF_LONGITUDE, default=self.hass.config.longitude
|
||||
): cv.longitude,
|
||||
vol.Optional(
|
||||
CONF_NAME, default=self.hass.config.location_name
|
||||
): str,
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
|
||||
@@ -37,3 +37,5 @@ MAX_UPDATE_INTERVAL: Final = 90
|
||||
MIN_UPDATE_INTERVAL: Final = 5
|
||||
NO_AIRLY_SENSORS: Final = "There are no Airly sensors in this area yet."
|
||||
URL = "https://airly.org/map/#{latitude},{longitude}"
|
||||
|
||||
DEFAULT_NAME: Final = "Airly"
|
||||
|
||||
@@ -127,7 +127,7 @@ SENSOR_TYPES: tuple[AirlySensorEntityDescription, ...] = (
|
||||
),
|
||||
AirlySensorEntityDescription(
|
||||
key=ATTR_API_CO,
|
||||
translation_key="co",
|
||||
device_class=SensorDeviceClass.CO,
|
||||
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
@@ -178,7 +178,7 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Airly sensor entities based on a config entry."""
|
||||
name = entry.data[CONF_NAME]
|
||||
name = entry.data.get(CONF_NAME) or entry.title
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
|
||||
@@ -13,8 +13,7 @@
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]",
|
||||
"latitude": "[%key:common::config_flow::data::latitude%]",
|
||||
"longitude": "[%key:common::config_flow::data::longitude%]",
|
||||
"name": "[%key:common::config_flow::data::name%]"
|
||||
"longitude": "[%key:common::config_flow::data::longitude%]"
|
||||
},
|
||||
"description": "To generate API key go to {developer_registration_url}"
|
||||
}
|
||||
@@ -24,9 +23,6 @@
|
||||
"sensor": {
|
||||
"caqi": {
|
||||
"name": "Common air quality index"
|
||||
},
|
||||
"co": {
|
||||
"name": "[%key:component::sensor::entity_component::carbon_monoxide::name%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -4,7 +4,6 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.automation import DomainSpec
|
||||
from homeassistant.helpers.condition import (
|
||||
ENTITY_STATE_CONDITION_SCHEMA_ANY_ALL_FOR,
|
||||
Condition,
|
||||
EntityStateConditionBase,
|
||||
make_entity_state_condition,
|
||||
@@ -26,7 +25,6 @@ class EntityStateRequiredFeaturesCondition(EntityStateConditionBase):
|
||||
"""State condition."""
|
||||
|
||||
_required_features: int
|
||||
_schema = ENTITY_STATE_CONDITION_SCHEMA_ANY_ALL_FOR
|
||||
|
||||
def entity_filter(self, entities: set[str]) -> set[str]:
|
||||
"""Filter entities of this domain with the required features."""
|
||||
@@ -84,11 +82,9 @@ CONDITIONS: dict[str, type[Condition]] = {
|
||||
AlarmControlPanelState.ARMED_VACATION,
|
||||
AlarmControlPanelEntityFeature.ARM_VACATION,
|
||||
),
|
||||
"is_disarmed": make_entity_state_condition(
|
||||
DOMAIN, AlarmControlPanelState.DISARMED, support_duration=True
|
||||
),
|
||||
"is_disarmed": make_entity_state_condition(DOMAIN, AlarmControlPanelState.DISARMED),
|
||||
"is_triggered": make_entity_state_condition(
|
||||
DOMAIN, AlarmControlPanelState.TRIGGERED, support_duration=True
|
||||
DOMAIN, AlarmControlPanelState.TRIGGERED
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
@@ -1,22 +1,14 @@
|
||||
.condition_common: &condition_common
|
||||
target: &condition_common_target
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
fields: &condition_common_fields
|
||||
behavior: &condition_common_behavior
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: condition_behavior
|
||||
options:
|
||||
- all
|
||||
- any
|
||||
|
||||
.condition_common_for: &condition_common_for
|
||||
target: *condition_common_target
|
||||
fields: &condition_common_for_fields
|
||||
behavior: *condition_common_behavior
|
||||
automation_behavior:
|
||||
mode: condition
|
||||
for:
|
||||
required: true
|
||||
default: 00:00:00
|
||||
@@ -26,7 +18,7 @@
|
||||
is_armed: *condition_common
|
||||
|
||||
is_armed_away:
|
||||
fields: *condition_common_for_fields
|
||||
fields: *condition_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
@@ -34,7 +26,7 @@ is_armed_away:
|
||||
- alarm_control_panel.AlarmControlPanelEntityFeature.ARM_AWAY
|
||||
|
||||
is_armed_home:
|
||||
fields: *condition_common_for_fields
|
||||
fields: *condition_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
@@ -42,7 +34,7 @@ is_armed_home:
|
||||
- alarm_control_panel.AlarmControlPanelEntityFeature.ARM_HOME
|
||||
|
||||
is_armed_night:
|
||||
fields: *condition_common_for_fields
|
||||
fields: *condition_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
@@ -50,13 +42,13 @@ is_armed_night:
|
||||
- alarm_control_panel.AlarmControlPanelEntityFeature.ARM_NIGHT
|
||||
|
||||
is_armed_vacation:
|
||||
fields: *condition_common_for_fields
|
||||
fields: *condition_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
supported_features:
|
||||
- alarm_control_panel.AlarmControlPanelEntityFeature.ARM_VACATION
|
||||
|
||||
is_disarmed: *condition_common_for
|
||||
is_disarmed: *condition_common
|
||||
|
||||
is_triggered: *condition_common_for
|
||||
is_triggered: *condition_common
|
||||
|
||||
@@ -11,6 +11,9 @@
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::alarm_control_panel::common::condition_for_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Alarm is armed"
|
||||
@@ -160,21 +163,6 @@
|
||||
"message": "Arming requires a code but none was given for {entity_id}."
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"condition_behavior": {
|
||||
"options": {
|
||||
"all": "All",
|
||||
"any": "Any"
|
||||
}
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"alarm_arm_away": {
|
||||
"description": "Arms an alarm in the away mode.",
|
||||
|
||||
@@ -7,12 +7,8 @@
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
options:
|
||||
- first
|
||||
- last
|
||||
- any
|
||||
translation_key: trigger_behavior
|
||||
automation_behavior:
|
||||
mode: trigger
|
||||
for:
|
||||
required: true
|
||||
default: 00:00:00
|
||||
|
||||
@@ -11,6 +11,7 @@ from .services import async_setup_services
|
||||
|
||||
PLATFORMS = [
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.BUTTON,
|
||||
Platform.NOTIFY,
|
||||
Platform.SENSOR,
|
||||
Platform.SWITCH,
|
||||
|
||||
@@ -0,0 +1,55 @@
|
||||
"""Support for buttons."""
|
||||
|
||||
from homeassistant.components.button import ButtonEntity
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.util import slugify
|
||||
|
||||
from .coordinator import AmazonConfigEntry, AmazonDevicesCoordinator
|
||||
from .entity import AmazonServiceEntity
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: AmazonConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up button entities for Alexa Devices."""
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
known_routines: set[str] = set()
|
||||
|
||||
def _check_routines() -> None:
|
||||
current_routines = set(coordinator.api.routines)
|
||||
new_routines = current_routines - known_routines
|
||||
if new_routines:
|
||||
known_routines.update(new_routines)
|
||||
async_add_entities(
|
||||
AmazonRoutineButton(coordinator, routine) for routine in new_routines
|
||||
)
|
||||
|
||||
_check_routines()
|
||||
entry.async_on_unload(coordinator.async_add_listener(_check_routines))
|
||||
|
||||
|
||||
class AmazonRoutineButton(AmazonServiceEntity, ButtonEntity):
|
||||
"""Button entity for Alexa routine."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(self, coordinator: AmazonDevicesCoordinator, routine: str) -> None:
|
||||
"""Initialize the routine button entity."""
|
||||
self._coordinator = coordinator
|
||||
self._routine = routine
|
||||
super().__init__(
|
||||
coordinator,
|
||||
EntityDescription(key=slugify(routine), name=routine),
|
||||
)
|
||||
|
||||
async def async_press(self) -> None:
|
||||
"""Handle button press action."""
|
||||
await self._coordinator.api.call_routine(self._routine)
|
||||
@@ -12,12 +12,13 @@ from aioamazondevices.structures import AmazonDevice
|
||||
from aiohttp import ClientSession
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
from homeassistant.helpers.debounce import Debouncer
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
from homeassistant.util import slugify
|
||||
|
||||
from .const import _LOGGER, CONF_LOGIN_DATA, DOMAIN
|
||||
|
||||
@@ -64,6 +65,13 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
|
||||
for identifier_domain, identifier in device.identifiers
|
||||
if identifier_domain == DOMAIN
|
||||
}
|
||||
self.previous_routines: set[str] = {
|
||||
routine.unique_id
|
||||
for routine in er.async_entries_for_config_entry(
|
||||
er.async_get(hass), entry.entry_id
|
||||
)
|
||||
if routine.domain == Platform.BUTTON
|
||||
}
|
||||
|
||||
async def _async_update_data(self) -> dict[str, AmazonDevice]:
|
||||
"""Update device data."""
|
||||
@@ -92,8 +100,13 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
|
||||
current_devices = set(data.keys())
|
||||
if stale_devices := self.previous_devices - current_devices:
|
||||
await self._async_remove_device_stale(stale_devices)
|
||||
|
||||
self.previous_devices = current_devices
|
||||
|
||||
current_routines = {slugify(routine) for routine in self.api.routines}
|
||||
if stale_routines := self.previous_routines - current_routines:
|
||||
await self._async_remove_routine_stale(stale_routines)
|
||||
self.previous_routines = current_routines
|
||||
|
||||
return data
|
||||
|
||||
async def _async_remove_device_stale(
|
||||
@@ -116,3 +129,23 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
|
||||
device_id=device.id,
|
||||
remove_config_entry_id=self.config_entry.entry_id,
|
||||
)
|
||||
|
||||
async def _async_remove_routine_stale(
|
||||
self,
|
||||
stale_routines: set[str],
|
||||
) -> None:
|
||||
"""Remove stale routine."""
|
||||
entity_registry = er.async_get(self.hass)
|
||||
|
||||
for routine in stale_routines:
|
||||
_LOGGER.debug(
|
||||
"Detected change in routines: routine %s removed",
|
||||
routine,
|
||||
)
|
||||
entity_id = entity_registry.async_get_entity_id(
|
||||
Platform.BUTTON,
|
||||
DOMAIN,
|
||||
f"{slugify(self.config_entry.unique_id)}-{slugify(routine)}",
|
||||
)
|
||||
if entity_id:
|
||||
entity_registry.async_remove(entity_id)
|
||||
|
||||
@@ -2,9 +2,10 @@
|
||||
|
||||
from aioamazondevices.structures import AmazonDevice
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
from homeassistant.util import slugify
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import AmazonDevicesCoordinator
|
||||
@@ -50,3 +51,32 @@ class AmazonEntity(CoordinatorEntity[AmazonDevicesCoordinator]):
|
||||
and self._serial_num in self.coordinator.data
|
||||
and self.device.online
|
||||
)
|
||||
|
||||
|
||||
class AmazonServiceEntity(CoordinatorEntity[AmazonDevicesCoordinator]):
|
||||
"""Defines Alexa Devices entity for service device."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AmazonDevicesCoordinator,
|
||||
description: EntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the service entity."""
|
||||
|
||||
super().__init__(coordinator)
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, service_device_id(coordinator))},
|
||||
manufacturer="Amazon",
|
||||
entry_type=DeviceEntryType.SERVICE,
|
||||
)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = (
|
||||
f"{slugify(coordinator.config_entry.unique_id)}-{description.key}"
|
||||
)
|
||||
|
||||
|
||||
def service_device_id(coordinator: AmazonDevicesCoordinator) -> str:
|
||||
"""Return service device id."""
|
||||
return slugify(f"{coordinator.config_entry.unique_id}_service_device")
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
"""Base entity for Anthropic."""
|
||||
|
||||
import base64
|
||||
from collections.abc import AsyncGenerator, Callable, Iterable
|
||||
from collections import deque
|
||||
from collections.abc import AsyncIterator, Callable, Iterable
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import UTC, datetime
|
||||
import json
|
||||
@@ -20,18 +21,22 @@ from anthropic.types import (
|
||||
CitationWebSearchResultLocationParam,
|
||||
CodeExecutionTool20250825Param,
|
||||
CodeExecutionToolResultBlock,
|
||||
CodeExecutionToolResultBlockContent,
|
||||
CodeExecutionToolResultBlockParamContentParam,
|
||||
Container,
|
||||
ContentBlock,
|
||||
ContentBlockParam,
|
||||
DocumentBlockParam,
|
||||
ImageBlockParam,
|
||||
InputJSONDelta,
|
||||
JSONOutputFormatParam,
|
||||
Message,
|
||||
MessageDeltaUsage,
|
||||
MessageParam,
|
||||
MessageStreamEvent,
|
||||
ModelInfo,
|
||||
OutputConfigParam,
|
||||
RawContentBlockDelta,
|
||||
RawContentBlockDeltaEvent,
|
||||
RawContentBlockStartEvent,
|
||||
RawContentBlockStopEvent,
|
||||
@@ -68,18 +73,30 @@ from anthropic.types import (
|
||||
WebSearchTool20250305Param,
|
||||
WebSearchTool20260209Param,
|
||||
WebSearchToolResultBlock,
|
||||
WebSearchToolResultBlockContent,
|
||||
WebSearchToolResultBlockParamContentParam,
|
||||
)
|
||||
from anthropic.types.bash_code_execution_tool_result_block import (
|
||||
Content as BashCodeExecutionToolResultBlockContent,
|
||||
)
|
||||
from anthropic.types.bash_code_execution_tool_result_block_param import (
|
||||
Content as BashCodeExecutionToolResultBlockParamContentParam,
|
||||
)
|
||||
from anthropic.types.message_create_params import MessageCreateParamsStreaming
|
||||
from anthropic.types.raw_message_delta_event import Delta
|
||||
from anthropic.types.text_editor_code_execution_tool_result_block import (
|
||||
Content as TextEditorCodeExecutionToolResultBlockContent,
|
||||
)
|
||||
from anthropic.types.text_editor_code_execution_tool_result_block_param import (
|
||||
Content as TextEditorCodeExecutionToolResultBlockParamContentParam,
|
||||
)
|
||||
from anthropic.types.tool_search_tool_result_block import (
|
||||
Content as ToolSearchToolResultBlockContent,
|
||||
)
|
||||
from anthropic.types.tool_search_tool_result_block_param import (
|
||||
Content as ToolSearchToolResultBlockParamContentParam,
|
||||
)
|
||||
from anthropic.types.tool_use_block import Caller
|
||||
import voluptuous as vol
|
||||
from voluptuous_openapi import convert
|
||||
|
||||
@@ -91,7 +108,7 @@ from homeassistant.helpers import device_registry as dr, llm
|
||||
from homeassistant.helpers.json import json_dumps
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
from homeassistant.util import slugify
|
||||
from homeassistant.util.json import JsonObjectType
|
||||
from homeassistant.util.json import JsonArrayType, JsonObjectType
|
||||
|
||||
from .const import (
|
||||
CONF_CHAT_MODEL,
|
||||
@@ -445,13 +462,7 @@ def _convert_content( # noqa: C901
|
||||
return messages, container_id
|
||||
|
||||
|
||||
async def _transform_stream( # noqa: C901 - This is complex, but better to have it in one place
|
||||
chat_log: conversation.ChatLog,
|
||||
stream: AsyncStream[MessageStreamEvent],
|
||||
output_tool: str | None = None,
|
||||
) -> AsyncGenerator[
|
||||
conversation.AssistantContentDeltaDict | conversation.ToolResultContentDeltaDict
|
||||
]:
|
||||
class AnthropicDeltaStream:
|
||||
"""Transform the response stream into HA format.
|
||||
|
||||
A typical stream of responses might look something like the following:
|
||||
@@ -481,201 +492,376 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
|
||||
Each message could contain multiple blocks of the same type.
|
||||
"""
|
||||
if stream is None or not hasattr(stream, "__aiter__"):
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN, translation_key="unexpected_stream_object"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
chat_log: conversation.ChatLog,
|
||||
stream: AsyncStream[MessageStreamEvent],
|
||||
output_tool: str | None = None,
|
||||
) -> None:
|
||||
"""Initialize the delta stream."""
|
||||
self._chat_log: conversation.ChatLog = chat_log
|
||||
self._stream: AsyncStream[MessageStreamEvent] = stream
|
||||
self._output_tool: str | None = output_tool
|
||||
|
||||
self._buffer: deque[
|
||||
conversation.AssistantContentDeltaDict
|
||||
| conversation.ToolResultContentDeltaDict
|
||||
] = deque()
|
||||
self._stream_iterator: AsyncIterator[MessageStreamEvent] | None = None
|
||||
|
||||
self._current_tool_block: ToolUseBlockParam | ServerToolUseBlockParam | None = (
|
||||
None
|
||||
)
|
||||
self._current_tool_args: str = ""
|
||||
self._content_details = ContentDetails()
|
||||
self._content_details.add_citation_detail()
|
||||
self._input_usage: Usage | None = None
|
||||
self._first_block: bool = True
|
||||
|
||||
current_tool_block: ToolUseBlockParam | ServerToolUseBlockParam | None = None
|
||||
current_tool_args: str
|
||||
content_details = ContentDetails()
|
||||
content_details.add_citation_detail()
|
||||
input_usage: Usage | None = None
|
||||
first_block: bool = True
|
||||
def __aiter__(
|
||||
self,
|
||||
) -> AsyncIterator[
|
||||
conversation.AssistantContentDeltaDict | conversation.ToolResultContentDeltaDict
|
||||
]:
|
||||
"""Initialize the stream and return the async iterator."""
|
||||
if self._stream is None or not hasattr(self._stream, "__aiter__"):
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN, translation_key="unexpected_stream_object"
|
||||
)
|
||||
if self._stream_iterator is None:
|
||||
self._stream_iterator = self._stream.__aiter__()
|
||||
return self
|
||||
|
||||
async for response in stream:
|
||||
LOGGER.debug("Received response: %s", response)
|
||||
async def __anext__(
|
||||
self,
|
||||
) -> (
|
||||
conversation.AssistantContentDeltaDict | conversation.ToolResultContentDeltaDict
|
||||
):
|
||||
"""Get the next item from the stream."""
|
||||
while True:
|
||||
if self._buffer:
|
||||
return self._buffer.popleft()
|
||||
|
||||
if isinstance(response, RawMessageStartEvent):
|
||||
input_usage = response.message.usage
|
||||
first_block = True
|
||||
elif isinstance(response, RawContentBlockStartEvent):
|
||||
if isinstance(response.content_block, ToolUseBlock):
|
||||
current_tool_block = ToolUseBlockParam(
|
||||
type="tool_use",
|
||||
id=response.content_block.id,
|
||||
name=response.content_block.name,
|
||||
input=response.content_block.input or {},
|
||||
)
|
||||
current_tool_args = ""
|
||||
if response.content_block.name == output_tool:
|
||||
if first_block or content_details.has_content():
|
||||
if content_details:
|
||||
content_details.delete_empty()
|
||||
yield {"native": content_details}
|
||||
content_details = ContentDetails()
|
||||
content_details.add_citation_detail()
|
||||
yield {"role": "assistant"}
|
||||
first_block = False
|
||||
elif isinstance(response.content_block, TextBlock):
|
||||
if ( # Do not start a new assistant content just for citations, concatenate consecutive blocks with citations instead.
|
||||
first_block
|
||||
or (
|
||||
not content_details.has_citations()
|
||||
and response.content_block.citations is None
|
||||
and content_details.has_content()
|
||||
)
|
||||
):
|
||||
if content_details:
|
||||
content_details.delete_empty()
|
||||
yield {"native": content_details}
|
||||
content_details = ContentDetails()
|
||||
yield {"role": "assistant"}
|
||||
first_block = False
|
||||
content_details.add_citation_detail()
|
||||
if response.content_block.text:
|
||||
content_details.citation_details[-1].length += len(
|
||||
response.content_block.text
|
||||
)
|
||||
yield {"content": response.content_block.text}
|
||||
elif isinstance(response.content_block, ThinkingBlock):
|
||||
if first_block or content_details.thinking_signature:
|
||||
if content_details:
|
||||
content_details.delete_empty()
|
||||
yield {"native": content_details}
|
||||
content_details = ContentDetails()
|
||||
content_details.add_citation_detail()
|
||||
yield {"role": "assistant"}
|
||||
first_block = False
|
||||
elif isinstance(response.content_block, RedactedThinkingBlock):
|
||||
LOGGER.debug(
|
||||
"Some of Claude’s internal reasoning has been automatically "
|
||||
"encrypted for safety reasons. This doesn’t affect the quality of "
|
||||
"responses"
|
||||
)
|
||||
if first_block or content_details.redacted_thinking:
|
||||
if content_details:
|
||||
content_details.delete_empty()
|
||||
yield {"native": content_details}
|
||||
content_details = ContentDetails()
|
||||
content_details.add_citation_detail()
|
||||
yield {"role": "assistant"}
|
||||
first_block = False
|
||||
content_details.redacted_thinking = response.content_block.data
|
||||
elif isinstance(response.content_block, ServerToolUseBlock):
|
||||
current_tool_block = ServerToolUseBlockParam(
|
||||
type="server_tool_use",
|
||||
id=response.content_block.id,
|
||||
name=response.content_block.name,
|
||||
input=response.content_block.input or {},
|
||||
)
|
||||
current_tool_args = ""
|
||||
elif isinstance(
|
||||
response.content_block,
|
||||
(
|
||||
WebSearchToolResultBlock,
|
||||
CodeExecutionToolResultBlock,
|
||||
BashCodeExecutionToolResultBlock,
|
||||
TextEditorCodeExecutionToolResultBlock,
|
||||
ToolSearchToolResultBlock,
|
||||
),
|
||||
):
|
||||
if content_details:
|
||||
content_details.delete_empty()
|
||||
yield {"native": content_details}
|
||||
content_details = ContentDetails()
|
||||
content_details.add_citation_detail()
|
||||
yield {
|
||||
"role": "tool_result",
|
||||
"tool_call_id": response.content_block.tool_use_id,
|
||||
"tool_name": response.content_block.type.removesuffix(
|
||||
"_tool_result"
|
||||
),
|
||||
"tool_result": {
|
||||
"content": cast(
|
||||
JsonObjectType, response.content_block.to_dict()["content"]
|
||||
)
|
||||
}
|
||||
if isinstance(response.content_block.content, list)
|
||||
else cast(JsonObjectType, response.content_block.content.to_dict()),
|
||||
response = await self._stream_iterator.__anext__() # type: ignore[union-attr]
|
||||
|
||||
LOGGER.debug("Received response: %s", response)
|
||||
self.on_message_stream_event(response)
|
||||
|
||||
def on_message_stream_event(self, event: MessageStreamEvent) -> None:
    """Route a raw MessageStreamEvent to its dedicated on_* handler."""
    if isinstance(event, RawMessageStartEvent):
        self.on_message_start_event(event.message)
    elif isinstance(event, RawContentBlockStartEvent):
        self.on_content_block_start_event(event.content_block, event.index)
    elif isinstance(event, RawContentBlockDeltaEvent):
        self.on_content_block_delta_event(event.delta)
    elif isinstance(event, RawContentBlockStopEvent):
        self.on_content_block_stop_event(event.index)
    elif isinstance(event, RawMessageDeltaEvent):
        self.on_message_delta_event(event.delta, event.usage)
    elif isinstance(event, RawMessageStopEvent):
        self.on_message_stop_event()
    else:
        LOGGER.debug("Unhandled event type: %s", event.type)  # type: ignore[unreachable] # pragma: no cover - All types are handled but we want to verify that
|
||||
|
||||
def on_message_start_event(self, message: Message) -> None:
    """Handle RawMessageStartEvent.

    Resets per-message state: the next content block opens a fresh
    assistant message, and the prompt-side usage is kept so the final
    token stats can include it.
    """
    self._first_block = True
    self._input_usage = message.usage
|
||||
|
||||
def on_content_block_start_event(
    self, content_block: ContentBlock, index: int
) -> None:
    """Route a RawContentBlockStartEvent to the handler for its block type."""
    if isinstance(content_block, ToolUseBlock):
        self.on_tool_use_block(
            content_block.id,
            content_block.input,
            content_block.name,
            content_block.caller,
        )
    elif isinstance(content_block, TextBlock):
        self.on_text_block(content_block.text, content_block.citations)
    elif isinstance(content_block, ThinkingBlock):
        self.on_thinking_block(content_block.thinking, content_block.signature)
    elif isinstance(content_block, RedactedThinkingBlock):
        self.on_redacted_thinking_block(content_block.data)
    elif isinstance(content_block, ServerToolUseBlock):
        self.on_server_tool_use_block(
            content_block.id,
            content_block.name,
            content_block.input,
            content_block.caller,
        )
    elif isinstance(
        content_block,
        (
            WebSearchToolResultBlock,
            CodeExecutionToolResultBlock,
            BashCodeExecutionToolResultBlock,
            TextEditorCodeExecutionToolResultBlock,
            ToolSearchToolResultBlock,
        ),
    ):
        # Not every server tool result block type carries a caller attribute.
        self.on_server_tool_result_block(
            content_block.tool_use_id,
            content_block.type,
            content_block.content,
            getattr(content_block, "caller", None),
        )
    else:
        LOGGER.debug("Unhandled content block type: %s", content_block.type)
|
||||
|
||||
def on_tool_use_block(
    self, id: str, input: dict[str, Any], name: str, caller: Caller | None
) -> None:
    """Handle ToolUseBlock.

    Buffers the tool call; its JSON arguments arrive afterwards through
    InputJSONDelta events and are accumulated in ``_current_tool_args``.
    """
    self._current_tool_args = ""
    self._current_tool_block = ToolUseBlockParam(
        type="tool_use",
        id=id,
        name=name,
        input=input,
    )
    if name != self._output_tool:
        return
    # The structured-output tool streams assistant content, so it may need
    # to open a new assistant message first.
    if self._first_block or self._content_details.has_content():
        if self._content_details:
            self._content_details.delete_empty()
            self._buffer.append({"native": self._content_details})
        self._content_details = ContentDetails()
        self._content_details.add_citation_detail()
        self._buffer.append({"role": "assistant"})
    self._first_block = False
|
||||
|
||||
def on_text_block(self, text: str, citations: list[TextCitation] | None) -> None:
    """Handle TextBlock.

    Does not start a new assistant content just for citations; consecutive
    blocks that carry citations are concatenated into the current content.
    """
    start_new_message = self._first_block
    if not start_new_message:
        start_new_message = (
            not self._content_details.has_citations()
            and citations is None
            and self._content_details.has_content()
        )
    if start_new_message:
        if self._content_details:
            self._content_details.delete_empty()
            self._buffer.append({"native": self._content_details})
        self._content_details = ContentDetails()
        self._buffer.append({"role": "assistant"})
        self._first_block = False
    # Track a citation span for this text block.
    self._content_details.add_citation_detail()
    if text:
        self._content_details.citation_details[-1].length += len(text)
        self._buffer.append({"content": text})
|
||||
|
||||
def on_thinking_block(self, thinking: str, signature: str) -> None:
    """Handle ThinkingBlock.

    Only decides whether a fresh assistant message must be opened; the
    thinking text and signature themselves are delivered via
    ThinkingDelta/SignatureDelta events.
    """
    if not self._first_block and not self._content_details.thinking_signature:
        return
    if self._content_details:
        self._content_details.delete_empty()
        self._buffer.append({"native": self._content_details})
    self._content_details = ContentDetails()
    self._content_details.add_citation_detail()
    self._buffer.append({"role": "assistant"})
    self._first_block = False
|
||||
|
||||
def on_redacted_thinking_block(self, data: str) -> None:
    """Handle RedactedThinkingBlock.

    Stores the encrypted reasoning payload on the current content details,
    opening a new assistant message when one is already present.
    """
    LOGGER.debug(
        "Some of Claude’s internal reasoning has been automatically "
        "encrypted for safety reasons. This doesn’t affect the quality of "
        "responses"
    )
    needs_new_message = bool(
        self._first_block or self._content_details.redacted_thinking
    )
    if needs_new_message:
        if self._content_details:
            self._content_details.delete_empty()
            self._buffer.append({"native": self._content_details})
        self._content_details = ContentDetails()
        self._content_details.add_citation_detail()
        self._buffer.append({"role": "assistant"})
        self._first_block = False
    self._content_details.redacted_thinking = data
|
||||
|
||||
def on_server_tool_use_block(
    self,
    id: str,
    name: Literal[
        "web_search",
        "web_fetch",
        "code_execution",
        "bash_code_execution",
        "text_editor_code_execution",
        "tool_search_tool_regex",
        "tool_search_tool_bm25",
    ],
    input: dict[str, Any],
    caller: Caller | None,
) -> None:
    """Handle ServerToolUseBlock.

    Buffers the server-side tool call; like client tool calls, its JSON
    arguments are streamed afterwards through InputJSONDelta events.
    """
    self._current_tool_args = ""
    self._current_tool_block = ServerToolUseBlockParam(
        type="server_tool_use",
        id=id,
        name=name,
        input=input,
    )
|
||||
|
||||
def on_server_tool_result_block(
|
||||
self,
|
||||
tool_use_id: str,
|
||||
tool_name: Literal[
|
||||
"web_search_tool_result",
|
||||
"code_execution_tool_result",
|
||||
"bash_code_execution_tool_result",
|
||||
"text_editor_code_execution_tool_result",
|
||||
"tool_search_tool_result",
|
||||
],
|
||||
content: WebSearchToolResultBlockContent
|
||||
| CodeExecutionToolResultBlockContent
|
||||
| BashCodeExecutionToolResultBlockContent
|
||||
| TextEditorCodeExecutionToolResultBlockContent
|
||||
| ToolSearchToolResultBlockContent,
|
||||
caller: Caller | None,
|
||||
) -> None:
|
||||
"""Handle various server tool result blocks."""
|
||||
if self._content_details:
|
||||
self._content_details.delete_empty()
|
||||
self._buffer.append({"native": self._content_details})
|
||||
self._content_details = ContentDetails()
|
||||
self._content_details.add_citation_detail()
|
||||
self._buffer.append(
|
||||
{
|
||||
"role": "tool_result",
|
||||
"tool_call_id": tool_use_id,
|
||||
"tool_name": tool_name.removesuffix("_tool_result"),
|
||||
"tool_result": {
|
||||
"content": cast(JsonArrayType, [x.to_dict() for x in content])
|
||||
}
|
||||
first_block = True
|
||||
elif isinstance(response, RawContentBlockDeltaEvent):
|
||||
if isinstance(response.delta, InputJSONDelta):
|
||||
if (
|
||||
current_tool_block is not None
|
||||
and current_tool_block["name"] == output_tool
|
||||
):
|
||||
content_details.citation_details[-1].length += len(
|
||||
response.delta.partial_json
|
||||
)
|
||||
yield {"content": response.delta.partial_json}
|
||||
else:
|
||||
current_tool_args += response.delta.partial_json
|
||||
elif isinstance(response.delta, TextDelta):
|
||||
if response.delta.text:
|
||||
content_details.citation_details[-1].length += len(
|
||||
response.delta.text
|
||||
)
|
||||
yield {"content": response.delta.text}
|
||||
elif isinstance(response.delta, ThinkingDelta):
|
||||
if response.delta.thinking:
|
||||
yield {"thinking_content": response.delta.thinking}
|
||||
elif isinstance(response.delta, SignatureDelta):
|
||||
content_details.thinking_signature = response.delta.signature
|
||||
elif isinstance(response.delta, CitationsDelta):
|
||||
content_details.add_citation(response.delta.citation)
|
||||
elif isinstance(response, RawContentBlockStopEvent):
|
||||
if current_tool_block is not None:
|
||||
if current_tool_block["name"] == output_tool:
|
||||
current_tool_block = None
|
||||
continue
|
||||
tool_args = json.loads(current_tool_args) if current_tool_args else {}
|
||||
current_tool_block["input"] |= tool_args
|
||||
yield {
|
||||
if isinstance(content, list)
|
||||
else cast(JsonObjectType, content.to_dict()),
|
||||
}
|
||||
)
|
||||
self._first_block = True
|
||||
|
||||
def on_content_block_delta_event(self, delta: RawContentBlockDelta) -> None:
    """Route a RawContentBlockDeltaEvent to the handler for its delta type."""
    if isinstance(delta, InputJSONDelta):
        self.on_input_json_delta(delta.partial_json)
    elif isinstance(delta, TextDelta):
        self.on_text_delta(delta.text)
    elif isinstance(delta, ThinkingDelta):
        self.on_thinking_delta(delta.thinking)
    elif isinstance(delta, SignatureDelta):
        self.on_signature_delta(delta.signature)
    elif isinstance(delta, CitationsDelta):
        self.on_citations_delta(delta.citation)
    else:
        LOGGER.debug("Unhandled content delta type: %s", delta.type)  # type: ignore[unreachable] # pragma: no cover - All types are handled but we want to verify that
|
||||
|
||||
def on_input_json_delta(self, partial_json: str) -> None:
    """Handle InputJSONDelta.

    JSON destined for the structured-output tool is streamed as assistant
    content; any other tool's JSON is accumulated until the block ends.
    """
    block = self._current_tool_block
    if block is None or block["name"] != self._output_tool:
        # Ordinary tool call: collect the raw argument JSON.
        self._current_tool_args += partial_json
        return
    # Structured-output tool: emit directly and extend the citation span.
    self._content_details.citation_details[-1].length += len(partial_json)
    self._buffer.append({"content": partial_json})
|
||||
|
||||
def on_text_delta(self, text: str) -> None:
    """Handle TextDelta by emitting text and growing the citation span."""
    if not text:
        return
    self._content_details.citation_details[-1].length += len(text)
    self._buffer.append({"content": text})
|
||||
|
||||
def on_thinking_delta(self, thinking: str) -> None:
    """Handle ThinkingDelta by buffering non-empty thinking text."""
    if not thinking:
        return
    self._buffer.append({"thinking_content": thinking})
|
||||
|
||||
def on_signature_delta(self, signature: str) -> None:
    """Handle SignatureDelta by recording the thinking signature."""
    # Kept on the current content details alongside the thinking text.
    self._content_details.thinking_signature = signature
|
||||
|
||||
def on_citations_delta(self, citation: TextCitation) -> None:
    """Handle CitationsDelta by adding the citation to the current details."""
    self._content_details.add_citation(citation)
|
||||
|
||||
def on_content_block_stop_event(self, index: int) -> None:
|
||||
"""Handle RawContentBlockStopEvent."""
|
||||
if self._current_tool_block is not None:
|
||||
if self._current_tool_block["name"] == self._output_tool:
|
||||
self._current_tool_block = None
|
||||
return
|
||||
tool_args = (
|
||||
json.loads(self._current_tool_args) if self._current_tool_args else {}
|
||||
)
|
||||
self._current_tool_block["input"] |= tool_args
|
||||
self._buffer.append(
|
||||
{
|
||||
"tool_calls": [
|
||||
llm.ToolInput(
|
||||
id=current_tool_block["id"],
|
||||
tool_name=current_tool_block["name"],
|
||||
tool_args=current_tool_block["input"],
|
||||
external=current_tool_block["type"] == "server_tool_use",
|
||||
id=self._current_tool_block["id"],
|
||||
tool_name=self._current_tool_block["name"],
|
||||
tool_args=self._current_tool_block["input"],
|
||||
external=self._current_tool_block["type"]
|
||||
== "server_tool_use",
|
||||
)
|
||||
]
|
||||
}
|
||||
current_tool_block = None
|
||||
elif isinstance(response, RawMessageDeltaEvent):
|
||||
if (usage := response.usage) is not None:
|
||||
chat_log.async_trace(_create_token_stats(input_usage, usage))
|
||||
content_details.container = response.delta.container
|
||||
if response.delta.stop_reason == "refusal":
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN, translation_key="api_refusal"
|
||||
)
|
||||
elif isinstance(response, RawMessageStopEvent):
|
||||
if content_details:
|
||||
content_details.delete_empty()
|
||||
yield {"native": content_details}
|
||||
content_details = ContentDetails()
|
||||
content_details.add_citation_detail()
|
||||
)
|
||||
self._current_tool_block = None
|
||||
|
||||
def on_message_delta_event(self, delta: Delta, usage: MessageDeltaUsage) -> None:
    """Handle RawMessageDeltaEvent.

    Records the container, traces token usage for this turn, and raises
    when the API reports a refusal stop reason.
    """
    self._content_details.container = delta.container
    # Prompt-side usage was captured in on_message_start_event.
    self._chat_log.async_trace(self._create_token_stats(self._input_usage, usage))
    if delta.stop_reason == "refusal":
        raise HomeAssistantError(
            translation_domain=DOMAIN, translation_key="api_refusal"
        )
|
||||
|
||||
def _create_token_stats(
|
||||
input_usage: Usage | None, response_usage: MessageDeltaUsage
|
||||
) -> dict[str, Any]:
|
||||
"""Create token stats for conversation agent tracing."""
|
||||
input_tokens = 0
|
||||
cached_input_tokens = 0
|
||||
if input_usage:
|
||||
input_tokens = input_usage.input_tokens
|
||||
cached_input_tokens = input_usage.cache_creation_input_tokens or 0
|
||||
output_tokens = response_usage.output_tokens
|
||||
return {
|
||||
"stats": {
|
||||
"input_tokens": input_tokens,
|
||||
"cached_input_tokens": cached_input_tokens,
|
||||
"output_tokens": output_tokens,
|
||||
def on_message_stop_event(self) -> None:
    """Handle RawMessageStopEvent by flushing pending content details."""
    if not self._content_details:
        return
    self._content_details.delete_empty()
    self._buffer.append({"native": self._content_details})
    # Start a fresh details object for any follow-up message.
    self._content_details = ContentDetails()
    self._content_details.add_citation_detail()
|
||||
|
||||
def _create_token_stats(
|
||||
self, input_usage: Usage | None, response_usage: MessageDeltaUsage
|
||||
) -> dict[str, Any]:
|
||||
"""Create token stats for conversation agent tracing."""
|
||||
input_tokens = 0
|
||||
cached_input_tokens = 0
|
||||
if input_usage:
|
||||
input_tokens = input_usage.input_tokens
|
||||
cached_input_tokens = input_usage.cache_creation_input_tokens or 0
|
||||
output_tokens = response_usage.output_tokens
|
||||
return {
|
||||
"stats": {
|
||||
"input_tokens": input_tokens,
|
||||
"cached_input_tokens": cached_input_tokens,
|
||||
"output_tokens": output_tokens,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class AnthropicBaseLLMEntity(CoordinatorEntity[AnthropicCoordinator]):
|
||||
@@ -963,7 +1149,7 @@ class AnthropicBaseLLMEntity(CoordinatorEntity[AnthropicCoordinator]):
|
||||
content
|
||||
async for content in chat_log.async_add_delta_content_stream(
|
||||
self.entity_id,
|
||||
_transform_stream(
|
||||
AnthropicDeltaStream(
|
||||
chat_log,
|
||||
stream,
|
||||
output_tool=structure_name or None,
|
||||
|
||||
@@ -155,7 +155,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
hass.data[DATA_COMPONENT] = storage_collection
|
||||
|
||||
collection.DictStorageCollectionWebsocket(
|
||||
storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS
|
||||
storage_collection,
|
||||
DOMAIN,
|
||||
DOMAIN,
|
||||
CREATE_FIELDS,
|
||||
UPDATE_FIELDS,
|
||||
admin_only=True,
|
||||
).async_setup(hass)
|
||||
|
||||
websocket_api.async_register_command(hass, handle_integration_list)
|
||||
@@ -341,6 +346,7 @@ async def handle_integration_list(
|
||||
vol.Required("config_entry_id"): str,
|
||||
}
|
||||
)
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.async_response
|
||||
async def handle_config_entry(
|
||||
hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
|
||||
|
||||
@@ -28,7 +28,7 @@ class AquacellEntity(CoordinatorEntity[AquacellCoordinator]):
|
||||
self._attr_unique_id = f"{softener_key}-{entity_key}"
|
||||
self._attr_device_info = DeviceInfo(
|
||||
name=self.softener.name,
|
||||
hw_version=self.softener.fwVersion,
|
||||
hw_version=self.softener.diagnostics.fw_version,
|
||||
identifiers={(DOMAIN, str(softener_key))},
|
||||
manufacturer=self.softener.brand,
|
||||
model=self.softener.ssn,
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aioaquacell"],
|
||||
"requirements": ["aioaquacell==0.2.0"]
|
||||
"requirements": ["aioaquacell==1.0.0"]
|
||||
}
|
||||
|
||||
@@ -38,39 +38,39 @@ SENSORS: tuple[SoftenerSensorEntityDescription, ...] = (
|
||||
translation_key="salt_left_side_percentage",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
value_fn=lambda softener: softener.salt.leftPercent,
|
||||
value_fn=lambda softener: softener.salt.left_percent,
|
||||
),
|
||||
SoftenerSensorEntityDescription(
|
||||
key="salt_right_side_percentage",
|
||||
translation_key="salt_right_side_percentage",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
value_fn=lambda softener: softener.salt.rightPercent,
|
||||
value_fn=lambda softener: softener.salt.right_percent,
|
||||
),
|
||||
SoftenerSensorEntityDescription(
|
||||
key="salt_left_side_time_remaining",
|
||||
translation_key="salt_left_side_time_remaining",
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.DAYS,
|
||||
value_fn=lambda softener: softener.salt.leftDays,
|
||||
value_fn=lambda softener: softener.salt.left_days,
|
||||
),
|
||||
SoftenerSensorEntityDescription(
|
||||
key="salt_right_side_time_remaining",
|
||||
translation_key="salt_right_side_time_remaining",
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.DAYS,
|
||||
value_fn=lambda softener: softener.salt.rightDays,
|
||||
value_fn=lambda softener: softener.salt.right_days,
|
||||
),
|
||||
SoftenerSensorEntityDescription(
|
||||
key="battery",
|
||||
device_class=SensorDeviceClass.BATTERY,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
value_fn=lambda softener: softener.battery,
|
||||
value_fn=lambda softener: softener.diagnostics.battery,
|
||||
),
|
||||
SoftenerSensorEntityDescription(
|
||||
key="wi_fi_strength",
|
||||
translation_key="wi_fi_strength",
|
||||
value_fn=lambda softener: softener.wifiLevel,
|
||||
value_fn=lambda softener: softener.diagnostics.wifi_level,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=[
|
||||
"high",
|
||||
@@ -82,7 +82,7 @@ SENSORS: tuple[SoftenerSensorEntityDescription, ...] = (
|
||||
key="last_update",
|
||||
translation_key="last_update",
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
value_fn=lambda softener: softener.lastUpdate,
|
||||
value_fn=lambda softener: softener.diagnostics.last_update,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -4,8 +4,9 @@ from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
|
||||
from arcam.fmj import IncomingVideoAspectRatio, IncomingVideoColorspace
|
||||
from arcam.fmj import IncomingVideoAspectRatio, IncomingVideoColorspace, IntOrTypeEnum
|
||||
from arcam.fmj.state import IncomingAudioConfig, IncomingAudioFormat, State
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
@@ -21,6 +22,25 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from .coordinator import ArcamFmjConfigEntry
|
||||
from .entity import ArcamFmjEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _enum_options(value: type[IntOrTypeEnum]) -> list[str]:
|
||||
return [
|
||||
member.name.lower() for member in value if not member.name.startswith("CODE_")
|
||||
]
|
||||
|
||||
|
||||
def _enum_value(value: IntOrTypeEnum | None) -> str | None:
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
if value.name.startswith("CODE_"):
|
||||
_LOGGER.debug("Undefined enum value %s ignored", value)
|
||||
return None
|
||||
|
||||
return value.name.lower()
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class ArcamFmjSensorEntityDescription(SensorEntityDescription):
|
||||
@@ -75,9 +95,9 @@ SENSORS: tuple[ArcamFmjSensorEntityDescription, ...] = (
|
||||
translation_key="incoming_video_aspect_ratio",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=[member.name.lower() for member in IncomingVideoAspectRatio],
|
||||
options=_enum_options(IncomingVideoAspectRatio),
|
||||
value_fn=lambda state: (
|
||||
vp.aspect_ratio.name.lower()
|
||||
_enum_value(vp.aspect_ratio)
|
||||
if (vp := state.get_incoming_video_parameters()) is not None
|
||||
else None
|
||||
),
|
||||
@@ -87,11 +107,10 @@ SENSORS: tuple[ArcamFmjSensorEntityDescription, ...] = (
|
||||
translation_key="incoming_video_colorspace",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=[member.name.lower() for member in IncomingVideoColorspace],
|
||||
options=_enum_options(IncomingVideoColorspace),
|
||||
value_fn=lambda state: (
|
||||
vp.colorspace.name.lower()
|
||||
_enum_value(vp.colorspace)
|
||||
if (vp := state.get_incoming_video_parameters()) is not None
|
||||
and vp.colorspace is not None
|
||||
else None
|
||||
),
|
||||
),
|
||||
@@ -100,24 +119,16 @@ SENSORS: tuple[ArcamFmjSensorEntityDescription, ...] = (
|
||||
translation_key="incoming_audio_format",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=[member.name.lower() for member in IncomingAudioFormat],
|
||||
value_fn=lambda state: (
|
||||
result.name.lower()
|
||||
if (result := state.get_incoming_audio_format()[0]) is not None
|
||||
else None
|
||||
),
|
||||
options=_enum_options(IncomingAudioFormat),
|
||||
value_fn=lambda state: _enum_value(state.get_incoming_audio_format()[0]),
|
||||
),
|
||||
ArcamFmjSensorEntityDescription(
|
||||
key="incoming_audio_config",
|
||||
translation_key="incoming_audio_config",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=[member.name.lower() for member in IncomingAudioConfig],
|
||||
value_fn=lambda state: (
|
||||
result.name.lower()
|
||||
if (result := state.get_incoming_audio_format()[1]) is not None
|
||||
else None
|
||||
),
|
||||
options=_enum_options(IncomingAudioConfig),
|
||||
value_fn=lambda state: _enum_value(state.get_incoming_audio_format()[1]),
|
||||
),
|
||||
ArcamFmjSensorEntityDescription(
|
||||
key="incoming_audio_sample_rate",
|
||||
|
||||
@@ -13,11 +13,12 @@ from hassil.util import (
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.auth.permissions.const import CAT_ENTITIES, POLICY_CONTROL
|
||||
from homeassistant.components.http import StaticPathConfig
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ATTR_ENTITY_ID
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, SupportsResponse
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.exceptions import HomeAssistantError, Unauthorized, UnknownUser
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
@@ -103,6 +104,22 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
async def handle_ask_question(call: ServiceCall) -> dict[str, Any]:
|
||||
"""Handle a Show View service call."""
|
||||
satellite_entity_id: str = call.data[ATTR_ENTITY_ID]
|
||||
if call.context.user_id:
|
||||
user = await hass.auth.async_get_user(call.context.user_id)
|
||||
if user is None:
|
||||
raise UnknownUser(
|
||||
context=call.context,
|
||||
permission=POLICY_CONTROL,
|
||||
user_id=call.context.user_id,
|
||||
)
|
||||
if not user.permissions.check_entity(satellite_entity_id, POLICY_CONTROL):
|
||||
raise Unauthorized(
|
||||
context=call.context,
|
||||
permission=POLICY_CONTROL,
|
||||
user_id=call.context.user_id,
|
||||
perm_category=CAT_ENTITIES,
|
||||
)
|
||||
|
||||
satellite_entity: AssistSatelliteEntity | None = component.get_entity(
|
||||
satellite_entity_id
|
||||
)
|
||||
|
||||
@@ -7,17 +7,13 @@ from .const import DOMAIN
|
||||
from .entity import AssistSatelliteState
|
||||
|
||||
CONDITIONS: dict[str, type[Condition]] = {
|
||||
"is_idle": make_entity_state_condition(
|
||||
DOMAIN, AssistSatelliteState.IDLE, support_duration=True
|
||||
),
|
||||
"is_listening": make_entity_state_condition(
|
||||
DOMAIN, AssistSatelliteState.LISTENING, support_duration=True
|
||||
),
|
||||
"is_idle": make_entity_state_condition(DOMAIN, AssistSatelliteState.IDLE),
|
||||
"is_listening": make_entity_state_condition(DOMAIN, AssistSatelliteState.LISTENING),
|
||||
"is_processing": make_entity_state_condition(
|
||||
DOMAIN, AssistSatelliteState.PROCESSING, support_duration=True
|
||||
DOMAIN, AssistSatelliteState.PROCESSING
|
||||
),
|
||||
"is_responding": make_entity_state_condition(
|
||||
DOMAIN, AssistSatelliteState.RESPONDING, support_duration=True
|
||||
DOMAIN, AssistSatelliteState.RESPONDING
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
@@ -7,11 +7,8 @@
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: condition_behavior
|
||||
options:
|
||||
- all
|
||||
- any
|
||||
automation_behavior:
|
||||
mode: condition
|
||||
for:
|
||||
required: true
|
||||
default: 00:00:00
|
||||
|
||||
@@ -72,19 +72,6 @@
|
||||
"id": "Answer ID",
|
||||
"sentences": "Sentences"
|
||||
}
|
||||
},
|
||||
"condition_behavior": {
|
||||
"options": {
|
||||
"all": "All",
|
||||
"any": "Any"
|
||||
}
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
|
||||
@@ -7,12 +7,8 @@
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
options:
|
||||
- first
|
||||
- last
|
||||
- any
|
||||
translation_key: trigger_behavior
|
||||
automation_behavior:
|
||||
mode: trigger
|
||||
for:
|
||||
required: true
|
||||
default: 00:00:00
|
||||
|
||||
@@ -165,6 +165,7 @@ async def websocket_set_wake_words(
|
||||
vol.Required("entity_id"): cv.entity_domain(DOMAIN),
|
||||
}
|
||||
)
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.async_response
|
||||
async def websocket_test_connection(
|
||||
hass: HomeAssistant,
|
||||
|
||||
@@ -15,24 +15,6 @@ from homeassistant.data_entry_flow import FlowContext
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
WS_TYPE_SETUP_MFA = "auth/setup_mfa"
|
||||
SCHEMA_WS_SETUP_MFA = vol.All(
|
||||
websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
|
||||
{
|
||||
vol.Required("type"): WS_TYPE_SETUP_MFA,
|
||||
vol.Exclusive("mfa_module_id", "module_or_flow_id"): str,
|
||||
vol.Exclusive("flow_id", "module_or_flow_id"): str,
|
||||
vol.Optional("user_input"): object,
|
||||
}
|
||||
),
|
||||
cv.has_at_least_one_key("mfa_module_id", "flow_id"),
|
||||
)
|
||||
|
||||
WS_TYPE_DEPOSE_MFA = "auth/depose_mfa"
|
||||
SCHEMA_WS_DEPOSE_MFA = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
|
||||
{vol.Required("type"): WS_TYPE_DEPOSE_MFA, vol.Required("mfa_module_id"): str}
|
||||
)
|
||||
|
||||
DATA_SETUP_FLOW_MGR: HassKey[MfaFlowManager] = HassKey("auth_mfa_setup_flow_manager")
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -73,16 +55,24 @@ def async_setup(hass: HomeAssistant) -> None:
|
||||
"""Init mfa setup flow manager."""
|
||||
hass.data[DATA_SETUP_FLOW_MGR] = MfaFlowManager(hass)
|
||||
|
||||
websocket_api.async_register_command(
|
||||
hass, WS_TYPE_SETUP_MFA, websocket_setup_mfa, SCHEMA_WS_SETUP_MFA
|
||||
)
|
||||
|
||||
websocket_api.async_register_command(
|
||||
hass, WS_TYPE_DEPOSE_MFA, websocket_depose_mfa, SCHEMA_WS_DEPOSE_MFA
|
||||
)
|
||||
websocket_api.async_register_command(hass, websocket_setup_mfa)
|
||||
websocket_api.async_register_command(hass, websocket_depose_mfa)
|
||||
|
||||
|
||||
@callback
|
||||
@websocket_api.websocket_command(
|
||||
vol.All(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Required("type"): "auth/setup_mfa",
|
||||
vol.Exclusive("mfa_module_id", "module_or_flow_id"): str,
|
||||
vol.Exclusive("flow_id", "module_or_flow_id"): str,
|
||||
vol.Optional("user_input"): object,
|
||||
}
|
||||
),
|
||||
cv.has_at_least_one_key("mfa_module_id", "flow_id"),
|
||||
)
|
||||
)
|
||||
@websocket_api.ws_require_user(allow_system_user=False)
|
||||
def websocket_setup_mfa(
|
||||
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
|
||||
@@ -121,6 +111,9 @@ def websocket_setup_mfa(
|
||||
|
||||
|
||||
@callback
|
||||
@websocket_api.websocket_command(
|
||||
{vol.Required("type"): "auth/depose_mfa", vol.Required("mfa_module_id"): str}
|
||||
)
|
||||
@websocket_api.ws_require_user(allow_system_user=False)
|
||||
def websocket_depose_mfa(
|
||||
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
|
||||
|
||||
@@ -4,10 +4,10 @@ from __future__ import annotations
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
import asyncio
|
||||
from collections.abc import Callable, Mapping
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Any, Protocol, cast
|
||||
from typing import Any, cast
|
||||
|
||||
from propcache.api import cached_property
|
||||
import voluptuous as vol
|
||||
@@ -194,6 +194,7 @@ _EXPERIMENTAL_TRIGGER_PLATFORMS = {
|
||||
"switch",
|
||||
"temperature",
|
||||
"text",
|
||||
"timer",
|
||||
"todo",
|
||||
"update",
|
||||
"vacuum",
|
||||
@@ -229,14 +230,11 @@ def is_disabled_experimental_trigger(hass: HomeAssistant, platform: str) -> bool
|
||||
)
|
||||
|
||||
|
||||
class IfAction(Protocol):
|
||||
class IfAction(condition_helper.ConditionsChecker):
|
||||
"""Define the format of if_action."""
|
||||
|
||||
config: list[ConfigType]
|
||||
|
||||
def __call__(self, variables: Mapping[str, Any] | None = None) -> bool:
|
||||
"""AND all conditions."""
|
||||
|
||||
|
||||
def is_on(hass: HomeAssistant, entity_id: str) -> bool:
|
||||
"""Return true if specified automation entity_id is on.
|
||||
@@ -835,7 +833,7 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
|
||||
if (
|
||||
not skip_condition
|
||||
and self._condition is not None
|
||||
and not self._condition(variables)
|
||||
and not self._condition.async_check(variables=variables)
|
||||
):
|
||||
self._logger.debug(
|
||||
"Conditions not met, aborting automation. Condition summary: %s",
|
||||
@@ -903,7 +901,15 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Remove listeners when removing automation from Home Assistant."""
|
||||
await super().async_will_remove_from_hass()
|
||||
await self._async_disable()
|
||||
if self.registry_entry and self.registry_entry.entity_id != self.entity_id:
|
||||
# Entity ID change, do not unload the script or conditions as they will
|
||||
# be reused.
|
||||
await self._async_disable()
|
||||
return
|
||||
await self._async_disable(stop_actions=False)
|
||||
await self.action_script.async_unload()
|
||||
if self._condition is not None:
|
||||
self._condition.async_unload()
|
||||
|
||||
async def _async_enable_automation(self, event: Event) -> None:
|
||||
"""Start automation on startup."""
|
||||
@@ -1276,6 +1282,7 @@ async def _async_process_if(
|
||||
|
||||
|
||||
@websocket_api.websocket_command({"type": "automation/config", "entity_id": str})
|
||||
@websocket_api.require_admin
|
||||
def websocket_config(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
|
||||
@@ -18,4 +18,10 @@ DEFAULT_STREAM_PROFILE = "No stream profile"
|
||||
DEFAULT_TRIGGER_TIME = 0
|
||||
DEFAULT_VIDEO_SOURCE = "No video source"
|
||||
|
||||
PLATFORMS = [Platform.BINARY_SENSOR, Platform.CAMERA, Platform.LIGHT, Platform.SWITCH]
|
||||
PLATFORMS = [
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.CAMERA,
|
||||
Platform.EVENT,
|
||||
Platform.LIGHT,
|
||||
Platform.SWITCH,
|
||||
]
|
||||
|
||||
@@ -0,0 +1,62 @@
|
||||
"""Support for Axis event entities."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from axis.models.event import Event, EventTopic
|
||||
|
||||
from homeassistant.components.event import (
|
||||
DoorbellEventType,
|
||||
EventDeviceClass,
|
||||
EventEntity,
|
||||
EventEntityDescription,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import AxisConfigEntry
|
||||
from .entity import AxisEventDescription, AxisEventEntity
|
||||
|
||||
DOORBELL_CONFIG = ("I8116-E", "0")
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class AxisEventPlatformDescription(AxisEventDescription, EventEntityDescription):
|
||||
"""Axis event entity description."""
|
||||
|
||||
|
||||
ENTITY_DESCRIPTIONS = (
|
||||
AxisEventPlatformDescription(
|
||||
key="Doorbell",
|
||||
device_class=EventDeviceClass.DOORBELL,
|
||||
event_types=[DoorbellEventType.RING],
|
||||
event_topic=EventTopic.PORT_INPUT,
|
||||
name_fn=lambda _hub, _event: "Doorbell",
|
||||
supported_fn=lambda hub, event: (hub.config.model, event.id) == DOORBELL_CONFIG,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: AxisConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up an Axis event platform."""
|
||||
config_entry.runtime_data.entity_loader.register_platform(
|
||||
async_add_entities, AxisEvent, ENTITY_DESCRIPTIONS
|
||||
)
|
||||
|
||||
|
||||
class AxisEvent(AxisEventEntity, EventEntity):
|
||||
"""Representation of an Axis event entity."""
|
||||
|
||||
entity_description: AxisEventPlatformDescription
|
||||
|
||||
@callback
|
||||
def async_event_callback(self, event: Event) -> None:
|
||||
"""Handle Axis event updates."""
|
||||
if event.is_tripped:
|
||||
self._trigger_event(DoorbellEventType.RING)
|
||||
self.async_write_ha_state()
|
||||
@@ -36,6 +36,7 @@ async def get_axis_api(
|
||||
username=config[CONF_USERNAME],
|
||||
password=config[CONF_PASSWORD],
|
||||
web_proto=config.get(CONF_PROTOCOL, "http"),
|
||||
websocket_enabled=True,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["axis"],
|
||||
"requirements": ["axis==68"],
|
||||
"requirements": ["axis==70"],
|
||||
"ssdp": [
|
||||
{
|
||||
"manufacturer": "AXIS"
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.helpers.hassio import is_hassio
|
||||
from homeassistant.helpers.service import async_register_admin_service
|
||||
|
||||
from .const import DATA_MANAGER, DOMAIN
|
||||
|
||||
@@ -30,7 +31,9 @@ async def _async_handle_create_automatic_service(call: ServiceCall) -> None:
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Register services."""
|
||||
if not is_hassio(hass):
|
||||
hass.services.async_register(DOMAIN, "create", _async_handle_create_service)
|
||||
hass.services.async_register(
|
||||
DOMAIN, "create_automatic", _async_handle_create_automatic_service
|
||||
async_register_admin_service(
|
||||
hass, DOMAIN, "create", _async_handle_create_service
|
||||
)
|
||||
async_register_admin_service(
|
||||
hass, DOMAIN, "create_automatic", _async_handle_create_automatic_service
|
||||
)
|
||||
|
||||
@@ -32,25 +32,21 @@ CONDITIONS: dict[str, type[Condition]] = {
|
||||
"is_low": make_entity_state_condition(
|
||||
BATTERY_DOMAIN_SPECS,
|
||||
STATE_ON,
|
||||
support_duration=True,
|
||||
primary_entities_only=False,
|
||||
),
|
||||
"is_not_low": make_entity_state_condition(
|
||||
BATTERY_DOMAIN_SPECS,
|
||||
STATE_OFF,
|
||||
support_duration=True,
|
||||
primary_entities_only=False,
|
||||
),
|
||||
"is_charging": make_entity_state_condition(
|
||||
BATTERY_CHARGING_DOMAIN_SPECS,
|
||||
STATE_ON,
|
||||
support_duration=True,
|
||||
primary_entities_only=False,
|
||||
),
|
||||
"is_not_charging": make_entity_state_condition(
|
||||
BATTERY_CHARGING_DOMAIN_SPECS,
|
||||
STATE_OFF,
|
||||
support_duration=True,
|
||||
primary_entities_only=False,
|
||||
),
|
||||
"is_level": make_entity_numerical_condition(
|
||||
|
||||
@@ -9,11 +9,8 @@
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: condition_behavior
|
||||
options:
|
||||
- all
|
||||
- any
|
||||
automation_behavior:
|
||||
mode: condition
|
||||
for: &condition_for
|
||||
required: true
|
||||
default: 00:00:00
|
||||
@@ -66,6 +63,7 @@ is_level:
|
||||
primary_entities_only: false
|
||||
fields:
|
||||
behavior: *condition_behavior
|
||||
for: *condition_for
|
||||
threshold:
|
||||
required: true
|
||||
selector:
|
||||
|
||||
@@ -26,6 +26,9 @@
|
||||
"behavior": {
|
||||
"name": "[%key:component::battery::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::battery::common::condition_for_name%]"
|
||||
},
|
||||
"threshold": {
|
||||
"name": "[%key:component::battery::common::condition_threshold_name%]"
|
||||
}
|
||||
@@ -69,21 +72,6 @@
|
||||
"name": "Battery is not low"
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"condition_behavior": {
|
||||
"options": {
|
||||
"all": "All",
|
||||
"any": "Any"
|
||||
}
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"title": "Battery",
|
||||
"triggers": {
|
||||
"level_changed": {
|
||||
|
||||
@@ -3,12 +3,8 @@
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: trigger_behavior
|
||||
options:
|
||||
- first
|
||||
- last
|
||||
- any
|
||||
automation_behavior:
|
||||
mode: trigger
|
||||
for: &trigger_for
|
||||
required: true
|
||||
default: 00:00:00
|
||||
|
||||
@@ -33,11 +33,13 @@ from homeassistant.components.update import DOMAIN as UPDATE_DOMAIN
|
||||
from homeassistant.components.weather import DOMAIN as WEATHER_DOMAIN
|
||||
from homeassistant.components.zone import DOMAIN as ZONE_DOMAIN
|
||||
from homeassistant.config_entries import (
|
||||
SOURCE_USER,
|
||||
ConfigEntry,
|
||||
ConfigFlowResult,
|
||||
ConfigSubentry,
|
||||
ConfigSubentryData,
|
||||
ConfigSubentryFlow,
|
||||
FlowType,
|
||||
SubentryFlowContext,
|
||||
SubentryFlowResult,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
@@ -62,7 +64,6 @@ from homeassistant.helpers.schema_config_entry_flow import (
|
||||
|
||||
from .binary_sensor import above_greater_than_below, no_overlapping
|
||||
from .const import (
|
||||
CONF_OBSERVATIONS,
|
||||
CONF_P_GIVEN_F,
|
||||
CONF_P_GIVEN_T,
|
||||
CONF_PRIOR,
|
||||
@@ -373,26 +374,6 @@ def _validate_observation_subentry(
|
||||
return user_input
|
||||
|
||||
|
||||
async def _validate_subentry_from_config_entry(
|
||||
handler: SchemaCommonFlowHandler, user_input: dict[str, Any]
|
||||
) -> dict[str, Any]:
|
||||
# Standard behavior is to merge the result with the options.
|
||||
# In this case, we want to add a subentry so we update the options directly.
|
||||
observations: list[dict[str, Any]] = handler.options.setdefault(
|
||||
CONF_OBSERVATIONS, []
|
||||
)
|
||||
|
||||
if handler.parent_handler.cur_step is not None:
|
||||
user_input[CONF_PLATFORM] = handler.parent_handler.cur_step["step_id"]
|
||||
user_input = _validate_observation_subentry(
|
||||
user_input[CONF_PLATFORM],
|
||||
user_input,
|
||||
other_subentries=handler.options[CONF_OBSERVATIONS],
|
||||
)
|
||||
observations.append(user_input)
|
||||
return {}
|
||||
|
||||
|
||||
async def _get_description_placeholders(
|
||||
handler: SchemaCommonFlowHandler,
|
||||
) -> dict[str, str]:
|
||||
@@ -420,48 +401,12 @@ async def _get_description_placeholders(
|
||||
}
|
||||
|
||||
|
||||
async def _get_observation_menu_options(handler: SchemaCommonFlowHandler) -> list[str]:
|
||||
"""Return the menu options for the observation selector."""
|
||||
options = [typ.value for typ in ObservationTypes]
|
||||
if handler.options.get(CONF_OBSERVATIONS):
|
||||
options.append("finish")
|
||||
return options
|
||||
|
||||
|
||||
CONFIG_FLOW: dict[str, SchemaFlowMenuStep | SchemaFlowFormStep] = {
|
||||
str(USER): SchemaFlowFormStep(
|
||||
CONFIG_SCHEMA,
|
||||
validate_user_input=_validate_user,
|
||||
next_step=str(OBSERVATION_SELECTOR),
|
||||
description_placeholders=_get_description_placeholders,
|
||||
),
|
||||
str(OBSERVATION_SELECTOR): SchemaFlowMenuStep(
|
||||
_get_observation_menu_options,
|
||||
),
|
||||
str(ObservationTypes.STATE): SchemaFlowFormStep(
|
||||
STATE_SUBSCHEMA,
|
||||
next_step=str(OBSERVATION_SELECTOR),
|
||||
validate_user_input=_validate_subentry_from_config_entry,
|
||||
# Prevent the name of the bayesian sensor from being used as the suggested
|
||||
# name of the observations
|
||||
suggested_values=None,
|
||||
description_placeholders=_get_description_placeholders,
|
||||
),
|
||||
str(ObservationTypes.NUMERIC_STATE): SchemaFlowFormStep(
|
||||
NUMERIC_STATE_SUBSCHEMA,
|
||||
next_step=str(OBSERVATION_SELECTOR),
|
||||
validate_user_input=_validate_subentry_from_config_entry,
|
||||
suggested_values=None,
|
||||
description_placeholders=_get_description_placeholders,
|
||||
),
|
||||
str(ObservationTypes.TEMPLATE): SchemaFlowFormStep(
|
||||
TEMPLATE_SUBSCHEMA,
|
||||
next_step=str(OBSERVATION_SELECTOR),
|
||||
validate_user_input=_validate_subentry_from_config_entry,
|
||||
suggested_values=None,
|
||||
description_placeholders=_get_description_placeholders,
|
||||
),
|
||||
"finish": SchemaFlowFormStep(),
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
@@ -497,27 +442,17 @@ class BayesianConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
|
||||
name: str = options[CONF_NAME]
|
||||
return name
|
||||
|
||||
@callback
|
||||
def async_create_entry(
|
||||
self,
|
||||
data: Mapping[str, Any],
|
||||
**kwargs: Any,
|
||||
) -> ConfigFlowResult:
|
||||
"""Finish config flow and create a config entry."""
|
||||
data = dict(data)
|
||||
observations = data.pop(CONF_OBSERVATIONS)
|
||||
subentries: list[ConfigSubentryData] = [
|
||||
ConfigSubentryData(
|
||||
data=observation,
|
||||
title=observation[CONF_NAME],
|
||||
subentry_type="observation",
|
||||
unique_id=None,
|
||||
)
|
||||
for observation in observations
|
||||
]
|
||||
|
||||
self.async_config_flow_finished(data)
|
||||
return super().async_create_entry(data=data, subentries=subentries, **kwargs)
|
||||
async def async_on_create_entry(self, result: ConfigFlowResult) -> ConfigFlowResult:
|
||||
"""Start subentry flow when config entry has been created."""
|
||||
subentry_result = await self.hass.config_entries.subentries.async_init(
|
||||
(result["result"].entry_id, "observation"),
|
||||
context=SubentryFlowContext(source=SOURCE_USER),
|
||||
)
|
||||
result["next_flow"] = (
|
||||
FlowType.CONFIG_SUBENTRIES_FLOW,
|
||||
subentry_result["flow_id"],
|
||||
)
|
||||
return result
|
||||
|
||||
|
||||
class ObservationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
|
||||
@@ -85,7 +85,9 @@ class BleBoxCoverEntity(BleBoxEntity[blebox_uniapi.cover.Cover], CoverEntity):
|
||||
if position == -1: # possible for shutterBox
|
||||
return None
|
||||
|
||||
return None if position is None else 100 - position
|
||||
if position is None:
|
||||
return None
|
||||
return 100 - position if self._feature.is_position_inverted else position
|
||||
|
||||
@property
|
||||
def current_cover_tilt_position(self) -> int | None:
|
||||
|
||||
@@ -7,6 +7,6 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["blebox_uniapi"],
|
||||
"requirements": ["blebox-uniapi==2.5.1"],
|
||||
"requirements": ["blebox-uniapi==2.5.3"],
|
||||
"zeroconf": ["_bbxsrv._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -21,9 +21,6 @@
|
||||
"save_video": {
|
||||
"service": "mdi:file-video"
|
||||
},
|
||||
"send_pin": {
|
||||
"service": "mdi:two-factor-authentication"
|
||||
},
|
||||
"trigger_camera": {
|
||||
"service": "mdi:image-refresh"
|
||||
}
|
||||
|
||||
@@ -5,15 +5,9 @@ from __future__ import annotations
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN
|
||||
from homeassistant.const import (
|
||||
ATTR_CONFIG_ENTRY_ID,
|
||||
CONF_FILE_PATH,
|
||||
CONF_FILENAME,
|
||||
CONF_PIN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv, issue_registry as ir, service
|
||||
from homeassistant.const import CONF_FILE_PATH, CONF_FILENAME
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, service
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
@@ -23,50 +17,10 @@ SERVICE_SAVE_VIDEO = "save_video"
|
||||
SERVICE_SAVE_RECENT_CLIPS = "save_recent_clips"
|
||||
|
||||
|
||||
# Deprecated
|
||||
SERVICE_SEND_PIN = "send_pin"
|
||||
SERVICE_SEND_PIN_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_CONFIG_ENTRY_ID): vol.All(cv.ensure_list, [cv.string]),
|
||||
vol.Optional(CONF_PIN): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def _send_pin(call: ServiceCall) -> None:
|
||||
"""Call blink to send new pin."""
|
||||
# Create repair issue to inform user about service removal
|
||||
ir.async_create_issue(
|
||||
call.hass,
|
||||
DOMAIN,
|
||||
"service_send_pin_deprecation",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=ir.IssueSeverity.ERROR,
|
||||
breaks_in_ha_version="2026.5.0",
|
||||
translation_key="service_send_pin_deprecation",
|
||||
translation_placeholders={"service_name": f"{DOMAIN}.{SERVICE_SEND_PIN}"},
|
||||
)
|
||||
|
||||
# Service has been removed - raise exception
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="service_removed",
|
||||
translation_placeholders={"service_name": f"{DOMAIN}.{SERVICE_SEND_PIN}"},
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up the services for the Blink integration."""
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_SEND_PIN,
|
||||
_send_pin,
|
||||
schema=SERVICE_SEND_PIN_SCHEMA,
|
||||
)
|
||||
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
|
||||
@@ -35,15 +35,3 @@ save_recent_clips:
|
||||
example: "/tmp"
|
||||
selector:
|
||||
text:
|
||||
|
||||
send_pin:
|
||||
fields:
|
||||
config_entry_id:
|
||||
required: true
|
||||
selector:
|
||||
config_entry:
|
||||
integration: blink
|
||||
pin:
|
||||
example: "abc123"
|
||||
selector:
|
||||
text:
|
||||
|
||||
@@ -82,9 +82,6 @@
|
||||
},
|
||||
"not_loaded": {
|
||||
"message": "{target} is not loaded."
|
||||
},
|
||||
"service_removed": {
|
||||
"message": "The service {service_name} has been removed and is no longer needed. Home Assistant will automatically prompt for reauthentication when required."
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
@@ -98,10 +95,6 @@
|
||||
}
|
||||
},
|
||||
"title": "Blink update service is being removed"
|
||||
},
|
||||
"service_send_pin_deprecation": {
|
||||
"description": "The service {service_name} has been removed and is no longer needed. When a new two-factor authentication code is required, Home Assistant will automatically prompt you to reauthenticate through the integration configuration. Please remove any automations or scripts that call this service.",
|
||||
"title": "Blink send PIN service has been removed"
|
||||
}
|
||||
},
|
||||
"options": {
|
||||
@@ -140,20 +133,6 @@
|
||||
},
|
||||
"name": "Save video"
|
||||
},
|
||||
"send_pin": {
|
||||
"description": "Sends a new PIN to Blink for 2FA.",
|
||||
"fields": {
|
||||
"config_entry_id": {
|
||||
"description": "The Blink integration ID.",
|
||||
"name": "Integration ID"
|
||||
},
|
||||
"pin": {
|
||||
"description": "PIN received from Blink. Leave empty if you only received a verification email.",
|
||||
"name": "PIN"
|
||||
}
|
||||
},
|
||||
"name": "Send PIN"
|
||||
},
|
||||
"trigger_camera": {
|
||||
"description": "Requests camera to take new image.",
|
||||
"name": "Trigger camera"
|
||||
|
||||
@@ -58,6 +58,7 @@ from .api import (
|
||||
async_address_present,
|
||||
async_ble_device_from_address,
|
||||
async_clear_address_from_match_history,
|
||||
async_clear_advertisement_history,
|
||||
async_current_scanners,
|
||||
async_discovered_service_info,
|
||||
async_get_advertisement_callback,
|
||||
@@ -116,6 +117,7 @@ __all__ = [
|
||||
"async_address_present",
|
||||
"async_ble_device_from_address",
|
||||
"async_clear_address_from_match_history",
|
||||
"async_clear_advertisement_history",
|
||||
"async_current_scanners",
|
||||
"async_discovered_service_info",
|
||||
"async_get_advertisement_callback",
|
||||
|
||||
@@ -207,6 +207,19 @@ def async_clear_address_from_match_history(hass: HomeAssistant, address: str) ->
|
||||
_get_manager(hass).async_clear_address_from_match_history(address)
|
||||
|
||||
|
||||
@hass_callback
|
||||
def async_clear_advertisement_history(hass: HomeAssistant, address: str) -> None:
|
||||
"""Clear cached advertisement history for a device.
|
||||
|
||||
Causes the next advertisement from this address to be treated as new
|
||||
data, bypassing the change-detection guard in the Bluetooth manager.
|
||||
Intended for devices that emit static advertisements as a wake-up
|
||||
signal, for example, devices that require an active GATT connection
|
||||
to read sensor data and whose advertisement payload never changes.
|
||||
"""
|
||||
_get_manager(hass).async_clear_advertisement_history(address)
|
||||
|
||||
|
||||
@hass_callback
|
||||
def async_register_scanner(
|
||||
hass: HomeAssistant,
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["bring_api"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["bring-api==1.1.1"]
|
||||
"requirements": ["bring-api==1.1.2"]
|
||||
}
|
||||
|
||||
@@ -6,7 +6,9 @@ DOMAIN = "broadlink"
|
||||
|
||||
DOMAINS_AND_TYPES = {
|
||||
Platform.CLIMATE: {"HYS"},
|
||||
Platform.INFRARED: {"RM4MINI", "RM4PRO", "RMMINI", "RMMINIB", "RMPRO"},
|
||||
Platform.LIGHT: {"LB1", "LB2"},
|
||||
Platform.RADIO_FREQUENCY: {"RM4PRO", "RMPRO"},
|
||||
Platform.REMOTE: {"RM4MINI", "RM4PRO", "RMMINI", "RMMINIB", "RMPRO"},
|
||||
Platform.SELECT: {"HYS"},
|
||||
Platform.SENSOR: {
|
||||
|
||||
@@ -0,0 +1,69 @@
|
||||
"""Infrared platform for Broadlink remotes."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from broadlink.exceptions import BroadlinkException
|
||||
from broadlink.remote import pulses_to_data as _bl_pulses_to_data
|
||||
|
||||
from homeassistant.components.infrared import InfraredCommand, InfraredEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import DOMAIN
|
||||
from .entity import BroadlinkEntity
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .device import BroadlinkDevice
|
||||
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
|
||||
def _timings_to_broadlink_packet(timings: list[int]) -> bytes:
|
||||
"""Convert signed microsecond timings to a Broadlink IR packet.
|
||||
|
||||
Positive values are pulse (high) durations; negative values are space
|
||||
(low) durations. The Broadlink library's encoder expects absolute
|
||||
durations.
|
||||
"""
|
||||
pulses = [abs(t) for t in timings]
|
||||
return _bl_pulses_to_data(pulses)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Broadlink infrared entity."""
|
||||
# Uses legacy hass.data[DOMAIN] pattern
|
||||
# pylint: disable-next=hass-use-runtime-data
|
||||
device = hass.data[DOMAIN].devices[config_entry.entry_id]
|
||||
async_add_entities([BroadlinkInfraredEntity(device)])
|
||||
|
||||
|
||||
class BroadlinkInfraredEntity(BroadlinkEntity, InfraredEntity):
|
||||
"""Broadlink infrared transmitter entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_translation_key = "infrared_emitter"
|
||||
|
||||
def __init__(self, device: BroadlinkDevice) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(device)
|
||||
self._attr_unique_id = f"{device.unique_id}-emitter"
|
||||
|
||||
async def async_send_command(self, command: InfraredCommand) -> None:
|
||||
"""Send an IR command via the Broadlink device."""
|
||||
packet = _timings_to_broadlink_packet(command.get_raw_timings())
|
||||
try:
|
||||
await self._device.async_request(self._device.api.send_data, packet)
|
||||
except (BroadlinkException, OSError) as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="send_command_failed",
|
||||
translation_placeholders={"error": str(err)},
|
||||
) from err
|
||||
@@ -0,0 +1,132 @@
|
||||
"""Radio Frequency platform for Broadlink."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from broadlink.exceptions import BroadlinkException
|
||||
from rf_protocols import RadioFrequencyCommand
|
||||
|
||||
from homeassistant.components.radio_frequency import RadioFrequencyTransmitterEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import DOMAIN
|
||||
from .device import BroadlinkDevice
|
||||
from .entity import BroadlinkEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
_TICK_US = 32.84
|
||||
|
||||
_RF_433_TYPE_BYTE = 0xB2
|
||||
_RF_315_TYPE_BYTE = 0xB4
|
||||
|
||||
_RF_433_RANGE = (433_050_000, 434_790_000)
|
||||
_RF_315_RANGE = (314_950_000, 315_250_000)
|
||||
|
||||
SUPPORTED_FREQUENCY_RANGES: list[tuple[int, int]] = [_RF_433_RANGE, _RF_315_RANGE]
|
||||
|
||||
|
||||
def _type_byte_for_frequency(frequency: int) -> int:
|
||||
"""Return the Broadlink RF type byte for a given carrier frequency."""
|
||||
if _RF_433_RANGE[0] <= frequency <= _RF_433_RANGE[1]:
|
||||
return _RF_433_TYPE_BYTE
|
||||
if _RF_315_RANGE[0] <= frequency <= _RF_315_RANGE[1]:
|
||||
return _RF_315_TYPE_BYTE
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="frequency_not_supported",
|
||||
translation_placeholders={"frequency": f"{frequency / 1_000_000:g}"},
|
||||
)
|
||||
|
||||
|
||||
def encode_rf_packet(
|
||||
*,
|
||||
type_byte: int,
|
||||
repeat_count: int,
|
||||
timings_us: list[int],
|
||||
) -> bytes:
|
||||
"""Encode raw OOK timings as a Broadlink RF pulse-length packet.
|
||||
|
||||
The layout is::
|
||||
|
||||
byte 0 type byte (0xB2 for 433 MHz, 0xB4 for 315 MHz)
|
||||
byte 1 repeat count (additional transmissions after the first)
|
||||
bytes 2..3 payload length (little-endian), counted from byte 4
|
||||
bytes 4..N-1 pulses: 1 byte when ticks < 256, otherwise
|
||||
0x00 followed by a 2-byte big-endian tick count
|
||||
|
||||
Each pulse is expressed as multiples of 32.84 µs ticks, which is the
|
||||
timing resolution of the Broadlink RF front-end.
|
||||
"""
|
||||
buf = bytearray([type_byte, repeat_count, 0, 0])
|
||||
for duration in timings_us:
|
||||
ticks = round(abs(duration) / _TICK_US)
|
||||
div, mod = divmod(ticks, 256)
|
||||
if div:
|
||||
buf.append(0x00)
|
||||
buf.append(div)
|
||||
buf.append(mod)
|
||||
payload_len = len(buf) - 4
|
||||
buf[2] = payload_len & 0xFF
|
||||
buf[3] = (payload_len >> 8) & 0xFF
|
||||
return bytes(buf)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up a Broadlink radio frequency transmitter."""
|
||||
# Uses legacy hass.data[DOMAIN] pattern
|
||||
# pylint: disable-next=hass-use-runtime-data
|
||||
device: BroadlinkDevice = hass.data[DOMAIN].devices[config_entry.entry_id]
|
||||
async_add_entities([BroadlinkRadioFrequency(device)])
|
||||
|
||||
|
||||
class BroadlinkRadioFrequency(BroadlinkEntity, RadioFrequencyTransmitterEntity):
|
||||
"""Representation of a Broadlink RF transmitter."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
|
||||
def __init__(self, device: BroadlinkDevice) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(device)
|
||||
self._attr_unique_id = device.unique_id
|
||||
|
||||
@property
|
||||
def supported_frequency_ranges(self) -> list[tuple[int, int]]:
|
||||
"""Return the Broadlink-supported narrow RF bands."""
|
||||
return SUPPORTED_FREQUENCY_RANGES
|
||||
|
||||
async def async_send_command(self, command: RadioFrequencyCommand) -> None:
|
||||
"""Encode an OOK command and transmit it via the Broadlink device."""
|
||||
type_byte = _type_byte_for_frequency(command.frequency)
|
||||
packet = encode_rf_packet(
|
||||
type_byte=type_byte,
|
||||
repeat_count=command.repeat_count,
|
||||
timings_us=command.get_raw_timings(),
|
||||
)
|
||||
_LOGGER.debug(
|
||||
"Transmitting RF packet: %d bytes on %d Hz (repeat=%d)",
|
||||
len(packet),
|
||||
command.frequency,
|
||||
command.repeat_count,
|
||||
)
|
||||
|
||||
device = self._device
|
||||
try:
|
||||
await device.async_request(device.api.send_data, packet)
|
||||
except (BroadlinkException, OSError) as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="transmit_failed",
|
||||
translation_placeholders={"error": str(err)},
|
||||
) from err
|
||||
@@ -49,6 +49,11 @@
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"infrared": {
|
||||
"infrared_emitter": {
|
||||
"name": "IR emitter"
|
||||
}
|
||||
},
|
||||
"select": {
|
||||
"day_of_week": {
|
||||
"name": "Day of week",
|
||||
@@ -77,5 +82,16 @@
|
||||
"name": "Total consumption"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"frequency_not_supported": {
|
||||
"message": "Broadlink devices cannot transmit on {frequency} MHz"
|
||||
},
|
||||
"send_command_failed": {
|
||||
"message": "Failed to send IR command: {error}"
|
||||
},
|
||||
"transmit_failed": {
|
||||
"message": "Failed to transmit RF command: {error}"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -293,9 +293,8 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = (
|
||||
),
|
||||
BrotherSensorEntityDescription(
|
||||
key="uptime",
|
||||
translation_key="last_restart",
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
device_class=SensorDeviceClass.UPTIME,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value=lambda data: data.uptime,
|
||||
),
|
||||
|
||||
@@ -151,9 +151,6 @@
|
||||
"laser_remaining_life": {
|
||||
"name": "Laser remaining lifetime"
|
||||
},
|
||||
"last_restart": {
|
||||
"name": "Last restart"
|
||||
},
|
||||
"magenta_drum_page_counter": {
|
||||
"name": "Magenta drum page counter",
|
||||
"unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]"
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["bsblan"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["python-bsblan==5.2.0"],
|
||||
"requirements": ["python-bsblan==5.2.1"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"name": "bsb-lan*",
|
||||
|
||||
@@ -15,7 +15,10 @@ from aiohttp import web
|
||||
from dateutil.rrule import rrulestr
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.auth.models import User
|
||||
from homeassistant.auth.permissions.const import POLICY_CONTROL, POLICY_READ
|
||||
from homeassistant.components import frontend, http, websocket_api
|
||||
from homeassistant.components.http import KEY_HASS_USER
|
||||
from homeassistant.components.websocket_api import (
|
||||
ERR_INVALID_FORMAT,
|
||||
ERR_NOT_FOUND,
|
||||
@@ -32,7 +35,7 @@ from homeassistant.core import (
|
||||
SupportsResponse,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.exceptions import HomeAssistantError, Unauthorized
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry as er
|
||||
from homeassistant.helpers.debounce import Debouncer
|
||||
from homeassistant.helpers.entity import Entity, EntityDescription
|
||||
@@ -786,6 +789,10 @@ class CalendarEventView(http.HomeAssistantView):
|
||||
|
||||
async def get(self, request: web.Request, entity_id: str) -> web.Response:
|
||||
"""Return calendar events."""
|
||||
user: User = request[KEY_HASS_USER]
|
||||
if not user.permissions.check_entity(entity_id, POLICY_READ):
|
||||
raise Unauthorized(entity_id=entity_id)
|
||||
|
||||
if not (entity := self.component.get_entity(entity_id)) or not isinstance(
|
||||
entity, CalendarEntity
|
||||
):
|
||||
@@ -837,10 +844,14 @@ class CalendarListView(http.HomeAssistantView):
|
||||
|
||||
async def get(self, request: web.Request) -> web.Response:
|
||||
"""Retrieve calendar list."""
|
||||
user: User = request[KEY_HASS_USER]
|
||||
hass = request.app[http.KEY_HASS]
|
||||
entity_perm = user.permissions.check_entity
|
||||
calendar_list: list[dict[str, str]] = []
|
||||
|
||||
for entity in self.component.entities:
|
||||
if not entity_perm(entity.entity_id, POLICY_READ):
|
||||
continue
|
||||
state = hass.states.get(entity.entity_id)
|
||||
assert state
|
||||
calendar_list.append({"name": state.name, "entity_id": entity.entity_id})
|
||||
@@ -860,6 +871,9 @@ async def handle_calendar_event_create(
|
||||
hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Handle creation of a calendar event."""
|
||||
if not connection.user.permissions.check_entity(msg["entity_id"], POLICY_CONTROL):
|
||||
raise Unauthorized(entity_id=msg["entity_id"])
|
||||
|
||||
if not (entity := hass.data[DATA_COMPONENT].get_entity(msg["entity_id"])):
|
||||
connection.send_error(msg["id"], ERR_NOT_FOUND, "Entity not found")
|
||||
return
|
||||
@@ -899,6 +913,8 @@ async def handle_calendar_event_delete(
|
||||
hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Handle delete of a calendar event."""
|
||||
if not connection.user.permissions.check_entity(msg["entity_id"], POLICY_CONTROL):
|
||||
raise Unauthorized(entity_id=msg["entity_id"])
|
||||
|
||||
if not (entity := hass.data[DATA_COMPONENT].get_entity(msg["entity_id"])):
|
||||
connection.send_error(msg["id"], ERR_NOT_FOUND, "Entity not found")
|
||||
@@ -944,7 +960,10 @@ async def handle_calendar_event_delete(
|
||||
async def handle_calendar_event_update(
|
||||
hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Handle creation of a calendar event."""
|
||||
"""Handle update of a calendar event."""
|
||||
if not connection.user.permissions.check_entity(msg["entity_id"], POLICY_CONTROL):
|
||||
raise Unauthorized(entity_id=msg["entity_id"])
|
||||
|
||||
if not (entity := hass.data[DATA_COMPONENT].get_entity(msg["entity_id"])):
|
||||
connection.send_error(msg["id"], ERR_NOT_FOUND, "Entity not found")
|
||||
return
|
||||
@@ -989,6 +1008,9 @@ async def handle_calendar_event_subscribe(
|
||||
"""Subscribe to calendar event updates."""
|
||||
entity_id: str = msg["entity_id"]
|
||||
|
||||
if not connection.user.permissions.check_entity(entity_id, POLICY_READ):
|
||||
raise Unauthorized(entity_id=entity_id)
|
||||
|
||||
if not (entity := hass.data[DATA_COMPONENT].get_entity(entity_id)):
|
||||
connection.send_error(
|
||||
msg["id"],
|
||||
|
||||
@@ -7,9 +7,7 @@ from homeassistant.helpers.condition import Condition, make_entity_state_conditi
|
||||
from .const import DOMAIN
|
||||
|
||||
CONDITIONS: dict[str, type[Condition]] = {
|
||||
"is_event_active": make_entity_state_condition(
|
||||
DOMAIN, STATE_ON, support_duration=True
|
||||
),
|
||||
"is_event_active": make_entity_state_condition(DOMAIN, STATE_ON),
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -7,11 +7,8 @@ is_event_active:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: condition_behavior
|
||||
options:
|
||||
- all
|
||||
- any
|
||||
automation_behavior:
|
||||
mode: condition
|
||||
for:
|
||||
required: true
|
||||
default: 00:00:00
|
||||
|
||||
@@ -64,12 +64,6 @@
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"condition_behavior": {
|
||||
"options": {
|
||||
"all": "All",
|
||||
"any": "Any"
|
||||
}
|
||||
},
|
||||
"trigger_offset_type": {
|
||||
"options": {
|
||||
"after": "After",
|
||||
|
||||
@@ -926,6 +926,7 @@ async def websocket_get_prefs(
|
||||
vol.Optional(PREF_ORIENTATION): vol.Coerce(Orientation),
|
||||
}
|
||||
)
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.async_response
|
||||
async def websocket_update_prefs(
|
||||
hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
|
||||
|
||||
@@ -13,8 +13,8 @@ from homeassistant.helpers.condition import (
|
||||
Condition,
|
||||
ConditionConfig,
|
||||
EntityConditionBase,
|
||||
EntityNumericalConditionBase,
|
||||
EntityNumericalConditionWithUnitBase,
|
||||
make_entity_numerical_condition,
|
||||
make_entity_state_condition,
|
||||
)
|
||||
from homeassistant.util.unit_conversion import TemperatureConverter
|
||||
@@ -59,15 +59,36 @@ class ClimateTargetTemperatureCondition(EntityNumericalConditionWithUnitBase):
|
||||
_domain_specs = {DOMAIN: DomainSpec(value_source=ATTR_TEMPERATURE)}
|
||||
_unit_converter = TemperatureConverter
|
||||
|
||||
def _should_include(self, state: State) -> bool:
|
||||
"""Skip climate entities that do not expose a target temperature."""
|
||||
return (
|
||||
super()._should_include(state)
|
||||
and state.attributes.get(ATTR_TEMPERATURE) is not None
|
||||
)
|
||||
|
||||
def _get_entity_unit(self, entity_state: State) -> str | None:
|
||||
"""Get the temperature unit of a climate entity from its state."""
|
||||
# Climate entities convert temperatures to the system unit via show_temp
|
||||
return self._hass.config.units.temperature_unit
|
||||
|
||||
|
||||
class ClimateTargetHumidityCondition(EntityNumericalConditionBase):
|
||||
"""Condition for climate target humidity."""
|
||||
|
||||
_domain_specs = {DOMAIN: DomainSpec(value_source=ATTR_HUMIDITY)}
|
||||
_valid_unit = "%"
|
||||
|
||||
def _should_include(self, state: State) -> bool:
|
||||
"""Skip climate entities that do not expose a target humidity."""
|
||||
return (
|
||||
super()._should_include(state)
|
||||
and state.attributes.get(ATTR_HUMIDITY) is not None
|
||||
)
|
||||
|
||||
|
||||
CONDITIONS: dict[str, type[Condition]] = {
|
||||
"is_hvac_mode": ClimateHVACModeCondition,
|
||||
"is_off": make_entity_state_condition(DOMAIN, HVACMode.OFF, support_duration=True),
|
||||
"is_off": make_entity_state_condition(DOMAIN, HVACMode.OFF),
|
||||
"is_on": make_entity_state_condition(
|
||||
DOMAIN,
|
||||
{
|
||||
@@ -88,10 +109,7 @@ CONDITIONS: dict[str, type[Condition]] = {
|
||||
"is_heating": make_entity_state_condition(
|
||||
{DOMAIN: DomainSpec(value_source=ATTR_HVAC_ACTION)}, HVACAction.HEATING
|
||||
),
|
||||
"target_humidity": make_entity_numerical_condition(
|
||||
{DOMAIN: DomainSpec(value_source=ATTR_HUMIDITY)},
|
||||
valid_unit="%",
|
||||
),
|
||||
"target_humidity": ClimateTargetHumidityCondition,
|
||||
"target_temperature": ClimateTargetTemperatureCondition,
|
||||
}
|
||||
|
||||
|
||||
@@ -7,11 +7,13 @@
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: condition_behavior
|
||||
options:
|
||||
- all
|
||||
- any
|
||||
automation_behavior:
|
||||
mode: condition
|
||||
for: &condition_for
|
||||
required: true
|
||||
default: 00:00:00
|
||||
selector:
|
||||
duration:
|
||||
|
||||
.humidity_threshold_entity: &humidity_threshold_entity
|
||||
- domain: input_number
|
||||
@@ -39,16 +41,7 @@
|
||||
- domain: number
|
||||
device_class: temperature
|
||||
|
||||
is_off:
|
||||
target: *condition_climate_target
|
||||
fields:
|
||||
behavior: *condition_behavior
|
||||
for:
|
||||
required: true
|
||||
default: 00:00:00
|
||||
selector:
|
||||
duration:
|
||||
|
||||
is_off: *condition_common
|
||||
is_on: *condition_common
|
||||
is_cooling: *condition_common
|
||||
is_drying: *condition_common
|
||||
@@ -58,6 +51,7 @@ is_hvac_mode:
|
||||
target: *condition_climate_target
|
||||
fields:
|
||||
behavior: *condition_behavior
|
||||
for: *condition_for
|
||||
hvac_mode:
|
||||
context:
|
||||
filter_target: target
|
||||
@@ -73,6 +67,7 @@ target_humidity:
|
||||
target: *condition_climate_target
|
||||
fields:
|
||||
behavior: *condition_behavior
|
||||
for: *condition_for
|
||||
threshold:
|
||||
required: true
|
||||
selector:
|
||||
@@ -85,6 +80,7 @@ target_temperature:
|
||||
target: *condition_climate_target
|
||||
fields:
|
||||
behavior: *condition_behavior
|
||||
for: *condition_for
|
||||
threshold:
|
||||
required: true
|
||||
selector:
|
||||
|
||||
@@ -13,6 +13,9 @@
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::climate::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::climate::common::condition_for_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Thermostat is cooling"
|
||||
@@ -22,6 +25,9 @@
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::climate::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::climate::common::condition_for_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Thermostat is drying"
|
||||
@@ -31,6 +37,9 @@
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::climate::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::climate::common::condition_for_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Thermostat is heating"
|
||||
@@ -41,6 +50,9 @@
|
||||
"behavior": {
|
||||
"name": "[%key:component::climate::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::climate::common::condition_for_name%]"
|
||||
},
|
||||
"hvac_mode": {
|
||||
"description": "The HVAC modes to test for.",
|
||||
"name": "Modes"
|
||||
@@ -65,6 +77,9 @@
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::climate::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::climate::common::condition_for_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Thermostat is on"
|
||||
@@ -75,6 +90,9 @@
|
||||
"behavior": {
|
||||
"name": "[%key:component::climate::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::climate::common::condition_for_name%]"
|
||||
},
|
||||
"threshold": {
|
||||
"name": "[%key:component::climate::common::condition_threshold_name%]"
|
||||
}
|
||||
@@ -87,6 +105,9 @@
|
||||
"behavior": {
|
||||
"name": "[%key:component::climate::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::climate::common::condition_for_name%]"
|
||||
},
|
||||
"threshold": {
|
||||
"name": "[%key:component::climate::common::condition_threshold_name%]"
|
||||
}
|
||||
@@ -271,21 +292,6 @@
|
||||
"message": "Provided temperature {check_temp} is not valid. Accepted range is {min_temp} to {max_temp}."
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"condition_behavior": {
|
||||
"options": {
|
||||
"all": "All",
|
||||
"any": "Any"
|
||||
}
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"set_fan_mode": {
|
||||
"description": "Sets the fan mode of a thermostat.",
|
||||
|
||||
@@ -8,14 +8,15 @@ from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.automation import DomainSpec
|
||||
from homeassistant.helpers.trigger import (
|
||||
ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST,
|
||||
EntityNumericalStateChangedTriggerBase,
|
||||
EntityNumericalStateChangedTriggerWithUnitBase,
|
||||
EntityNumericalStateCrossedThresholdTriggerBase,
|
||||
EntityNumericalStateCrossedThresholdTriggerWithUnitBase,
|
||||
EntityNumericalStateTriggerBase,
|
||||
EntityNumericalStateTriggerWithUnitBase,
|
||||
EntityTargetStateTriggerBase,
|
||||
Trigger,
|
||||
TriggerConfig,
|
||||
make_entity_numerical_state_changed_trigger,
|
||||
make_entity_numerical_state_crossed_threshold_trigger,
|
||||
make_entity_target_state_trigger,
|
||||
make_entity_transition_trigger,
|
||||
)
|
||||
@@ -55,6 +56,13 @@ class _ClimateTargetTemperatureTriggerMixin(EntityNumericalStateTriggerWithUnitB
|
||||
_domain_specs = {DOMAIN: DomainSpec(value_source=ATTR_TEMPERATURE)}
|
||||
_unit_converter = TemperatureConverter
|
||||
|
||||
def _should_include(self, state: State) -> bool:
|
||||
"""Skip climate entities that do not expose a target temperature."""
|
||||
return (
|
||||
super()._should_include(state)
|
||||
and state.attributes.get(ATTR_TEMPERATURE) is not None
|
||||
)
|
||||
|
||||
def _get_entity_unit(self, state: State) -> str | None:
|
||||
"""Get the temperature unit of a climate entity from its state."""
|
||||
# Climate entities convert temperatures to the system unit via show_temp
|
||||
@@ -75,6 +83,32 @@ class ClimateTargetTemperatureCrossedThresholdTrigger(
|
||||
"""Trigger for climate target temperature value crossing a threshold."""
|
||||
|
||||
|
||||
class _ClimateTargetHumidityTriggerMixin(EntityNumericalStateTriggerBase):
|
||||
"""Mixin for climate target humidity triggers."""
|
||||
|
||||
_domain_specs = {DOMAIN: DomainSpec(value_source=ATTR_HUMIDITY)}
|
||||
_valid_unit = "%"
|
||||
|
||||
def _should_include(self, state: State) -> bool:
|
||||
"""Skip climate entities that do not expose a target humidity."""
|
||||
return (
|
||||
super()._should_include(state)
|
||||
and state.attributes.get(ATTR_HUMIDITY) is not None
|
||||
)
|
||||
|
||||
|
||||
class ClimateTargetHumidityChangedTrigger(
|
||||
_ClimateTargetHumidityTriggerMixin, EntityNumericalStateChangedTriggerBase
|
||||
):
|
||||
"""Trigger for climate target humidity value changes."""
|
||||
|
||||
|
||||
class ClimateTargetHumidityCrossedThresholdTrigger(
|
||||
_ClimateTargetHumidityTriggerMixin, EntityNumericalStateCrossedThresholdTriggerBase
|
||||
):
|
||||
"""Trigger for climate target humidity value crossing a threshold."""
|
||||
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"hvac_mode_changed": HVACModeChangedTrigger,
|
||||
"started_cooling": make_entity_target_state_trigger(
|
||||
@@ -83,14 +117,8 @@ TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"started_drying": make_entity_target_state_trigger(
|
||||
{DOMAIN: DomainSpec(value_source=ATTR_HVAC_ACTION)}, HVACAction.DRYING
|
||||
),
|
||||
"target_humidity_changed": make_entity_numerical_state_changed_trigger(
|
||||
{DOMAIN: DomainSpec(value_source=ATTR_HUMIDITY)},
|
||||
valid_unit="%",
|
||||
),
|
||||
"target_humidity_crossed_threshold": make_entity_numerical_state_crossed_threshold_trigger(
|
||||
{DOMAIN: DomainSpec(value_source=ATTR_HUMIDITY)},
|
||||
valid_unit="%",
|
||||
),
|
||||
"target_humidity_changed": ClimateTargetHumidityChangedTrigger,
|
||||
"target_humidity_crossed_threshold": ClimateTargetHumidityCrossedThresholdTrigger,
|
||||
"target_temperature_changed": ClimateTargetTemperatureChangedTrigger,
|
||||
"target_temperature_crossed_threshold": ClimateTargetTemperatureCrossedThresholdTrigger,
|
||||
"turned_off": make_entity_target_state_trigger(DOMAIN, HVACMode.OFF),
|
||||
|
||||
@@ -7,12 +7,8 @@
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: trigger_behavior
|
||||
options:
|
||||
- first
|
||||
- last
|
||||
- any
|
||||
automation_behavior:
|
||||
mode: trigger
|
||||
for: &trigger_for
|
||||
required: true
|
||||
default: 00:00:00
|
||||
|
||||
@@ -374,6 +374,7 @@ class CloudClient(Interface):
|
||||
method=payload["method"],
|
||||
query_string=payload["query"],
|
||||
mock_source=DOMAIN,
|
||||
remote=None, # Remote will be used for the local_only check, but since this is from the cloud we want it to be None to mark it as non-local and bypass the ip parsing and remote checks
|
||||
)
|
||||
|
||||
response = await webhook.async_handle_webhook(
|
||||
|
||||
@@ -615,6 +615,7 @@ class DownloadSupportPackageView(HomeAssistantView):
|
||||
|
||||
return markdown
|
||||
|
||||
@require_admin
|
||||
async def get(self, request: web.Request) -> web.Response:
|
||||
"""Download support package file."""
|
||||
|
||||
@@ -709,6 +710,7 @@ def _require_cloud_login(
|
||||
return with_cloud_auth
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@_require_cloud_login
|
||||
@websocket_api.websocket_command({vol.Required("type"): "cloud/subscription"})
|
||||
@websocket_api.async_response
|
||||
@@ -750,6 +752,7 @@ def validate_language_voice(value: tuple[str, str]) -> tuple[str, str]:
|
||||
return value
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@_require_cloud_login
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
@@ -809,6 +812,7 @@ async def websocket_update_prefs(
|
||||
connection.send_message(websocket_api.result_message(msg["id"]))
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@_require_cloud_login
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
@@ -829,6 +833,7 @@ async def websocket_hook_create(
|
||||
connection.send_message(websocket_api.result_message(msg["id"], hook))
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@_require_cloud_login
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
|
||||
@@ -18,7 +18,12 @@ from aiocomelit.const import (
|
||||
SCENARIO,
|
||||
VEDO,
|
||||
)
|
||||
from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData
|
||||
from aiocomelit.exceptions import (
|
||||
CannotAuthenticate,
|
||||
CannotConnect,
|
||||
CannotRetrieveData,
|
||||
DeviceStorageFailureError,
|
||||
)
|
||||
from aiohttp import ClientSession
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -112,6 +117,11 @@ class ComelitBaseCoordinator(DataUpdateCoordinator[T]):
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="cannot_authenticate",
|
||||
) from err
|
||||
except DeviceStorageFailureError as err:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="device_storage_failure",
|
||||
) from err
|
||||
|
||||
@abstractmethod
|
||||
async def _async_update_system_data(self) -> T:
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aiocomelit"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aiocomelit==2.0.2"]
|
||||
"requirements": ["aiocomelit==2.0.3"]
|
||||
}
|
||||
|
||||
@@ -121,6 +121,9 @@
|
||||
"cannot_retrieve_data": {
|
||||
"message": "Error retrieving data: {error}"
|
||||
},
|
||||
"device_storage_failure": {
|
||||
"message": "Device SD card read failure. The card may be corrupted or failing; replacement is recommended."
|
||||
},
|
||||
"humidity_while_off": {
|
||||
"message": "Cannot change humidity while off"
|
||||
},
|
||||
|
||||
@@ -5,7 +5,12 @@ from functools import wraps
|
||||
from typing import TYPE_CHECKING, Any, Concatenate, Literal
|
||||
|
||||
from aiocomelit.api import ComelitSerialBridgeObject
|
||||
from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData
|
||||
from aiocomelit.exceptions import (
|
||||
CannotAuthenticate,
|
||||
CannotConnect,
|
||||
CannotRetrieveData,
|
||||
DeviceStorageFailureError,
|
||||
)
|
||||
from aiohttp import ClientSession, CookieJar
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -110,6 +115,12 @@ def bridge_api_call[_T: ComelitBridgeBaseEntity, **_P](
|
||||
translation_key="cannot_retrieve_data",
|
||||
translation_placeholders={"error": repr(err)},
|
||||
) from err
|
||||
except DeviceStorageFailureError as err:
|
||||
self.coordinator.last_update_success = False
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="device_storage_failure",
|
||||
) from err
|
||||
except CannotAuthenticate:
|
||||
self.coordinator.last_update_success = False
|
||||
self.coordinator.config_entry.async_start_reauth(self.hass)
|
||||
|
||||
@@ -10,32 +10,19 @@ from homeassistant.auth.models import User
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
|
||||
WS_TYPE_LIST = "config/auth/list"
|
||||
SCHEMA_WS_LIST = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
|
||||
{vol.Required("type"): WS_TYPE_LIST}
|
||||
)
|
||||
|
||||
WS_TYPE_DELETE = "config/auth/delete"
|
||||
SCHEMA_WS_DELETE = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
|
||||
{vol.Required("type"): WS_TYPE_DELETE, vol.Required("user_id"): str}
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup(hass: HomeAssistant) -> bool:
|
||||
"""Enable the Home Assistant views."""
|
||||
websocket_api.async_register_command(
|
||||
hass, WS_TYPE_LIST, websocket_list, SCHEMA_WS_LIST
|
||||
)
|
||||
websocket_api.async_register_command(
|
||||
hass, WS_TYPE_DELETE, websocket_delete, SCHEMA_WS_DELETE
|
||||
)
|
||||
websocket_api.async_register_command(hass, websocket_list)
|
||||
websocket_api.async_register_command(hass, websocket_delete)
|
||||
websocket_api.async_register_command(hass, websocket_create)
|
||||
websocket_api.async_register_command(hass, websocket_update)
|
||||
return True
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command({vol.Required("type"): "config/auth/list"})
|
||||
@websocket_api.async_response
|
||||
async def websocket_list(
|
||||
hass: HomeAssistant,
|
||||
@@ -49,6 +36,9 @@ async def websocket_list(
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command(
|
||||
{vol.Required("type"): "config/auth/delete", vol.Required("user_id"): str}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
async def websocket_delete(
|
||||
hass: HomeAssistant,
|
||||
|
||||
@@ -14,6 +14,8 @@ from datetime import datetime
|
||||
import functools as ft
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import ATTR_ENTITY_PICTURE, ATTR_FRIENDLY_NAME
|
||||
from homeassistant.core import (
|
||||
HassJob,
|
||||
@@ -24,6 +26,7 @@ from homeassistant.core import (
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity import async_generate_entity_id
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
from homeassistant.helpers.service import async_register_admin_service
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util.async_ import run_callback_threadsafe
|
||||
|
||||
@@ -149,8 +152,12 @@ class Configurator:
|
||||
self._requests: dict[
|
||||
str, tuple[str, list[dict[str, str]], ConfiguratorCallback | None]
|
||||
] = {}
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_CONFIGURE, self.async_handle_service_call
|
||||
async_register_admin_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_CONFIGURE,
|
||||
self.async_handle_service_call,
|
||||
schema=vol.Schema({}, extra=vol.ALLOW_EXTRA),
|
||||
)
|
||||
|
||||
@async_callback
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/conversation",
|
||||
"integration_type": "entity",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["hassil==3.5.0", "home-assistant-intents==2026.3.24"]
|
||||
"requirements": ["hassil==3.5.0", "home-assistant-intents==2026.5.5"]
|
||||
}
|
||||
|
||||
@@ -7,11 +7,13 @@ is_value:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: condition_behavior
|
||||
options:
|
||||
- all
|
||||
- any
|
||||
automation_behavior:
|
||||
mode: condition
|
||||
for:
|
||||
required: true
|
||||
default: 00:00:00
|
||||
selector:
|
||||
duration:
|
||||
threshold:
|
||||
required: true
|
||||
selector:
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
{
|
||||
"common": {
|
||||
"condition_for_name": "For at least",
|
||||
"trigger_behavior_name": "Trigger when",
|
||||
"trigger_for_name": "For at least"
|
||||
},
|
||||
@@ -10,6 +11,9 @@
|
||||
"behavior": {
|
||||
"name": "Condition passes if"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::counter::common::condition_for_name%]"
|
||||
},
|
||||
"threshold": {
|
||||
"name": "Threshold type"
|
||||
}
|
||||
@@ -43,21 +47,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"condition_behavior": {
|
||||
"options": {
|
||||
"all": "All",
|
||||
"any": "Any"
|
||||
}
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"decrement": {
|
||||
"description": "Decrements a counter by its step size.",
|
||||
|
||||
@@ -7,12 +7,8 @@
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: trigger_behavior
|
||||
options:
|
||||
- first
|
||||
- last
|
||||
- any
|
||||
automation_behavior:
|
||||
mode: trigger
|
||||
for:
|
||||
required: true
|
||||
default: 00:00:00
|
||||
|
||||
@@ -3,11 +3,13 @@
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: condition_behavior
|
||||
options:
|
||||
- all
|
||||
- any
|
||||
automation_behavior:
|
||||
mode: condition
|
||||
for:
|
||||
required: true
|
||||
default: 00:00:00
|
||||
selector:
|
||||
duration:
|
||||
|
||||
awning_is_closed:
|
||||
fields: *condition_common_fields
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
{
|
||||
"common": {
|
||||
"condition_behavior_name": "Condition passes if",
|
||||
"condition_for_name": "For at least",
|
||||
"trigger_behavior_name": "Trigger when",
|
||||
"trigger_for_name": "For at least"
|
||||
},
|
||||
@@ -10,6 +11,9 @@
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::cover::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::cover::common::condition_for_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Awning is closed"
|
||||
@@ -19,6 +23,9 @@
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::cover::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::cover::common::condition_for_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Awning is open"
|
||||
@@ -28,6 +35,9 @@
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::cover::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::cover::common::condition_for_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Blind is closed"
|
||||
@@ -37,6 +47,9 @@
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::cover::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::cover::common::condition_for_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Blind is open"
|
||||
@@ -46,6 +59,9 @@
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::cover::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::cover::common::condition_for_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Curtain is closed"
|
||||
@@ -55,6 +71,9 @@
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::cover::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::cover::common::condition_for_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Curtain is open"
|
||||
@@ -64,6 +83,9 @@
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::cover::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::cover::common::condition_for_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Shade is closed"
|
||||
@@ -73,6 +95,9 @@
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::cover::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::cover::common::condition_for_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Shade is open"
|
||||
@@ -82,6 +107,9 @@
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::cover::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::cover::common::condition_for_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Shutter is closed"
|
||||
@@ -91,6 +119,9 @@
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::cover::common::condition_behavior_name%]"
|
||||
},
|
||||
"for": {
|
||||
"name": "[%key:component::cover::common::condition_for_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Shutter is open"
|
||||
@@ -179,21 +210,6 @@
|
||||
"name": "Window"
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"condition_behavior": {
|
||||
"options": {
|
||||
"all": "All",
|
||||
"any": "Any"
|
||||
}
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"close_cover": {
|
||||
"description": "Closes a cover.",
|
||||
|
||||
@@ -3,12 +3,8 @@
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: trigger_behavior
|
||||
options:
|
||||
- first
|
||||
- last
|
||||
- any
|
||||
automation_behavior:
|
||||
mode: trigger
|
||||
for:
|
||||
required: true
|
||||
default: 00:00:00
|
||||
|
||||
@@ -11,6 +11,7 @@ from homeassistant.helpers import (
|
||||
device_registry as dr,
|
||||
entity_registry as er,
|
||||
)
|
||||
from homeassistant.helpers.service import async_register_admin_service
|
||||
from homeassistant.util.read_only_dict import ReadOnlyDict
|
||||
|
||||
from .const import CONF_BRIDGE_ID, DOMAIN, LOGGER
|
||||
@@ -98,7 +99,8 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
await async_remove_orphaned_entries_service(hub)
|
||||
|
||||
for service in SUPPORTED_SERVICES:
|
||||
hass.services.async_register(
|
||||
async_register_admin_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
service,
|
||||
async_call_deconz_service,
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user