Mirror of https://github.com/home-assistant/core.git (synced 2025-09-20 18:39:40 +00:00)

Compare commits: mqtt-json-...simplify_s
362 Commits
SHA1
---
c326f3fa1b
12cc0ed18d
8ca7562390
942f7eebb1
1a167e6aee
9531ae10f2
bfc9616abf
054a5d751a
a43ba4f966
1a5cae125f
f3b9bda876
3f3aaa2815
6dc7870779
be83416c72
c745ee18eb
cf907ae196
8eee53036a
b37237d24b
950e758b62
9cd940b7df
10b186a20d
757aec1c6b
0b159bdb9c
8728312e87
bbb67db354
265f5da21a
54859e8a83
c87dba878d
8d8e008123
b30667a469
8920c548d5
eac719f9af
71c274cb91
d4902361e6
f63eee3889
21bfe610d1
21c174e895
ec148e0459
286763b998
5f88122a2b
31968d16ab
c125554817
10f2955d34
55712b784c
fe3a929556
534801e80d
8aeda5a0c0
eb1cbbc75c
fa8a4d7098
2623ebac4d
1746c51ce4
8b984a2105
ebee370a56
dabd096587
21399818af
4354214fbf
5bd39804f1
6d3ad3ab9c
4c212bdcd4
b91b39580f
472d70b6c9
017a84a859
d184540967
1740984b3b
4db8592c61
27e630c107
ea8833342d
87be2ba823
51c35eb631
24a86d042f
cd6f653123
fd05ddca28
a1f2eb44ae
c4ddc03dbc
9db5aafb71
64cdcfb613
c761ce699c
40ebce4ae8
29914d6722
5eef6edded
db729273a5
946d75d651
093f779edb
87658e77a7
38f65cda98
797c6ddedd
fe8a53407a
ae5f57fd99
a93c3cc23c
804b42e1fb
a4f15e4840
2471177c84
a494d3ec69
b10a9721a7
04c0bb20d6
1598c4ebe8
d67ec7593a
4a4c124181
c34af4be86
823071b722
462fa77ba1
24fc8b9297
2596ab2940
23fa84e20e
7f13141297
770f41d079
df16e85359
3c6db923a3
450c47f932
048f64eccf
c4c523e8b7
87e30e0907
74660da2d2
6b8c180509
eb4a873c43
6aafa666d6
9ee9bb368d
6e4258c8a9
d65e704823
aadaf87c16
e70b147c0c
031b12752f
df0cfd69a9
b2c53f2d78
3649e949b1
de7e2303a7
892f3f267b
0254285285
44a95242dc
f9b1c52d65
aa8d78622c
ca6289a576
0f372f4b47
4bba167ab3
962c0c443d
c6b4cac28a
3c7e3a5e30
fa698956c3
32f136b12f
e1f617df25
84f1b8a5cc
e9cedf4852
9c72b40ab4
65f655e5f5
af28573894
c5fc1de3df
1df1144eb9
d51c0e3752
f5157878c2
fb723571b6
dbf80c3ce3
e0a774b598
168afc5f0e
af23670854
935ce421df
c60ad8179d
14ad3364e3
e229f36648
f4f99e015c
5dc509cba0
75597ac98d
b503f792b5
410c3df6dd
f1bf28df18
99fb64af9b
c0af0159e3
71749da3a3
b01be94034
47ec8b7f12
93ec9e448e
90bc41dd02
410d869f3d
d75d9f2589
afbb832a57
bdc881c87a
22ea269ed8
10fecbaf4d
cbdc1dc5b6
b203a831c9
5ccbee4c9a
1483c9488f
f5535db24c
e40ecdfb00
2f4c69bbd5
dd0f6a702b
5ba580bc25
c13002bdd5
75d22191a0
58d6549f1c
1fcc6df1fd
9bf467e6d1
d877d6d93f
d2b255ba92
1509c429d6
af9717c1cd
49e75c9cf8
c97f16a96d
a3a4433d62
f832002afd
dbc7f2b43c
1cd3a1eede
7d6e0d44b0
2bb6d745ca
beb9d7856c
6a4c8a550a
7d23752a3f
c2b2a78db5
0fb6bbee59
d93e0a105a
ab1619c0b4
70df7b8503
0e2c2ad355
4c26718739
96034e1525
df1302fc1c
5a5b639aa4
e9fbe2227f
82b57568a0
be692ab2fd
24c04cceee
97077898bb
08485f4e09
b64d60fce4
3690497e1f
3499ed7a98
2c809d5903
40988198f3
b87e581cde
f1c55ee7e2
9f17a82acf
3955391cda
d9a757c7e6
aa1ec944c0
88c3b6a9f5
ada73953f6
42e9b9a0bc
ec6a052ff5
c91d64e04d
ab5d1d27f1
1c10b85fed
91a7db08ff
a764d54123
dc09e33556
14173bd9ec
d2e7537629
9a165a64fe
9c749a6abc
2e33222c71
ab1c2c4f70
529219ae69
d6ce71fa61
e5b67d513a
a547179f66
8c61788a7d
6b934d94db
d30ad82774
4618b33e93
d6299094db
087d9d30c0
f07890cf5c
e5b78cc481
12b409d8e1
def5408db8
f105b45ee2
9d904c30a7
99b047939f
3a615908ee
baff541f46
6d8c35cfe9
b8d9883e74
c3c65af450
3af8616764
64ec4609c5
c78bc26b83
0c093646c9
1b27acdde0
9dafc0e02f
0091dafcb0
b387acffb7
36b3133fa2
fe01e96012
0b56ec16ed
ca79f4c963
9a43f2776d
0ac7cb311d
3472020812
dcd09523a6
a5bfdc697b
dbb29a7c7d
124a63d846
3de701a9ab
bfe1dd65b3
71bf5e14cc
6d231c2c99
b93072865b
14ebb6cd74
2ddbcd560e
c5ff7ed1c9
c4bea5616c
17fe147726
9fae4e7e1f
0cebca498c
521ff62aae
fd1df5ad88
91e7a35a07
09381abf46
3713c03c07
bd8ddd7cd8
f0dc1f927b
984590c6d1
d324021a3f
1f4c0b3e9b
69893aba4b
b9dcf89b37
54fd55a1c6
cc64fa639d
84140ba414
d1726b84c8
4724ecbc38
85afe87b5e
5960179844
9f8f7d2fde
4c22264b13
baf4382724
8263ea4a4a
8412581be4
207c848438
2b61601fd7
ee506e6c14
8003a49571
e438b11afb
64ba43703c
1d214ae120
68d987f866
299cc5e40c
2c3456177e
1ef90180cc
4c1364dfd1
09a44a6a30
63303bdcde
59cd24f54b
82b9fead39
a879e36e9b
b12c458188
0cda883b56
ae58e633f0
06480bfd9d
625f586945
7dbeaa475d
dff3d5f8af
89c335919a
2bb4573357
7037ce989c
bfdd2053ba
fcc3f92f8c
8710267d53
85b6adcc9a
beec6e86e0
3dacffaaf9
d90f2a1de1
b6c9217429
7fc8da6769
.claude/agents/quality-scale-rule-verifier.md (new file, 77 lines)

@@ -0,0 +1,77 @@
---
name: quality-scale-rule-verifier
description: |
  Use this agent when you need to verify that a Home Assistant integration follows a specific quality scale rule. This includes checking if the integration implements required patterns, configurations, or code structures defined by the quality scale system.

  <example>
  Context: The user wants to verify if an integration follows a specific quality scale rule.
  user: "Check if the peblar integration follows the config-flow rule"
  assistant: "I'll use the quality scale rule verifier to check if the peblar integration properly implements the config-flow rule."
  <commentary>
  Since the user is asking to verify a quality scale rule implementation, use the quality-scale-rule-verifier agent.
  </commentary>
  </example>

  <example>
  Context: The user is reviewing if an integration reaches a specific quality scale level.
  user: "Verify that this integration reaches the bronze quality scale"
  assistant: "Let me use the quality scale rule verifier to check the bronze quality scale implementation."
  <commentary>
  The user wants to verify the integration has reached a certain quality level, so use multiple quality-scale-rule-verifier agents to verify each bronze rule.
  </commentary>
  </example>
model: inherit
color: yellow
tools: Read, Bash, Grep, Glob, WebFetch
---

You are an expert Home Assistant integration quality scale auditor specializing in verifying compliance with specific quality scale rules. You have deep knowledge of Home Assistant's architecture, best practices, and the quality scale system that ensures integration consistency and reliability.

You will verify if an integration follows a specific quality scale rule by:

1. **Fetching Rule Documentation**: Retrieve the official rule documentation from:
   `https://raw.githubusercontent.com/home-assistant/developers.home-assistant/refs/heads/master/docs/core/integration-quality-scale/rules/{rule_name}.md`
   where `{rule_name}` is the rule identifier (e.g., 'config-flow', 'entity-unique-id', 'parallel-updates')

2. **Understanding Rule Requirements**: Parse the rule documentation to identify:
   - Core requirements and mandatory implementations
   - Specific code patterns or configurations required
   - Common violations and anti-patterns
   - Exemption criteria (when a rule might not apply)
   - The quality tier this rule belongs to (Bronze, Silver, Gold, Platinum)

3. **Analyzing Integration Code**: Examine the integration's codebase at `homeassistant/components/<integration domain>` focusing on:
   - `manifest.json` for quality scale declaration and configuration
   - `quality_scale.yaml` for rule status (done, todo, exempt)
   - Relevant Python modules based on the rule requirements
   - Configuration files and service definitions as needed

4. **Verification Process**:
   - Check if the rule is marked as 'done', 'todo', or 'exempt' in quality_scale.yaml
   - If marked 'exempt', verify the exemption reason is valid
   - If marked 'done', verify the actual implementation matches requirements
   - Identify specific files and code sections that demonstrate compliance or violations
   - Consider the integration's declared quality tier when applying rules
   - To fetch the integration docs, use WebFetch to fetch from `https://raw.githubusercontent.com/home-assistant/home-assistant.io/refs/heads/current/source/_integrations/<integration domain>.markdown`
   - To fetch information about a PyPI package, use the URL `https://pypi.org/pypi/<package>/json`

5. **Reporting Findings**: Provide a comprehensive verification report that includes:
   - **Rule Summary**: Brief description of what the rule requires
   - **Compliance Status**: Clear pass/fail/exempt determination
   - **Evidence**: Specific code examples showing compliance or violations
   - **Issues Found**: Detailed list of any non-compliance issues with file locations
   - **Recommendations**: Actionable steps to achieve compliance if needed
   - **Exemption Analysis**: If applicable, whether the exemption is justified

When examining code, you will:
- Look for exact implementation patterns specified in the rule
- Verify all required components are present and properly configured
- Check for common mistakes and anti-patterns
- Consider edge cases and error handling requirements
- Validate that implementations follow Home Assistant conventions

You will be thorough but focused, examining only the aspects relevant to the specific rule being verified. You will provide clear, actionable feedback that helps developers understand both what needs to be fixed and why it matters for integration quality.

If you cannot access the rule documentation or find the integration code, clearly state what information is missing and what you would need to complete the verification.

Remember that quality scale rules are cumulative - Bronze rules apply to all integrations with a quality scale, Silver rules apply to Silver+ integrations, and so on. Always consider the integration's target quality level when determining which rules should be enforced.
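The flow in steps 1 and 4 reduces to two operations: fetch the rule's markdown from the documented URL, then read the rule's status out of the integration's `quality_scale.yaml`. Below is a minimal Python sketch of that flow. It is an illustration only, not part of the new file: it assumes `requests` and `PyYAML` are installed, that it runs from a core checkout, and that `quality_scale.yaml` keeps its statuses under a top-level `rules:` mapping (either a bare status string or a mapping with a `status` key); the helper names are hypothetical.

```python
from pathlib import Path

import requests
import yaml

RULE_DOC_URL = (
    "https://raw.githubusercontent.com/home-assistant/developers.home-assistant"
    "/refs/heads/master/docs/core/integration-quality-scale/rules/{rule_name}.md"
)


def fetch_rule_doc(rule_name: str) -> str:
    """Download the official documentation for a quality scale rule."""
    response = requests.get(RULE_DOC_URL.format(rule_name=rule_name), timeout=30)
    response.raise_for_status()
    return response.text


def rule_status(domain: str, rule_name: str) -> str:
    """Read a rule's status (done/todo/exempt) from quality_scale.yaml."""
    path = Path("homeassistant/components") / domain / "quality_scale.yaml"
    data = yaml.safe_load(path.read_text())
    entry = data["rules"].get(rule_name, "todo")
    # Entries are either a bare status string or a mapping such as
    # {"status": "exempt", "comment": "..."}.
    if isinstance(entry, dict):
        return entry.get("status", "todo")
    return entry


if __name__ == "__main__":
    print(fetch_rule_doc("config-flow").splitlines()[0])  # rule title line
    print(rule_status("peblar", "config-flow"))
```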
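Step 4 also points at PyPI's JSON API for package metadata. A small sketch of that lookup, again assuming `requests`; `version` and `requires_python` are standard keys of the `info` object in the response:

```python
import requests


def pypi_info(package: str) -> dict:
    """Return the PyPI JSON metadata for a package."""
    response = requests.get(f"https://pypi.org/pypi/{package}/json", timeout=30)
    response.raise_for_status()
    return response.json()


info = pypi_info("aiohttp")
print(info["info"]["version"], info["info"]["requires_python"])
```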
.github/PULL_REQUEST_TEMPLATE.md (vendored, 5 changed lines)

@@ -55,8 +55,12 @@
 creating the PR. If you're unsure about any of them, don't hesitate to ask.
 We're here to help! This is simply a reminder of what we are going to look
 for before merging your code.
+
+AI tools are welcome, but contributors are responsible for *fully*
+understanding the code before submitting a PR.
 -->
 
+- [ ] I understand the code I am submitting and can explain how it works.
 - [ ] The code change is tested and works locally.
 - [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
 - [ ] There is no commented out code in this PR.

@@ -64,6 +68,7 @@
 - [ ] I have followed the [perfect PR recommendations][perfect-pr]
 - [ ] The code has been formatted using Ruff (`ruff format homeassistant tests`)
 - [ ] Tests have been added to verify that the new code works.
+- [ ] Any generated code has been carefully reviewed for correctness and compliance with project standards.
 
 If user exposed functionality or configuration variables are added/changed:
 
.github/workflows/builder.yml (vendored, 46 changed lines)

@@ -27,12 +27,12 @@ jobs:
       publish: ${{ steps.version.outputs.publish }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           fetch-depth: 0

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}

@@ -69,7 +69,7 @@
         run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

       - name: Upload translations
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: translations
           path: translations.tar.gz

@@ -90,11 +90,11 @@
       arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Download nightly wheels of frontend
         if: needs.init.outputs.channel == 'dev'
-        uses: dawidd6/action-download-artifact@v11
+        uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11
         with:
           github_token: ${{secrets.GITHUB_TOKEN}}
           repo: home-assistant/frontend

@@ -105,7 +105,7 @@
       - name: Download nightly wheels of intents
         if: needs.init.outputs.channel == 'dev'
-        uses: dawidd6/action-download-artifact@v11
+        uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11
         with:
           github_token: ${{secrets.GITHUB_TOKEN}}
           repo: OHF-Voice/intents-package

@@ -116,7 +116,7 @@
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         if: needs.init.outputs.channel == 'dev'
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

@@ -175,7 +175,7 @@
           sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt

       - name: Download translations
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           name: translations

@@ -190,14 +190,15 @@
           echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3.5.0
+        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

+      # home-assistant/builder doesn't support sha pinning
       - name: Build base image
-        uses: home-assistant/builder@2025.03.0
+        uses: home-assistant/builder@2025.09.0
         with:
           args: |
             $BUILD_ARGS \

@@ -242,7 +243,7 @@
       - green
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Set build additional args
         run: |

@@ -256,14 +257,15 @@
           fi

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3.5.0
+        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

+      # home-assistant/builder doesn't support sha pinning
       - name: Build base image
-        uses: home-assistant/builder@2025.03.0
+        uses: home-assistant/builder@2025.09.0
         with:
           args: |
             $BUILD_ARGS \

@@ -279,7 +281,7 @@
     runs-on: ubuntu-latest
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Initialize git
         uses: home-assistant/actions/helpers/git-init@master

@@ -321,23 +323,23 @@
       registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Install Cosign
-        uses: sigstore/cosign-installer@v3.9.2
+        uses: sigstore/cosign-installer@d7543c93d881b35a8faa02e8e3605f69b7a1ce62 # v3.10.0
         with:
           cosign-release: "v2.2.3"

       - name: Login to DockerHub
         if: matrix.registry == 'docker.io/homeassistant'
-        uses: docker/login-action@v3.5.0
+        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Login to GitHub Container Registry
         if: matrix.registry == 'ghcr.io/home-assistant'
-        uses: docker/login-action@v3.5.0
+        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}

@@ -454,15 +456,15 @@
     if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

       - name: Download translations
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           name: translations

@@ -480,7 +482,7 @@
           python -m build

       - name: Upload package to PyPI
-        uses: pypa/gh-action-pypi-publish@v1.13.0
+        uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
         with:
           skip-existing: true
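Every change above follows one pattern: an action reference moves from a mutable tag (`uses: actions/checkout@v5.0.0`) to an immutable commit SHA with the tag kept as a trailing comment, so a retagged or compromised release can no longer change what the workflow runs. Below is a minimal sketch of how such a pin can be resolved via the GitHub REST API; `requests` and the `resolve_pin` helper are assumptions for illustration, not part of this PR:

```python
import requests


def resolve_pin(repo: str, tag: str) -> str:
    """Return the commit SHA that a release tag points at."""
    url = f"https://api.github.com/repos/{repo}/git/ref/tags/{tag}"
    response = requests.get(url, timeout=30)
    response.raise_for_status()
    obj = response.json()["object"]
    if obj["type"] == "tag":
        # Annotated tag: dereference the tag object to its commit.
        obj = requests.get(obj["url"], timeout=30).json()["object"]
    return obj["sha"]


repo, tag = "actions/checkout", "v5.0.0"
# e.g. "uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0"
print(f"uses: {repo}@{resolve_pin(repo, tag)} # {tag}")
```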
.github/workflows/ci.yaml (vendored, 193 changed lines)

@@ -98,7 +98,7 @@ jobs:
     runs-on: ubuntu-24.04
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Generate partial Python venv restore key
         id: generate_python_cache_key
         run: |
@@ -120,7 +120,7 @@
         run: |
           echo "key=$(lsb_release -rs)-apt-${{ env.CACHE_VERSION }}-${{ env.HA_SHORT_VERSION }}" >> $GITHUB_OUTPUT
       - name: Filter for core changes
-        uses: dorny/paths-filter@v3.0.2
+        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
         id: core
         with:
           filters: .core_files.yaml
@@ -135,7 +135,7 @@
           echo "Result:"
           cat .integration_paths.yaml
       - name: Filter for integration changes
-        uses: dorny/paths-filter@v3.0.2
+        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
         id: integrations
         with:
           filters: .integration_paths.yaml
@@ -254,16 +254,16 @@
       - info
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.4
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           key: >-
@@ -279,7 +279,7 @@
           uv pip install "$(cat requirements_test.txt | grep pre-commit)"
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.2.4
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           lookup-only: true
@@ -300,16 +300,16 @@
       - pre-commit
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -318,7 +318,7 @@
             needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -340,16 +340,16 @@
       - pre-commit
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -358,7 +358,7 @@
             needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -380,16 +380,16 @@
       - pre-commit
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -398,7 +398,7 @@
             needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -470,7 +470,7 @@
       - script/hassfest/docker/Dockerfile
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Register hadolint problem matcher
         run: |
           echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -489,10 +489,10 @@
       python-version: ${{ fromJSON(needs.info.outputs.python_versions) }}
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
@@ -505,7 +505,7 @@
           env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.4
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           key: >-
@@ -513,7 +513,7 @@
             needs.info.outputs.python_cache_key }}
       - name: Restore uv wheel cache
         if: steps.cache-venv.outputs.cache-hit != 'true'
-        uses: actions/cache@v4.2.4
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: ${{ env.UV_CACHE_DIR }}
           key: >-
@@ -523,22 +523,24 @@
             ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-uv-${{
             env.UV_CACHE_VERSION }}-${{ steps.generate-uv-key.outputs.version }}-${{
             env.HA_SHORT_VERSION }}-
-      - name: Restore apt cache
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        id: cache-apt
-        uses: actions/cache@v4.2.4
+      - name: Check if apt cache exists
+        id: cache-apt-check
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
+          lookup-only: ${{ steps.cache-venv.outputs.cache-hit == 'true' }}
           path: |
             ${{ env.APT_CACHE_DIR }}
             ${{ env.APT_LIST_CACHE_DIR }}
           key: >-
             ${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
       - name: Install additional OS dependencies
-        if: steps.cache-venv.outputs.cache-hit != 'true'
+        if: |
+          steps.cache-venv.outputs.cache-hit != 'true'
+          || steps.cache-apt-check.outputs.cache-hit != 'true'
         timeout-minutes: 10
         run: |
           sudo rm /etc/apt/sources.list.d/microsoft-prod.list
-          if [[ "${{ steps.cache-apt.outputs.cache-hit }}" != 'true' ]]; then
+          if [[ "${{ steps.cache-apt-check.outputs.cache-hit }}" != 'true' ]]; then
             mkdir -p ${{ env.APT_CACHE_DIR }}
             mkdir -p ${{ env.APT_LIST_CACHE_DIR }}
           fi
@@ -563,9 +565,18 @@
             libswscale-dev \
             libudev-dev

-          if [[ "${{ steps.cache-apt.outputs.cache-hit }}" != 'true' ]]; then
+          if [[ "${{ steps.cache-apt-check.outputs.cache-hit }}" != 'true' ]]; then
             sudo chmod -R 755 ${{ env.APT_CACHE_BASE }}
           fi
+      - name: Save apt cache
+        if: steps.cache-apt-check.outputs.cache-hit != 'true'
+        uses: actions/cache/save@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        with:
+          path: |
+            ${{ env.APT_CACHE_DIR }}
+            ${{ env.APT_LIST_CACHE_DIR }}
+          key: >-
+            ${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
       - name: Create Python virtual environment
         if: steps.cache-venv.outputs.cache-hit != 'true'
         run: |
@@ -585,7 +596,7 @@
           python --version
           uv pip freeze >> pip_freeze.txt
       - name: Upload pip_freeze artifact
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: pip-freeze-${{ matrix.python-version }}
           path: pip_freeze.txt
@@ -631,16 +642,16 @@
             -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
             libturbojpeg
       - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -664,16 +675,16 @@
       - base
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -698,9 +709,9 @@
           && github.event_name == 'pull_request'
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Dependency review
-        uses: actions/dependency-review-action@v4.7.3
+        uses: actions/dependency-review-action@595b5aeba73380359d98a5e087f648dbb0edce1b # v4.7.3
         with:
           license-check: false # We use our own license audit checks
@@ -721,16 +732,16 @@
       python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -742,7 +753,7 @@
           . venv/bin/activate
           python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
       - name: Upload licenses
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
           path: licenses-${{ matrix.python-version }}.json
@@ -764,16 +775,16 @@
       - base
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -811,16 +822,16 @@
       - base
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -856,10 +867,10 @@
       - base
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
@@ -872,7 +883,7 @@
           env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -880,7 +891,7 @@
             ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Restore mypy cache
-        uses: actions/cache@v4.2.4
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: .mypy_cache
           key: >-
@@ -947,16 +958,16 @@
             libturbojpeg \
             libgammu-dev
       - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -968,7 +979,7 @@
           . venv/bin/activate
           python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
       - name: Upload pytest_buckets
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: pytest_buckets
           path: pytest_buckets.txt
@@ -1022,16 +1033,16 @@
             libgammu-dev \
             libxml2-utils
       - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1045,7 +1056,7 @@
         run: |
           echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
       - name: Download pytest_buckets
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           name: pytest_buckets
       - name: Compile English translations
@@ -1084,14 +1095,14 @@
           2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-full.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
           path: pytest-*.txt
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
           path: coverage.xml
@@ -1104,7 +1115,7 @@
           mv "junit.xml-tmp" "junit.xml"
       - name: Upload test results artifact
         if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: test-results-full-${{ matrix.python-version }}-${{ matrix.group }}
           path: junit.xml
@@ -1169,16 +1180,16 @@
             libmariadb-dev-compat \
             libxml2-utils
       - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1237,7 +1248,7 @@
           2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.mariadb }}
@@ -1245,7 +1256,7 @@
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: coverage-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.mariadb }}
@@ -1259,7 +1270,7 @@
           mv "junit.xml-tmp" "junit.xml"
       - name: Upload test results artifact
         if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: test-results-mariadb-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.mariadb }}
@@ -1325,16 +1336,16 @@
           sudo apt-get -y install \
             postgresql-server-dev-14
       - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1394,7 +1405,7 @@
           2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.postgresql }}
@@ -1402,7 +1413,7 @@
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: coverage-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.postgresql }}
@@ -1416,7 +1427,7 @@
           mv "junit.xml-tmp" "junit.xml"
       - name: Upload test results artifact
         if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: test-results-postgres-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.postgresql }}
@@ -1437,14 +1448,14 @@
     timeout-minutes: 10
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           pattern: coverage-*
       - name: Upload coverage to Codecov
         if: needs.info.outputs.test_full_suite == 'true'
-        uses: codecov/codecov-action@v5.5.1
+        uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
         with:
           fail_ci_if_error: true
           flags: full-suite
@@ -1498,16 +1509,16 @@
             libgammu-dev \
             libxml2-utils
       - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.4
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1563,14 +1574,14 @@
           2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
           path: pytest-*.txt
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
           path: coverage.xml
@@ -1583,7 +1594,7 @@
           mv "junit.xml-tmp" "junit.xml"
       - name: Upload test results artifact
         if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: test-results-partial-${{ matrix.python-version }}-${{ matrix.group }}
           path: junit.xml
@@ -1601,14 +1612,14 @@
     timeout-minutes: 10
     steps:
      - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           pattern: coverage-*
       - name: Upload coverage to Codecov
         if: needs.info.outputs.test_full_suite == 'false'
-        uses: codecov/codecov-action@v5.5.1
+        uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
         with:
           fail_ci_if_error: true
           token: ${{ secrets.CODECOV_TOKEN }}
@@ -1628,11 +1639,11 @@
     timeout-minutes: 10
     steps:
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           pattern: test-results-*
       - name: Upload test results to Codecov
-        uses: codecov/test-results-action@v1
+        uses: codecov/test-results-action@47f89e9acb64b76debcd5ea40642d25a4adced9f # v1.1.1
         with:
           fail_ci_if_error: true
           verbose: true
.github/workflows/codeql.yml (vendored, 6 changed lines)

@@ -21,14 +21,14 @@ jobs:

     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3.30.3
+        uses: github/codeql-action/init@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
         with:
           languages: python

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3.30.3
+        uses: github/codeql-action/analyze@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
         with:
           category: "/language:python"
@@ -16,7 +16,7 @@ jobs:
     steps:
       - name: Check if integration label was added and extract details
         id: extract
-        uses: actions/github-script@v8
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
         with:
           script: |
             // Debug: Log the event payload
@@ -113,7 +113,7 @@
       - name: Fetch similar issues
         id: fetch_similar
         if: steps.extract.outputs.should_continue == 'true'
-        uses: actions/github-script@v8
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
         env:
           INTEGRATION_LABELS: ${{ steps.extract.outputs.integration_labels }}
           CURRENT_NUMBER: ${{ steps.extract.outputs.current_number }}
@@ -231,7 +231,7 @@
       - name: Detect duplicates using AI
         id: ai_detection
         if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
-        uses: actions/ai-inference@v2.0.1
+        uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
         with:
           model: openai/gpt-4o
           system-prompt: |
@@ -280,7 +280,7 @@
       - name: Post duplicate detection results
         id: post_results
         if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
-        uses: actions/github-script@v8
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
         env:
           AI_RESPONSE: ${{ steps.ai_detection.outputs.response }}
           SIMILAR_ISSUES: ${{ steps.fetch_similar.outputs.similar_issues }}
@@ -16,7 +16,7 @@ jobs:
     steps:
       - name: Check issue language
         id: detect_language
-        uses: actions/github-script@v8
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
         env:
           ISSUE_NUMBER: ${{ github.event.issue.number }}
           ISSUE_TITLE: ${{ github.event.issue.title }}
@@ -57,7 +57,7 @@
       - name: Detect language using AI
         id: ai_language_detection
         if: steps.detect_language.outputs.should_continue == 'true'
-        uses: actions/ai-inference@v2.0.1
+        uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
         with:
           model: openai/gpt-4o-mini
           system-prompt: |
@@ -90,7 +90,7 @@

       - name: Process non-English issues
         if: steps.detect_language.outputs.should_continue == 'true'
-        uses: actions/github-script@v8
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
         env:
           AI_RESPONSE: ${{ steps.ai_language_detection.outputs.response }}
           ISSUE_NUMBER: ${{ steps.detect_language.outputs.issue_number }}
.github/workflows/lock.yml (vendored, 2 changed lines)

@@ -10,7 +10,7 @@ jobs:
     if: github.repository_owner == 'home-assistant'
     runs-on: ubuntu-latest
     steps:
-      - uses: dessant/lock-threads@v5.0.1
+      - uses: dessant/lock-threads@1bf7ec25051fe7c00bdd17e6a7cf3d7bfb7dc771 # v5.0.1
        with:
          github-token: ${{ github.token }}
          issue-inactive-days: "30"
.github/workflows/restrict-task-creation.yml (vendored, 2 changed lines)

@@ -12,7 +12,7 @@ jobs:
     if: github.event.issue.type.name == 'Task'
     steps:
       - name: Check if user is authorized
-        uses: actions/github-script@v8
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
         with:
           script: |
             const issueAuthor = context.payload.issue.user.login;
.github/workflows/stale.yml (vendored, 6 changed lines)

@@ -17,7 +17,7 @@ jobs:
       # - No PRs marked as no-stale
       # - No issues (-1)
       - name: 60 days stale PRs policy
-        uses: actions/stale@v10.0.0
+        uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
         with:
           repo-token: ${{ secrets.GITHUB_TOKEN }}
           days-before-stale: 60
@@ -57,7 +57,7 @@
       # - No issues marked as no-stale or help-wanted
       # - No PRs (-1)
       - name: 90 days stale issues
-        uses: actions/stale@v10.0.0
+        uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
         with:
           repo-token: ${{ steps.token.outputs.token }}
           days-before-stale: 90
@@ -87,7 +87,7 @@
       # - No Issues marked as no-stale or help-wanted
       # - No PRs (-1)
       - name: Needs more information stale issues policy
-        uses: actions/stale@v10.0.0
+        uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
         with:
           repo-token: ${{ steps.token.outputs.token }}
           only-labels: "needs-more-information"
.github/workflows/translations.yml (vendored, 4 changed lines)

@@ -19,10 +19,10 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
.github/workflows/wheels.yml (32 changes, vendored)
@@ -32,11 +32,11 @@ jobs:
       architectures: ${{ steps.info.outputs.architectures }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v6.0.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
@@ -91,7 +91,7 @@ jobs:
         ) > build_constraints.txt

       - name: Upload env_file
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: env_file
           path: ./.env_file
@@ -99,14 +99,14 @@ jobs:
           overwrite: true

       - name: Upload build_constraints
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: build_constraints
           path: ./build_constraints.txt
           overwrite: true

       - name: Upload requirements_diff
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: requirements_diff
           path: ./requirements_diff.txt
@@ -118,7 +118,7 @@ jobs:
           python -m script.gen_requirements_all ci

       - name: Upload requirements_all_wheels
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: requirements_all_wheels
           path: ./requirements_all_wheels_*.txt
@@ -135,20 +135,20 @@ jobs:
       arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Download env_file
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           name: env_file

       - name: Download build_constraints
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           name: build_constraints

       - name: Download requirements_diff
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           name: requirements_diff

@@ -158,6 +158,7 @@ jobs:
           sed -i "/uv/d" requirements.txt
           sed -i "/uv/d" requirements_diff.txt

+      # home-assistant/wheels doesn't support sha pinning
       - name: Build wheels
         uses: home-assistant/wheels@2025.07.0
         with:
@@ -184,25 +185,25 @@ jobs:
       arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v5.0.0
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Download env_file
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           name: env_file

       - name: Download build_constraints
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           name: build_constraints

       - name: Download requirements_diff
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           name: requirements_diff

       - name: Download requirements_all_wheels
-        uses: actions/download-artifact@v5.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           name: requirements_all_wheels

@@ -218,6 +219,7 @@ jobs:
           sed -i "/uv/d" requirements.txt
           sed -i "/uv/d" requirements_diff.txt

+      # home-assistant/wheels doesn't support sha pinning
       - name: Build wheels
         uses: home-assistant/wheels@2025.07.0
         with:
.gitignore (2 changes, vendored)
@@ -140,5 +140,5 @@ tmp_cache
 pytest_buckets.txt

 # AI tooling
-.claude
+.claude/settings.local.json
@@ -142,6 +142,7 @@ homeassistant.components.cloud.*
 homeassistant.components.co2signal.*
 homeassistant.components.comelit.*
 homeassistant.components.command_line.*
+homeassistant.components.compit.*
 homeassistant.components.config.*
 homeassistant.components.configurator.*
 homeassistant.components.cookidoo.*
CODEOWNERS (27 changes, generated)
@@ -107,8 +107,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/ambient_station/ @bachya
 /tests/components/ambient_station/ @bachya
 /homeassistant/components/amcrest/ @flacjacket
-/homeassistant/components/analytics/ @home-assistant/core @ludeeus
-/tests/components/analytics/ @home-assistant/core @ludeeus
+/homeassistant/components/analytics/ @home-assistant/core
+/tests/components/analytics/ @home-assistant/core
 /homeassistant/components/analytics_insights/ @joostlek
 /tests/components/analytics_insights/ @joostlek
 /homeassistant/components/android_ip_webcam/ @engrbm87
@@ -292,6 +292,8 @@ build.json @home-assistant/supervisor
 /tests/components/command_line/ @gjohansson-ST
 /homeassistant/components/compensation/ @Petro31
 /tests/components/compensation/ @Petro31
+/homeassistant/components/compit/ @Przemko92
+/tests/components/compit/ @Przemko92
 /homeassistant/components/config/ @home-assistant/core
 /tests/components/config/ @home-assistant/core
 /homeassistant/components/configurator/ @home-assistant/core
@@ -442,8 +444,6 @@ build.json @home-assistant/supervisor
 /tests/components/energyzero/ @klaasnicolaas
-/homeassistant/components/enigma2/ @autinerd
-/tests/components/enigma2/ @autinerd
 /homeassistant/components/enocean/ @bdurrer
 /tests/components/enocean/ @bdurrer
 /homeassistant/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
 /tests/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
 /homeassistant/components/entur_public_transport/ @hfurubotten
@@ -970,6 +970,8 @@ build.json @home-assistant/supervisor
 /tests/components/moat/ @bdraco
+/homeassistant/components/mobile_app/ @home-assistant/core
+/tests/components/mobile_app/ @home-assistant/core
 /homeassistant/components/modbus/ @janiversen
 /tests/components/modbus/ @janiversen
 /homeassistant/components/modem_callerid/ @tkdrob
 /tests/components/modem_callerid/ @tkdrob
 /homeassistant/components/modern_forms/ @wonderslug
@@ -1017,7 +1019,8 @@ build.json @home-assistant/supervisor
 /tests/components/nanoleaf/ @milanmeu @joostlek
 /homeassistant/components/nasweb/ @nasWebio
 /tests/components/nasweb/ @nasWebio
-/homeassistant/components/nederlandse_spoorwegen/ @YarmoM
+/homeassistant/components/nederlandse_spoorwegen/ @YarmoM @heindrichpaul
+/tests/components/nederlandse_spoorwegen/ @YarmoM @heindrichpaul
 /homeassistant/components/ness_alarm/ @nickw444
 /tests/components/ness_alarm/ @nickw444
 /homeassistant/components/nest/ @allenporter
@@ -1349,6 +1352,8 @@ build.json @home-assistant/supervisor
 /tests/components/samsungtv/ @chemelli74 @epenet
 /homeassistant/components/sanix/ @tomaszsluszniak
 /tests/components/sanix/ @tomaszsluszniak
+/homeassistant/components/satel_integra/ @Tommatheussen
+/tests/components/satel_integra/ @Tommatheussen
 /homeassistant/components/scene/ @home-assistant/core
 /tests/components/scene/ @home-assistant/core
 /homeassistant/components/schedule/ @home-assistant/core
@@ -1530,8 +1535,8 @@ build.json @home-assistant/supervisor
 /tests/components/switchbee/ @jafar-atili
 /homeassistant/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski @zerzhang
 /tests/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski @zerzhang
-/homeassistant/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
-/tests/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
+/homeassistant/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur @XiaoLing-git
+/tests/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur @XiaoLing-git
 /homeassistant/components/switcher_kis/ @thecode @YogevBokobza
 /tests/components/switcher_kis/ @thecode @YogevBokobza
 /homeassistant/components/switchmate/ @danielhiversen @qiz-li
@@ -1676,6 +1681,8 @@ build.json @home-assistant/supervisor
 /tests/components/uptime_kuma/ @tr4nt0r
 /homeassistant/components/uptimerobot/ @ludeeus @chemelli74
 /tests/components/uptimerobot/ @ludeeus @chemelli74
+/homeassistant/components/usage_prediction/ @home-assistant/core
+/tests/components/usage_prediction/ @home-assistant/core
 /homeassistant/components/usb/ @bdraco
 /tests/components/usb/ @bdraco
 /homeassistant/components/usgs_earthquakes_feed/ @exxamalte
@@ -1705,6 +1712,8 @@ build.json @home-assistant/supervisor
 /tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven
 /homeassistant/components/vicare/ @CFenner
 /tests/components/vicare/ @CFenner
+/homeassistant/components/victron_remote_monitoring/ @AndyTempel
+/tests/components/victron_remote_monitoring/ @AndyTempel
 /homeassistant/components/vilfo/ @ManneW
 /tests/components/vilfo/ @ManneW
 /homeassistant/components/vivotek/ @HarlemSquirrel
@@ -1720,8 +1729,8 @@ build.json @home-assistant/supervisor
 /tests/components/volumio/ @OnFreund
 /homeassistant/components/volvo/ @thomasddn
 /tests/components/volvo/ @thomasddn
-/homeassistant/components/volvooncall/ @molobrakos
-/tests/components/volvooncall/ @molobrakos
+/homeassistant/components/volvooncall/ @molobrakos @svrooij
+/tests/components/volvooncall/ @molobrakos @svrooij
 /homeassistant/components/wake_on_lan/ @ntilley905
 /tests/components/wake_on_lan/ @ntilley905
 /homeassistant/components/wake_word/ @home-assistant/core @synesthesiam
@@ -2,21 +2,23 @@

 from __future__ import annotations

+import asyncio
 import logging

 from accuweather import AccuWeather

 from homeassistant.components.sensor import DOMAIN as SENSOR_PLATFORM
-from homeassistant.const import CONF_API_KEY, CONF_NAME, Platform
+from homeassistant.const import CONF_API_KEY, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import entity_registry as er
 from homeassistant.helpers.aiohttp_client import async_get_clientsession

-from .const import DOMAIN, UPDATE_INTERVAL_DAILY_FORECAST, UPDATE_INTERVAL_OBSERVATION
+from .const import DOMAIN
 from .coordinator import (
     AccuWeatherConfigEntry,
     AccuWeatherDailyForecastDataUpdateCoordinator,
     AccuWeatherData,
+    AccuWeatherHourlyForecastDataUpdateCoordinator,
     AccuWeatherObservationDataUpdateCoordinator,
 )

@@ -28,7 +30,6 @@ PLATFORMS = [Platform.SENSOR, Platform.WEATHER]
 async def async_setup_entry(hass: HomeAssistant, entry: AccuWeatherConfigEntry) -> bool:
     """Set up AccuWeather as config entry."""
     api_key: str = entry.data[CONF_API_KEY]
-    name: str = entry.data[CONF_NAME]

     location_key = entry.unique_id

@@ -41,26 +42,28 @@ async def async_setup_entry(hass: HomeAssistant, entry: AccuWeatherConfigEntry)
         hass,
         entry,
         accuweather,
-        name,
-        "observation",
-        UPDATE_INTERVAL_OBSERVATION,
     )

     coordinator_daily_forecast = AccuWeatherDailyForecastDataUpdateCoordinator(
         hass,
         entry,
         accuweather,
-        name,
-        "daily forecast",
-        UPDATE_INTERVAL_DAILY_FORECAST,
     )
+    coordinator_hourly_forecast = AccuWeatherHourlyForecastDataUpdateCoordinator(
+        hass,
+        entry,
+        accuweather,
+    )

-    await coordinator_observation.async_config_entry_first_refresh()
-    await coordinator_daily_forecast.async_config_entry_first_refresh()
+    await asyncio.gather(
+        coordinator_observation.async_config_entry_first_refresh(),
+        coordinator_daily_forecast.async_config_entry_first_refresh(),
+        coordinator_hourly_forecast.async_config_entry_first_refresh(),
+    )

     entry.runtime_data = AccuWeatherData(
         coordinator_observation=coordinator_observation,
         coordinator_daily_forecast=coordinator_daily_forecast,
+        coordinator_hourly_forecast=coordinator_hourly_forecast,
     )

     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
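Aside: the switch from two sequential awaits to asyncio.gather is the behavioural core of this hunk. A minimal stand-alone sketch (toy coroutines and made-up delays, not the Home Assistant classes) of why the gathered version completes in the time of the slowest refresh rather than the sum:

import asyncio
import time


async def first_refresh(name: str, delay: float) -> None:
    # Stand-in for DataUpdateCoordinator.async_config_entry_first_refresh():
    # each refresh is dominated by network I/O, so the calls overlap cleanly.
    await asyncio.sleep(delay)
    print(f"{name} refreshed")


async def main() -> None:
    start = time.perf_counter()
    # Awaited one by one (the old pattern) these would take ~0.6 s;
    # gathered, they finish in ~0.3 s, the duration of the slowest one.
    await asyncio.gather(
        first_refresh("observation", 0.3),
        first_refresh("daily forecast", 0.2),
        first_refresh("hourly forecast", 0.1),
    )
    print(f"total: {time.perf_counter() - start:.2f} s")


asyncio.run(main())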
@@ -71,3 +71,4 @@ POLLEN_CATEGORY_MAP = {
 }
 UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=10)
 UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
+UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(hours=30)
@@ -3,6 +3,7 @@
 from __future__ import annotations

 from asyncio import timeout
+from collections.abc import Awaitable, Callable
 from dataclasses import dataclass
 from datetime import timedelta
 import logging
@@ -12,6 +13,7 @@ from accuweather import AccuWeather, ApiError, InvalidApiKeyError, RequestsExcee
 from aiohttp.client_exceptions import ClientConnectorError

 from homeassistant.config_entries import ConfigEntry
+from homeassistant.const import CONF_NAME
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
 from homeassistant.helpers.update_coordinator import (
@@ -20,7 +22,13 @@ from homeassistant.helpers.update_coordinator import (
     UpdateFailed,
 )

-from .const import DOMAIN, MANUFACTURER
+from .const import (
+    DOMAIN,
+    MANUFACTURER,
+    UPDATE_INTERVAL_DAILY_FORECAST,
+    UPDATE_INTERVAL_HOURLY_FORECAST,
+    UPDATE_INTERVAL_OBSERVATION,
+)

 EXCEPTIONS = (ApiError, ClientConnectorError, InvalidApiKeyError, RequestsExceededError)

@@ -33,6 +41,7 @@ class AccuWeatherData:

     coordinator_observation: AccuWeatherObservationDataUpdateCoordinator
     coordinator_daily_forecast: AccuWeatherDailyForecastDataUpdateCoordinator
+    coordinator_hourly_forecast: AccuWeatherHourlyForecastDataUpdateCoordinator


 type AccuWeatherConfigEntry = ConfigEntry[AccuWeatherData]
@@ -48,13 +57,11 @@ class AccuWeatherObservationDataUpdateCoordinator(
         hass: HomeAssistant,
         config_entry: AccuWeatherConfigEntry,
         accuweather: AccuWeather,
-        name: str,
-        coordinator_type: str,
-        update_interval: timedelta,
     ) -> None:
         """Initialize."""
         self.accuweather = accuweather
         self.location_key = accuweather.location_key
+        name = config_entry.data[CONF_NAME]

         if TYPE_CHECKING:
             assert self.location_key is not None
@@ -65,8 +72,8 @@ class AccuWeatherObservationDataUpdateCoordinator(
             hass,
             _LOGGER,
             config_entry=config_entry,
-            name=f"{name} ({coordinator_type})",
-            update_interval=update_interval,
+            name=f"{name} (observation)",
+            update_interval=UPDATE_INTERVAL_OBSERVATION,
         )

     async def _async_update_data(self) -> dict[str, Any]:
@@ -86,23 +93,25 @@ class AccuWeatherObservationDataUpdateCoordinator(
         return result


-class AccuWeatherDailyForecastDataUpdateCoordinator(
+class AccuWeatherForecastDataUpdateCoordinator(
     TimestampDataUpdateCoordinator[list[dict[str, Any]]]
 ):
-    """Class to manage fetching AccuWeather data API."""
+    """Base class for AccuWeather forecast."""

     def __init__(
         self,
         hass: HomeAssistant,
         config_entry: AccuWeatherConfigEntry,
         accuweather: AccuWeather,
-        name: str,
         coordinator_type: str,
         update_interval: timedelta,
+        fetch_method: Callable[..., Awaitable[list[dict[str, Any]]]],
     ) -> None:
         """Initialize."""
         self.accuweather = accuweather
         self.location_key = accuweather.location_key
+        self._fetch_method = fetch_method
+        name = config_entry.data[CONF_NAME]

         if TYPE_CHECKING:
             assert self.location_key is not None
@@ -118,12 +127,10 @@ class AccuWeatherDailyForecastDataUpdateCoordinator(
         )

     async def _async_update_data(self) -> list[dict[str, Any]]:
-        """Update data via library."""
+        """Update forecast data via library."""
         try:
             async with timeout(10):
-                result = await self.accuweather.async_get_daily_forecast(
-                    language=self.hass.config.language
-                )
+                result = await self._fetch_method(language=self.hass.config.language)
         except EXCEPTIONS as error:
             raise UpdateFailed(
                 translation_domain=DOMAIN,
@@ -132,10 +139,53 @@ class AccuWeatherDailyForecastDataUpdateCoordinator(
             ) from error

         _LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)

         return result


+class AccuWeatherDailyForecastDataUpdateCoordinator(
+    AccuWeatherForecastDataUpdateCoordinator
+):
+    """Coordinator for daily forecast."""
+
+    def __init__(
+        self,
+        hass: HomeAssistant,
+        config_entry: AccuWeatherConfigEntry,
+        accuweather: AccuWeather,
+    ) -> None:
+        """Initialize."""
+        super().__init__(
+            hass,
+            config_entry,
+            accuweather,
+            "daily forecast",
+            UPDATE_INTERVAL_DAILY_FORECAST,
+            fetch_method=accuweather.async_get_daily_forecast,
+        )
+
+
+class AccuWeatherHourlyForecastDataUpdateCoordinator(
+    AccuWeatherForecastDataUpdateCoordinator
+):
+    """Coordinator for hourly forecast."""
+
+    def __init__(
+        self,
+        hass: HomeAssistant,
+        config_entry: AccuWeatherConfigEntry,
+        accuweather: AccuWeather,
+    ) -> None:
+        """Initialize."""
+        super().__init__(
+            hass,
+            config_entry,
+            accuweather,
+            "hourly forecast",
+            UPDATE_INTERVAL_HOURLY_FORECAST,
+            fetch_method=accuweather.async_get_hourly_forecast,
+        )
+
+
 def _get_device_info(location_key: str, name: str) -> DeviceInfo:
     """Get device info."""
     return DeviceInfo(
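Aside: the hunk above folds two near-identical coordinators into one base class that receives its fetch coroutine as a constructor argument. A condensed, self-contained sketch of that injection pattern (Python 3.11+ for asyncio.timeout), using a fake client in place of the AccuWeather library:

import asyncio
from collections.abc import Awaitable, Callable


class ForecastCoordinator:
    """Base class: owns the update loop, delegates the actual fetch."""

    def __init__(self, fetch_method: Callable[..., Awaitable[list[dict]]]) -> None:
        self._fetch_method = fetch_method

    async def update(self) -> list[dict]:
        # One shared timeout/error path serves every forecast type.
        async with asyncio.timeout(10):
            return await self._fetch_method(language="en")


class FakeClient:
    """Stand-in for the AccuWeather client used in the diff."""

    async def async_get_daily_forecast(self, language: str) -> list[dict]:
        return [{"kind": "daily", "language": language}]

    async def async_get_hourly_forecast(self, language: str) -> list[dict]:
        return [{"kind": "hourly", "language": language}]


async def main() -> None:
    client = FakeClient()
    daily = ForecastCoordinator(client.async_get_daily_forecast)
    hourly = ForecastCoordinator(client.async_get_hourly_forecast)
    print(await daily.update(), await hourly.update())


asyncio.run(main())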
@@ -45,6 +45,7 @@ from .coordinator import (
     AccuWeatherConfigEntry,
     AccuWeatherDailyForecastDataUpdateCoordinator,
     AccuWeatherData,
+    AccuWeatherHourlyForecastDataUpdateCoordinator,
     AccuWeatherObservationDataUpdateCoordinator,
 )

@@ -64,6 +65,7 @@ class AccuWeatherEntity(
     CoordinatorWeatherEntity[
         AccuWeatherObservationDataUpdateCoordinator,
         AccuWeatherDailyForecastDataUpdateCoordinator,
+        AccuWeatherHourlyForecastDataUpdateCoordinator,
     ]
 ):
     """Define an AccuWeather entity."""
@@ -76,6 +78,7 @@ class AccuWeatherEntity(
         super().__init__(
             observation_coordinator=accuweather_data.coordinator_observation,
             daily_coordinator=accuweather_data.coordinator_daily_forecast,
+            hourly_coordinator=accuweather_data.coordinator_hourly_forecast,
         )

         self._attr_native_precipitation_unit = UnitOfPrecipitationDepth.MILLIMETERS
@@ -86,10 +89,13 @@ class AccuWeatherEntity(
         self._attr_unique_id = accuweather_data.coordinator_observation.location_key
         self._attr_attribution = ATTRIBUTION
         self._attr_device_info = accuweather_data.coordinator_observation.device_info
-        self._attr_supported_features = WeatherEntityFeature.FORECAST_DAILY
+        self._attr_supported_features = (
+            WeatherEntityFeature.FORECAST_DAILY | WeatherEntityFeature.FORECAST_HOURLY
+        )

         self.observation_coordinator = accuweather_data.coordinator_observation
         self.daily_coordinator = accuweather_data.coordinator_daily_forecast
+        self.hourly_coordinator = accuweather_data.coordinator_hourly_forecast

     @property
     def condition(self) -> str | None:
@@ -207,3 +213,32 @@ class AccuWeatherEntity(
             }
             for item in self.daily_coordinator.data
         ]
+
+    @callback
+    def _async_forecast_hourly(self) -> list[Forecast] | None:
+        """Return the hourly forecast in native units."""
+        return [
+            {
+                ATTR_FORECAST_TIME: utc_from_timestamp(
+                    item["EpochDateTime"]
+                ).isoformat(),
+                ATTR_FORECAST_CLOUD_COVERAGE: item["CloudCover"],
+                ATTR_FORECAST_HUMIDITY: item["RelativeHumidity"],
+                ATTR_FORECAST_NATIVE_TEMP: item["Temperature"][ATTR_VALUE],
+                ATTR_FORECAST_NATIVE_APPARENT_TEMP: item["RealFeelTemperature"][
+                    ATTR_VALUE
+                ],
+                ATTR_FORECAST_NATIVE_PRECIPITATION: item["TotalLiquid"][ATTR_VALUE],
+                ATTR_FORECAST_PRECIPITATION_PROBABILITY: item[
+                    "PrecipitationProbability"
+                ],
+                ATTR_FORECAST_NATIVE_WIND_SPEED: item["Wind"][ATTR_SPEED][ATTR_VALUE],
+                ATTR_FORECAST_NATIVE_WIND_GUST_SPEED: item["WindGust"][ATTR_SPEED][
+                    ATTR_VALUE
+                ],
+                ATTR_FORECAST_UV_INDEX: item["UVIndex"],
+                ATTR_FORECAST_WIND_BEARING: item["Wind"][ATTR_DIRECTION]["Degrees"],
+                ATTR_FORECAST_CONDITION: CONDITION_MAP.get(item["WeatherIcon"]),
+            }
+            for item in self.hourly_coordinator.data
+        ]
@@ -3,10 +3,8 @@
 import logging
 from typing import Any

-from aiohttp import web
 import voluptuous as vol

-from homeassistant.components.http import KEY_HASS, HomeAssistantView
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import ATTR_ENTITY_ID, CONF_DESCRIPTION, CONF_SELECTOR
 from homeassistant.core import (
@@ -28,7 +26,6 @@ from .const import (
     ATTR_STRUCTURE,
     ATTR_TASK_NAME,
     DATA_COMPONENT,
-    DATA_IMAGES,
     DATA_PREFERENCES,
     DOMAIN,
     SERVICE_GENERATE_DATA,
@@ -42,7 +39,6 @@ from .task import (
     GenDataTaskResult,
     GenImageTask,
     GenImageTaskResult,
-    ImageData,
     async_generate_data,
     async_generate_image,
 )
@@ -55,7 +51,6 @@ __all__ = [
     "GenDataTaskResult",
     "GenImageTask",
     "GenImageTaskResult",
-    "ImageData",
     "async_generate_data",
     "async_generate_image",
     "async_setup",
@@ -94,10 +89,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     entity_component = EntityComponent[AITaskEntity](_LOGGER, DOMAIN, hass)
     hass.data[DATA_COMPONENT] = entity_component
     hass.data[DATA_PREFERENCES] = AITaskPreferences(hass)
-    hass.data[DATA_IMAGES] = {}
     await hass.data[DATA_PREFERENCES].async_load()
     async_setup_http(hass)
-    hass.http.register_view(ImageView)
     hass.services.async_register(
         DOMAIN,
         SERVICE_GENERATE_DATA,
@@ -209,28 +202,3 @@ class AITaskPreferences:
     def as_dict(self) -> dict[str, str | None]:
         """Get the current preferences."""
         return {key: getattr(self, key) for key in self.KEYS}
-
-
-class ImageView(HomeAssistantView):
-    """View to generated images."""
-
-    url = f"/api/{DOMAIN}/images/{{filename}}"
-    name = f"api:{DOMAIN}/images"
-
-    async def get(
-        self,
-        request: web.Request,
-        filename: str,
-    ) -> web.Response:
-        """Serve image."""
-        hass = request.app[KEY_HASS]
-        image_storage = hass.data[DATA_IMAGES]
-        image_data = image_storage.get(filename)
-
-        if image_data is None:
-            raise web.HTTPNotFound
-
-        return web.Response(
-            body=image_data.data,
-            content_type=image_data.mime_type,
-        )
@@ -8,19 +8,19 @@ from typing import TYPE_CHECKING, Final
 from homeassistant.util.hass_dict import HassKey

 if TYPE_CHECKING:
+    from homeassistant.components.media_source import local_source
     from homeassistant.helpers.entity_component import EntityComponent

     from . import AITaskPreferences
     from .entity import AITaskEntity
-    from .task import ImageData

 DOMAIN = "ai_task"
 DATA_COMPONENT: HassKey[EntityComponent[AITaskEntity]] = HassKey(DOMAIN)
 DATA_PREFERENCES: HassKey[AITaskPreferences] = HassKey(f"{DOMAIN}_preferences")
-DATA_IMAGES: HassKey[dict[str, ImageData]] = HassKey(f"{DOMAIN}_images")
+DATA_MEDIA_SOURCE: HassKey[local_source.LocalSource] = HassKey(f"{DOMAIN}_media_source")

+IMAGE_DIR: Final = "image"
 IMAGE_EXPIRY_TIME = 60 * 60  # 1 hour
-MAX_IMAGES = 20

 SERVICE_GENERATE_DATA = "generate_data"
 SERVICE_GENERATE_IMAGE = "generate_image"
@@ -1,7 +1,7 @@
 {
   "domain": "ai_task",
   "name": "AI Task",
-  "after_dependencies": ["camera", "http"],
+  "after_dependencies": ["camera"],
   "codeowners": ["@home-assistant/core"],
   "dependencies": ["conversation", "media_source"],
   "documentation": "https://www.home-assistant.io/integrations/ai_task",
@@ -2,89 +2,31 @@

 from __future__ import annotations

-from datetime import timedelta
-import logging
+from pathlib import Path

-from homeassistant.components.http.auth import async_sign_path
-from homeassistant.components.media_player import BrowseError, MediaClass
-from homeassistant.components.media_source import (
-    BrowseMediaSource,
-    MediaSource,
-    MediaSourceItem,
-    PlayMedia,
-    Unresolvable,
-)
+from homeassistant.components.media_source import MediaSource, local_source
 from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import HomeAssistantError

-from .const import DATA_IMAGES, DOMAIN, IMAGE_EXPIRY_TIME
-
-_LOGGER = logging.getLogger(__name__)
+from .const import DATA_MEDIA_SOURCE, DOMAIN, IMAGE_DIR


-async def async_get_media_source(hass: HomeAssistant) -> ImageMediaSource:
-    """Set up image media source."""
-    _LOGGER.debug("Setting up image media source")
-    return ImageMediaSource(hass)
+async def async_get_media_source(hass: HomeAssistant) -> MediaSource:
+    """Set up local media source."""
+    media_dirs = list(hass.config.media_dirs.values())

-
-class ImageMediaSource(MediaSource):
-    """Provide images as media sources."""
-
-    name: str = "AI Generated Images"
-
-    def __init__(self, hass: HomeAssistant) -> None:
-        """Initialize ImageMediaSource."""
-        super().__init__(DOMAIN)
-        self.hass = hass
-
-    async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia:
-        """Resolve media to a url."""
-        image_storage = self.hass.data[DATA_IMAGES]
-        image = image_storage.get(item.identifier)
-
-        if image is None:
-            raise Unresolvable(f"Could not resolve media item: {item.identifier}")
-
-        return PlayMedia(
-            async_sign_path(
-                self.hass,
-                f"/api/{DOMAIN}/images/{item.identifier}",
-                timedelta(seconds=IMAGE_EXPIRY_TIME or 1800),
-            ),
-            image.mime_type,
-        )
+    if not media_dirs:
+        raise HomeAssistantError(
+            "AI Task media source requires at least one media directory configured"
+        )

-    async def async_browse_media(
-        self,
-        item: MediaSourceItem,
-    ) -> BrowseMediaSource:
-        """Return media."""
-        if item.identifier:
-            raise BrowseError("Unknown item")
+    media_dir = Path(media_dirs[0]) / DOMAIN / IMAGE_DIR

-        image_storage = self.hass.data[DATA_IMAGES]
-
-        children = [
-            BrowseMediaSource(
-                domain=DOMAIN,
-                identifier=filename,
-                media_class=MediaClass.IMAGE,
-                media_content_type=image.mime_type,
-                title=image.title or filename,
-                can_play=True,
-                can_expand=False,
-            )
-            for filename, image in image_storage.items()
-        ]
-
-        return BrowseMediaSource(
-            domain=DOMAIN,
-            identifier=None,
-            media_class=MediaClass.APP,
-            media_content_type="",
-            title="AI Generated Images",
-            can_play=False,
-            can_expand=True,
-            children_media_class=MediaClass.IMAGE,
-            children=children,
-        )
+    hass.data[DATA_MEDIA_SOURCE] = source = local_source.LocalSource(
+        hass,
+        DOMAIN,
+        "AI Generated Images",
+        {IMAGE_DIR: str(media_dir)},
+        f"/{DOMAIN}",
+    )
+    return source
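Aside: the rewritten setup resolves a target directory from hass.config.media_dirs before handing it to LocalSource. A tiny stand-alone sketch of just that resolution step (the /media path is illustrative, not a guaranteed default):

from pathlib import Path

DOMAIN = "ai_task"
IMAGE_DIR = "image"

# Stand-in for hass.config.media_dirs: a name -> path mapping.
media_dirs = {"local": "/media"}

if not media_dirs:
    raise RuntimeError("at least one media directory must be configured")

# Generated images land under <first media dir>/ai_task/image.
media_dir = Path(next(iter(media_dirs.values()))) / DOMAIN / IMAGE_DIR
print(media_dir)  # /media/ai_task/image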
@@ -4,7 +4,7 @@ from __future__ import annotations

 from dataclasses import dataclass
 from datetime import datetime, timedelta
-from functools import partial
+import io
 import mimetypes
 from pathlib import Path
 import tempfile
@@ -12,35 +12,33 @@ from typing import Any

 import voluptuous as vol

-from homeassistant.components import camera, conversation, media_source
+from homeassistant.components import camera, conversation, image, media_source
 from homeassistant.components.http.auth import async_sign_path
 from homeassistant.core import HomeAssistant, ServiceResponse, callback
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import llm
 from homeassistant.helpers.chat_session import ChatSession, async_get_chat_session
-from homeassistant.helpers.event import async_call_later
-from homeassistant.helpers.network import get_url
 from homeassistant.util import RE_SANITIZE_FILENAME, slugify

 from .const import (
     DATA_COMPONENT,
-    DATA_IMAGES,
+    DATA_MEDIA_SOURCE,
     DATA_PREFERENCES,
     DOMAIN,
+    IMAGE_DIR,
     IMAGE_EXPIRY_TIME,
-    MAX_IMAGES,
     AITaskEntityFeature,
 )


-def _save_camera_snapshot(image: camera.Image) -> Path:
+def _save_camera_snapshot(image_data: camera.Image | image.Image) -> Path:
     """Save camera snapshot to temp file."""
     with tempfile.NamedTemporaryFile(
         mode="wb",
-        suffix=mimetypes.guess_extension(image.content_type, False),
+        suffix=mimetypes.guess_extension(image_data.content_type, False),
         delete=False,
     ) as temp_file:
-        temp_file.write(image.content)
+        temp_file.write(image_data.content)
         return Path(temp_file.name)
@@ -56,26 +54,31 @@ async def _resolve_attachments(
     for attachment in attachments or []:
         media_content_id = attachment["media_content_id"]

-        # Special case for camera media sources
-        if media_content_id.startswith("media-source://camera/"):
-            # Extract entity_id from the media content ID
-            entity_id = media_content_id.removeprefix("media-source://camera/")
+        # Special case for certain media sources
+        for integration in camera, image:
+            media_source_prefix = f"media-source://{integration.DOMAIN}/"
+            if not media_content_id.startswith(media_source_prefix):
+                continue

-            # Get snapshot from camera
-            image = await camera.async_get_image(hass, entity_id)
+            # Extract entity_id from the media content ID
+            entity_id = media_content_id.removeprefix(media_source_prefix)
+
+            # Get snapshot from entity
+            image_data = await integration.async_get_image(hass, entity_id)

             temp_filename = await hass.async_add_executor_job(
-                _save_camera_snapshot, image
+                _save_camera_snapshot, image_data
             )
             created_files.append(temp_filename)

             resolved_attachments.append(
                 conversation.Attachment(
                     media_content_id=media_content_id,
-                    mime_type=image.content_type,
+                    mime_type=image_data.content_type,
                     path=temp_filename,
                 )
             )
+            break
         else:
             # Handle regular media sources
             media = await media_source.async_resolve_media(hass, media_content_id, None)
@@ -158,24 +161,6 @@ async def async_generate_data(
     )


-def _cleanup_images(image_storage: dict[str, ImageData], num_to_remove: int) -> None:
-    """Remove old images to keep the storage size under the limit."""
-    if num_to_remove <= 0:
-        return
-
-    if num_to_remove >= len(image_storage):
-        image_storage.clear()
-        return
-
-    sorted_images = sorted(
-        image_storage.items(),
-        key=lambda item: item[1].timestamp,
-    )
-
-    for filename, _ in sorted_images[:num_to_remove]:
-        image_storage.pop(filename, None)
-
-
 async def async_generate_image(
     hass: HomeAssistant,
     *,
@@ -225,36 +210,34 @@ async def async_generate_image(
     if service_result.get("revised_prompt") is None:
         service_result["revised_prompt"] = instructions

-    image_storage = hass.data[DATA_IMAGES]
-
-    if len(image_storage) + 1 > MAX_IMAGES:
-        _cleanup_images(image_storage, len(image_storage) + 1 - MAX_IMAGES)
+    source = hass.data[DATA_MEDIA_SOURCE]

     current_time = datetime.now()
     ext = mimetypes.guess_extension(task_result.mime_type, False) or ".png"
     sanitized_task_name = RE_SANITIZE_FILENAME.sub("", slugify(task_name))
-    filename = f"{current_time.strftime('%Y-%m-%d_%H%M%S')}_{sanitized_task_name}{ext}"

-    image_storage[filename] = ImageData(
-        data=image_data,
-        timestamp=int(current_time.timestamp()),
-        mime_type=task_result.mime_type,
-        title=service_result["revised_prompt"],
+    image_file = ImageData(
+        filename=f"{current_time.strftime('%Y-%m-%d_%H%M%S')}_{sanitized_task_name}{ext}",
+        file=io.BytesIO(image_data),
+        content_type=task_result.mime_type,
     )

-    def _purge_image(filename: str, now: datetime) -> None:
-        """Remove image from storage."""
-        image_storage.pop(filename, None)
+    target_folder = media_source.MediaSourceItem.from_uri(
+        hass, f"media-source://{DOMAIN}/{IMAGE_DIR}", None
+    )

-    if IMAGE_EXPIRY_TIME > 0:
-        async_call_later(hass, IMAGE_EXPIRY_TIME, partial(_purge_image, filename))
+    service_result["media_source_id"] = await source.async_upload_media(
+        target_folder, image_file
+    )

-    service_result["url"] = get_url(hass) + async_sign_path(
+    item = media_source.MediaSourceItem.from_uri(
+        hass, service_result["media_source_id"], None
+    )
+    service_result["url"] = async_sign_path(
         hass,
-        f"/api/{DOMAIN}/images/{filename}",
-        timedelta(seconds=IMAGE_EXPIRY_TIME or 1800),
+        (await source.async_resolve_media(item)).url,
+        timedelta(seconds=IMAGE_EXPIRY_TIME),
     )
-    service_result["media_source_id"] = f"media-source://{DOMAIN}/images/{filename}"

     return service_result
@@ -359,20 +342,8 @@ class GenImageTaskResult:

 @dataclass(slots=True)
 class ImageData:
-    """Image data for stored generated images."""
+    """Implementation of media_source.local_source.UploadedFile protocol."""

-    data: bytes
-    """Raw image data."""
-
-    timestamp: int
-    """Timestamp when the image was generated, as a Unix timestamp."""
-
-    mime_type: str
-    """MIME type of the image."""
-
-    title: str
-    """Title of the image, usually the prompt used to generate it."""
-
-    def __str__(self) -> str:
-        """Return image data as a string."""
-        return f"<ImageData {self.title}: {id(self)}>"
+    filename: str
+    file: io.IOBase
+    content_type: str
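Aside: after this change ImageData is nothing but a three-field carrier that duck-types as the uploader's expected file object. A self-contained sketch of that shape, with the expected protocol spelled out explicitly as an assumption about what the upload helper consumes:

from dataclasses import dataclass
import io
from typing import Protocol


class UploadedFile(Protocol):
    """Assumed shape of what an upload helper consumes."""

    filename: str
    file: io.IOBase
    content_type: str


@dataclass(slots=True)
class ImageData:
    """Duck-types as UploadedFile: no methods, just the three fields."""

    filename: str
    file: io.IOBase
    content_type: str


def upload(target: str, item: UploadedFile) -> str:
    # Stand-in for source.async_upload_media(); returns a media id.
    return f"media-source://{target}/{item.filename}"


img = ImageData("2025-01-01_000000_demo.png", io.BytesIO(b"\x89PNG"), "image/png")
print(upload("ai_task/image", img))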
@@ -3,7 +3,6 @@
 from __future__ import annotations

 from genie_partner_sdk.client import AladdinConnectClient
-from genie_partner_sdk.model import GarageDoor

 from homeassistant.const import Platform
 from homeassistant.core import HomeAssistant
@@ -36,22 +35,7 @@ async def async_setup_entry(
         api.AsyncConfigEntryAuth(aiohttp_client.async_get_clientsession(hass), session)
     )

-    sdk_doors = await client.get_doors()
-
-    # Convert SDK GarageDoor objects to integration GarageDoor objects
-    doors = [
-        GarageDoor(
-            {
-                "device_id": door.device_id,
-                "door_number": door.door_number,
-                "name": door.name,
-                "status": door.status,
-                "link_status": door.link_status,
-                "battery_level": door.battery_level,
-            }
-        )
-        for door in sdk_doors
-    ]
+    doors = await client.get_doors()

     entry.runtime_data = {
         door.unique_id: AladdinConnectCoordinator(hass, entry, client, door)
@@ -41,4 +41,10 @@ class AladdinConnectCoordinator(DataUpdateCoordinator[GarageDoor]):
     async def _async_update_data(self) -> GarageDoor:
         """Fetch data from the Aladdin Connect API."""
         await self.client.update_door(self.data.device_id, self.data.door_number)
+        self.data.status = self.client.get_door_status(
+            self.data.device_id, self.data.door_number
+        )
+        self.data.battery_level = self.client.get_battery_status(
+            self.data.device_id, self.data.door_number
+        )
         return self.data
@@ -49,7 +49,9 @@ class AladdinCoverEntity(AladdinConnectEntity, CoverEntity):
     @property
     def is_closed(self) -> bool | None:
         """Update is closed attribute."""
-        return self.coordinator.data.status == "closed"
+        if (status := self.coordinator.data.status) is None:
+            return None
+        return status == "closed"

     @property
     def is_closing(self) -> bool | None:
@@ -12,5 +12,5 @@
   "documentation": "https://www.home-assistant.io/integrations/aladdin_connect",
   "integration_type": "hub",
   "iot_class": "cloud_polling",
-  "requirements": ["genie-partner-sdk==1.0.10"]
+  "requirements": ["genie-partner-sdk==1.0.11"]
 }
@@ -2,7 +2,7 @@
   "domain": "analytics",
   "name": "Analytics",
   "after_dependencies": ["energy", "hassio", "recorder"],
-  "codeowners": ["@home-assistant/core", "@ludeeus"],
+  "codeowners": ["@home-assistant/core"],
   "dependencies": ["api", "websocket_api", "http"],
   "documentation": "https://www.home-assistant.io/integrations/analytics",
   "integration_type": "system",
@@ -33,9 +33,11 @@ from homeassistant.const import (
 )
 from homeassistant.core import Event, HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady
+from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.device_registry import format_mac
 from homeassistant.helpers.dispatcher import async_dispatcher_send
 from homeassistant.helpers.storage import STORAGE_DIR
+from homeassistant.helpers.typing import ConfigType

 from .const import (
     CONF_ADB_SERVER_IP,
@@ -46,10 +48,12 @@ from .const import (
     DEFAULT_ADB_SERVER_PORT,
     DEVICE_ANDROIDTV,
     DEVICE_FIRETV,
+    DOMAIN,
     PROP_ETHMAC,
     PROP_WIFIMAC,
     SIGNAL_CONFIG_ENTITY,
 )
+from .services import async_setup_services

 ADB_PYTHON_EXCEPTIONS: tuple = (
     AdbTimeoutError,
@@ -63,6 +67,8 @@ ADB_PYTHON_EXCEPTIONS: tuple = (
 )
 ADB_TCP_EXCEPTIONS: tuple = (ConnectionResetError, RuntimeError)

+CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
+
 PLATFORMS = [Platform.MEDIA_PLAYER, Platform.REMOTE]
 RELOAD_OPTIONS = [CONF_STATE_DETECTION_RULES]

@@ -188,6 +194,12 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     return True


+async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
+    """Set up the Android TV / Fire TV integration."""
+    async_setup_services(hass)
+    return True
+
+
 async def async_setup_entry(hass: HomeAssistant, entry: AndroidTVConfigEntry) -> bool:
     """Set up Android Debug Bridge platform."""
@@ -8,7 +8,6 @@ import logging

 from androidtv.constants import APPS, KEYS
 from androidtv.setup_async import AndroidTVAsync, FireTVAsync
-import voluptuous as vol

 from homeassistant.components import persistent_notification
 from homeassistant.components.media_player import (
@@ -17,9 +16,7 @@ from homeassistant.components.media_player import (
     MediaPlayerEntityFeature,
     MediaPlayerState,
 )
-from homeassistant.const import ATTR_COMMAND
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers import config_validation as cv, entity_platform
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.util.dt import utcnow
@@ -39,19 +36,10 @@ from .const import (
     SIGNAL_CONFIG_ENTITY,
 )
 from .entity import AndroidTVEntity, adb_decorator
+from .services import ATTR_ADB_RESPONSE, ATTR_HDMI_INPUT, SERVICE_LEARN_SENDEVENT

 _LOGGER = logging.getLogger(__name__)

-ATTR_ADB_RESPONSE = "adb_response"
-ATTR_DEVICE_PATH = "device_path"
-ATTR_HDMI_INPUT = "hdmi_input"
-ATTR_LOCAL_PATH = "local_path"
-
-SERVICE_ADB_COMMAND = "adb_command"
-SERVICE_DOWNLOAD = "download"
-SERVICE_LEARN_SENDEVENT = "learn_sendevent"
-SERVICE_UPLOAD = "upload"
-
 # Translate from `AndroidTV` / `FireTV` reported state to HA state.
 ANDROIDTV_STATES = {
     "off": MediaPlayerState.OFF,
@@ -77,32 +65,6 @@ async def async_setup_entry(
         ]
     )

-    platform = entity_platform.async_get_current_platform()
-    platform.async_register_entity_service(
-        SERVICE_ADB_COMMAND,
-        {vol.Required(ATTR_COMMAND): cv.string},
-        "adb_command",
-    )
-    platform.async_register_entity_service(
-        SERVICE_LEARN_SENDEVENT, None, "learn_sendevent"
-    )
-    platform.async_register_entity_service(
-        SERVICE_DOWNLOAD,
-        {
-            vol.Required(ATTR_DEVICE_PATH): cv.string,
-            vol.Required(ATTR_LOCAL_PATH): cv.string,
-        },
-        "service_download",
-    )
-    platform.async_register_entity_service(
-        SERVICE_UPLOAD,
-        {
-            vol.Required(ATTR_DEVICE_PATH): cv.string,
-            vol.Required(ATTR_LOCAL_PATH): cv.string,
-        },
-        "service_upload",
-    )
-

 class ADBDevice(AndroidTVEntity, MediaPlayerEntity):
     """Representation of an Android or Fire TV device."""
homeassistant/components/androidtv/services.py (new file, 66 lines)

"""Services for Android/Fire TV devices."""

from __future__ import annotations

import voluptuous as vol

from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN
from homeassistant.const import ATTR_COMMAND
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, service

from .const import DOMAIN

ATTR_ADB_RESPONSE = "adb_response"
ATTR_DEVICE_PATH = "device_path"
ATTR_HDMI_INPUT = "hdmi_input"
ATTR_LOCAL_PATH = "local_path"

SERVICE_ADB_COMMAND = "adb_command"
SERVICE_DOWNLOAD = "download"
SERVICE_LEARN_SENDEVENT = "learn_sendevent"
SERVICE_UPLOAD = "upload"


@callback
def async_setup_services(hass: HomeAssistant) -> None:
    """Register the Android TV / Fire TV services."""

    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_ADB_COMMAND,
        entity_domain=MEDIA_PLAYER_DOMAIN,
        schema={vol.Required(ATTR_COMMAND): cv.string},
        func="adb_command",
    )
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_LEARN_SENDEVENT,
        entity_domain=MEDIA_PLAYER_DOMAIN,
        schema=None,
        func="learn_sendevent",
    )
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_DOWNLOAD,
        entity_domain=MEDIA_PLAYER_DOMAIN,
        schema={
            vol.Required(ATTR_DEVICE_PATH): cv.string,
            vol.Required(ATTR_LOCAL_PATH): cv.string,
        },
        func="service_download",
    )
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_UPLOAD,
        entity_domain=MEDIA_PLAYER_DOMAIN,
        schema={
            vol.Required(ATTR_DEVICE_PATH): cv.string,
            vol.Required(ATTR_LOCAL_PATH): cv.string,
        },
        func="service_upload",
    )
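Aside: moving registration out of the media_player platform into a single async_setup call means every service name is registered exactly once, no matter how many config entries or platforms load. A toy sketch of that invariant (the Registry class mimics only the relevant slice of a service registry and is not a Home Assistant API):

class Registry:
    """Mimics only the relevant slice of a service registry; not a HA API."""

    def __init__(self) -> None:
        self.services: dict[str, object] = {}

    def register(self, name: str, handler: object) -> None:
        if name in self.services:
            # Per-platform registration would hit this on the second entry.
            raise ValueError(f"service {name!r} already registered")
        self.services[name] = handler


def setup_services(registry: Registry) -> None:
    # Called exactly once at integration setup, so each service name is
    # registered a single time regardless of how many config entries load.
    for name in ("adb_command", "learn_sendevent", "download", "upload"):
        registry.register(name, lambda call: None)


registry = Registry()
setup_services(registry)
print(sorted(registry.services))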
@@ -16,7 +16,7 @@ from .coordinator import (
     AOSmithStatusCoordinator,
 )

-PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.WATER_HEATER]
+PLATFORMS: list[Platform] = [Platform.SELECT, Platform.SENSOR, Platform.WATER_HEATER]


 async def async_setup_entry(hass: HomeAssistant, entry: AOSmithConfigEntry) -> bool:
@@ -1,5 +1,10 @@
 {
   "entity": {
+    "select": {
+      "hot_water_plus_level": {
+        "default": "mdi:water-plus"
+      }
+    },
     "sensor": {
       "hot_water_availability": {
         "default": "mdi:water-thermometer"
homeassistant/components/aosmith/select.py (new file, 70 lines)

"""The select platform for the A. O. Smith integration."""

from homeassistant.components.select import SelectEntity
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import AOSmithConfigEntry
from .coordinator import AOSmithStatusCoordinator
from .entity import AOSmithStatusEntity

HWP_LEVEL_HA_TO_AOSMITH = {
    "off": 0,
    "level1": 1,
    "level2": 2,
    "level3": 3,
}
HWP_LEVEL_AOSMITH_TO_HA = {value: key for key, value in HWP_LEVEL_HA_TO_AOSMITH.items()}


async def async_setup_entry(
    hass: HomeAssistant,
    entry: AOSmithConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up A. O. Smith select platform."""
    data = entry.runtime_data

    async_add_entities(
        AOSmithHotWaterPlusSelectEntity(data.status_coordinator, device.junction_id)
        for device in data.status_coordinator.data.values()
        if device.supports_hot_water_plus
    )


class AOSmithHotWaterPlusSelectEntity(AOSmithStatusEntity, SelectEntity):
    """Class for the Hot Water+ select entity."""

    _attr_translation_key = "hot_water_plus_level"
    _attr_options = list(HWP_LEVEL_HA_TO_AOSMITH)

    def __init__(self, coordinator: AOSmithStatusCoordinator, junction_id: str) -> None:
        """Initialize the entity."""
        super().__init__(coordinator, junction_id)
        self._attr_unique_id = f"hot_water_plus_level_{junction_id}"

    @property
    def suggested_object_id(self) -> str | None:
        """Override the suggested object id to make '+' get converted to 'plus' in the entity id."""
        return "hot_water_plus_level"

    @property
    def current_option(self) -> str | None:
        """Return the current Hot Water+ mode."""
        hot_water_plus_level = self.device.status.hot_water_plus_level
        return (
            None
            if hot_water_plus_level is None
            else HWP_LEVEL_AOSMITH_TO_HA.get(hot_water_plus_level)
        )

    async def async_select_option(self, option: str) -> None:
        """Set the Hot Water+ mode."""
        aosmith_hwp_level = HWP_LEVEL_HA_TO_AOSMITH[option]
        await self.client.update_mode(
            junction_id=self.junction_id,
            mode=self.device.status.current_mode,
            hot_water_plus_level=aosmith_hwp_level,
        )

        await self.coordinator.async_request_refresh()
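Aside: the select entity above translates between HA option strings and the vendor's integer levels with a pair of dictionaries. A stand-alone sketch of that round-trip, with the values copied from the file and the graceful handling of unknown levels:

HWP_LEVEL_HA_TO_AOSMITH = {"off": 0, "level1": 1, "level2": 2, "level3": 3}
# Reverse mapping built once, so lookups stay O(1) in both directions.
HWP_LEVEL_AOSMITH_TO_HA = {v: k for k, v in HWP_LEVEL_HA_TO_AOSMITH.items()}


def current_option(device_level: int | None) -> str | None:
    # Mirrors the entity's current_option property: unknown levels map to None.
    if device_level is None:
        return None
    return HWP_LEVEL_AOSMITH_TO_HA.get(device_level)


assert current_option(2) == "level2"
assert current_option(None) is None
assert current_option(99) is None  # unexpected vendor value degrades gracefully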
@@ -26,6 +26,17 @@
     }
   },
   "entity": {
+    "select": {
+      "hot_water_plus_level": {
+        "name": "Hot Water+ level",
+        "state": {
+          "off": "[%key:common::state::off%]",
+          "level1": "Level 1",
+          "level2": "Level 2",
+          "level3": "Level 3"
+        }
+      }
+    },
     "sensor": {
       "hot_water_availability": {
         "name": "Hot water availability"
@@ -7,5 +7,5 @@
   "iot_class": "local_polling",
   "loggers": ["apcaccess"],
   "quality_scale": "platinum",
-  "requirements": ["aioapcaccess==0.4.2"]
+  "requirements": ["aioapcaccess==1.0.0"]
 }
@@ -395,6 +395,7 @@ SENSORS: dict[str, SensorEntityDescription] = {
     "upsmode": SensorEntityDescription(
         key="upsmode",
         translation_key="ups_mode",
+        entity_category=EntityCategory.DIAGNOSTIC,
     ),
     "upsname": SensorEntityDescription(
         key="upsname",
@@ -466,7 +467,10 @@ async def async_setup_entry(
     # periodical (or manual) self test since last daemon restart. It might not be available
     # when we set up the integration, and we do not know if it would ever be available. Here we
     # add it anyway and mark it as unknown initially.
-    for resource in available_resources | {LAST_S_TEST}:
+    #
+    # We also sort the resources to ensure the order of entities created is deterministic since
+    # "APCMODEL" and "MODEL" resources map to the same "Model" name.
+    for resource in sorted(available_resources | {LAST_S_TEST}):
         if resource not in SENSORS:
             _LOGGER.warning("Invalid resource from APCUPSd: %s", resource.upper())
             continue
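Aside: the added comment in this hunk is about determinism. A quick stand-alone illustration: iterating a set union directly yields an arbitrary order, while sorted() pins down which of two same-named resources is created first:

available_resources = {"MODEL", "APCMODEL", "UPSNAME"}
LAST_S_TEST = "LASTSTEST"

# Set iteration order is an implementation detail and can differ between
# runs; sorting makes the entity creation order reproducible, so the same
# one of two duplicate-named resources always wins.
for resource in sorted(available_resources | {LAST_S_TEST}):
    print(resource)
# APCMODEL, LASTSTEST, MODEL, UPSNAME -- the same order every run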
homeassistant/components/assist_pipeline/acknowledge.mp3 (new binary file, not shown)
@@ -1,5 +1,7 @@
 """Constants for the Assist pipeline integration."""

+from pathlib import Path
+
 DOMAIN = "assist_pipeline"

 DATA_CONFIG = f"{DOMAIN}.config"
@@ -23,3 +25,5 @@ SAMPLES_PER_CHUNK = SAMPLE_RATE // (1000 // MS_PER_CHUNK)  # 10 ms @ 16Khz
 BYTES_PER_CHUNK = SAMPLES_PER_CHUNK * SAMPLE_WIDTH * SAMPLE_CHANNELS  # 16-bit

 OPTION_PREFERRED = "preferred"
+
+ACKNOWLEDGE_PATH = Path(__file__).parent / "acknowledge.mp3"
@@ -23,7 +23,12 @@ from homeassistant.components import conversation, stt, tts, wake_word, websocke
 from homeassistant.const import ATTR_SUPPORTED_FEATURES, MATCH_ALL
 from homeassistant.core import Context, HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
-from homeassistant.helpers import chat_session, intent
+from homeassistant.helpers import (
+    chat_session,
+    device_registry as dr,
+    entity_registry as er,
+    intent,
+)
 from homeassistant.helpers.collection import (
     CHANGE_UPDATED,
     CollectionError,
@@ -45,6 +50,7 @@ from homeassistant.util.limited_size_dict import LimitedSizeDict

 from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadSpeexEnhancer
 from .const import (
+    ACKNOWLEDGE_PATH,
     BYTES_PER_CHUNK,
     CONF_DEBUG_RECORDING_DIR,
     DATA_CONFIG,
@@ -113,6 +119,7 @@ PIPELINE_FIELDS: VolDictType = {
     vol.Required("wake_word_entity"): vol.Any(str, None),
     vol.Required("wake_word_id"): vol.Any(str, None),
     vol.Optional("prefer_local_intents"): bool,
+    vol.Optional("acknowledge_media_id"): str,
 }

 STORED_PIPELINE_RUNS = 10
@@ -1066,8 +1073,11 @@ class PipelineRun:
         intent_input: str,
         conversation_id: str,
         conversation_extra_system_prompt: str | None,
-    ) -> str:
-        """Run intent recognition portion of pipeline. Returns text to speak."""
+    ) -> tuple[str, bool]:
+        """Run intent recognition portion of pipeline.
+
+        Returns (speech, all_targets_in_satellite_area).
+        """
         if self.intent_agent is None or self._conversation_data is None:
             raise RuntimeError("Recognize intent was not prepared")

@@ -1116,6 +1126,7 @@ class PipelineRun:

         agent_id = self.intent_agent.id
         processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT
+        all_targets_in_satellite_area = False
         intent_response: intent.IntentResponse | None = None
         if not processed_locally and not self._intent_agent_only:
             # Sentence triggers override conversation agent
@@ -1290,6 +1301,17 @@ class PipelineRun:
                 if tts_input_stream and self._streamed_response_text:
                     tts_input_stream.put_nowait(None)

+            if agent_id == conversation.HOME_ASSISTANT_AGENT:
+                # Check if all targeted entities were in the same area as
+                # the satellite device.
+                # If so, the satellite should respond with an acknowledge beep
+                # instead of a full response.
+                all_targets_in_satellite_area = (
+                    self._get_all_targets_in_satellite_area(
+                        conversation_result.response, self._device_id
+                    )
+                )
+
         except Exception as src_error:
             _LOGGER.exception("Unexpected error during intent recognition")
             raise IntentRecognitionError(
@@ -1312,7 +1334,45 @@ class PipelineRun:
         if conversation_result.continue_conversation:
             self._conversation_data.continue_conversation_agent = agent_id

-        return speech
+        return (speech, all_targets_in_satellite_area)
+
+    def _get_all_targets_in_satellite_area(
+        self, intent_response: intent.IntentResponse, device_id: str | None
+    ) -> bool:
+        """Return true if all targeted entities were in the same area as the device."""
+        if (
+            (intent_response.response_type != intent.IntentResponseType.ACTION_DONE)
+            or (not intent_response.matched_states)
+            or (not device_id)
+        ):
+            return False
+
+        device_registry = dr.async_get(self.hass)
+
+        if (not (device := device_registry.async_get(device_id))) or (
+            not device.area_id
+        ):
+            return False
+
+        entity_registry = er.async_get(self.hass)
+        for state in intent_response.matched_states:
+            entity = entity_registry.async_get(state.entity_id)
+            if not entity:
+                return False
+
+            if (entity_area_id := entity.area_id) is None:
+                if (entity.device_id is None) or (
+                    (entity_device := device_registry.async_get(entity.device_id))
+                    is None
+                ):
+                    return False
+
+                entity_area_id = entity_device.area_id
+
+            if entity_area_id != device.area_id:
+                return False
+
+        return True

     async def prepare_text_to_speech(self) -> None:
         """Prepare text-to-speech."""
@@ -1350,7 +1410,9 @@ class PipelineRun:
             ),
         ) from err

-    async def text_to_speech(self, tts_input: str) -> None:
+    async def text_to_speech(
+        self, tts_input: str, override_media_path: Path | None = None
+    ) -> None:
         """Run text-to-speech portion of pipeline."""
         assert self.tts_stream is not None

@@ -1362,11 +1424,14 @@ class PipelineRun:
                     "language": self.pipeline.tts_language,
                     "voice": self.pipeline.tts_voice,
                     "tts_input": tts_input,
+                    "acknowledge_override": override_media_path is not None,
                 },
             )
         )

-        if not self._streamed_response_text:
+        if override_media_path:
+            self.tts_stream.async_override_result(override_media_path)
+        elif not self._streamed_response_text:
             self.tts_stream.async_set_message(tts_input)

         tts_output = {
@@ -1664,16 +1729,20 @@ class PipelineInput:

             if self.run.end_stage != PipelineStage.STT:
                 tts_input = self.tts_input
+                all_targets_in_satellite_area = False

                 if current_stage == PipelineStage.INTENT:
                     # intent-recognition
                     assert intent_input is not None
-                    tts_input = await self.run.recognize_intent(
+                    (
+                        tts_input,
+                        all_targets_in_satellite_area,
+                    ) = await self.run.recognize_intent(
                         intent_input,
                         self.session.conversation_id,
                         self.conversation_extra_system_prompt,
                     )
-                    if tts_input.strip():
+                    if all_targets_in_satellite_area or tts_input.strip():
                         current_stage = PipelineStage.TTS
                     else:
                         # Skip TTS
@@ -1682,8 +1751,14 @@ class PipelineInput:
             if self.run.end_stage != PipelineStage.INTENT:
                 # text-to-speech
                 if current_stage == PipelineStage.TTS:
-                    assert tts_input is not None
-                    await self.run.text_to_speech(tts_input)
+                    if all_targets_in_satellite_area:
+                        # Use acknowledge media instead of full response
+                        await self.run.text_to_speech(
+                            tts_input or "", override_media_path=ACKNOWLEDGE_PATH
+                        )
+                    else:
+                        assert tts_input is not None
+                        await self.run.text_to_speech(tts_input)

         except PipelineError as err:
             self.run.process_event(
@@ -3,6 +3,7 @@
from __future__ import annotations

from collections.abc import Iterable
from dataclasses import replace

from homeassistant.components.select import SelectEntity, SelectEntityDescription
from homeassistant.const import EntityCategory, Platform
@@ -64,15 +65,36 @@ class AssistPipelineSelect(SelectEntity, restore_state.RestoreEntity):
translation_key="pipeline",
entity_category=EntityCategory.CONFIG,
)

_attr_should_poll = False
_attr_current_option = OPTION_PREFERRED
_attr_options = [OPTION_PREFERRED]

def __init__(self, hass: HomeAssistant, domain: str, unique_id_prefix: str) -> None:
def __init__(
self,
hass: HomeAssistant,
domain: str,
unique_id_prefix: str,
index: int = 0,
) -> None:
"""Initialize a pipeline selector."""
if index < 1:
# Keep compatibility
key_suffix = ""
placeholder = ""
else:
key_suffix = f"_{index + 1}"
placeholder = f" {index + 1}"

self.entity_description = replace(
self.entity_description,
key=f"pipeline{key_suffix}",
translation_placeholders={"index": placeholder},
)

self._domain = domain
self._unique_id_prefix = unique_id_prefix
self._attr_unique_id = f"{unique_id_prefix}-pipeline"
self._attr_unique_id = f"{unique_id_prefix}-{self.entity_description.key}"
self.hass = hass
self._update_options()

@@ -87,7 +109,7 @@ class AssistPipelineSelect(SelectEntity, restore_state.RestoreEntity):
)

state = await self.async_get_last_state()
if state is not None and state.state in self.options:
if (state is not None) and (state.state in self.options):
self._attr_current_option = state.state

if self.registry_entry and (device_id := self.registry_entry.device_id):
@@ -97,7 +119,7 @@ class AssistPipelineSelect(SelectEntity, restore_state.RestoreEntity):

def cleanup() -> None:
"""Clean up registered device."""
pipeline_data.pipeline_devices.pop(device_id)
pipeline_data.pipeline_devices.pop(device_id, None)

self.async_on_remove(cleanup)

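A minimal sketch of the key-suffix logic introduced above, re-implemented standalone for illustration (the function name is hypothetical, not part of the change):

def pipeline_key(index: int) -> tuple[str, str]:
    # Mirrors the "keep compatibility" branch: index 0 keeps the legacy key.
    if index < 1:
        return "pipeline", ""
    return f"pipeline_{index + 1}", f" {index + 1}"

# The first selector keeps its old unique ID ("<prefix>-pipeline"); a second
# one becomes "<prefix>-pipeline_2" and, via the placeholder, "Assistant 2".
assert pipeline_key(0) == ("pipeline", "")
assert pipeline_key(1) == ("pipeline_2", " 2")
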
@@ -7,7 +7,7 @@
},
"select": {
"pipeline": {
"name": "Assistant",
"name": "Assistant{index}",
"state": {
"preferred": "Preferred"
}

@@ -120,6 +120,7 @@ class AsusWrtBridge(ABC):

def __init__(self, host: str) -> None:
"""Initialize Bridge."""
self._configuration_url = f"http://{host}"
self._host = host
self._firmware: str | None = None
self._label_mac: str | None = None
@@ -127,6 +128,11 @@ class AsusWrtBridge(ABC):
self._model_id: str | None = None
self._serial_number: str | None = None

@property
def configuration_url(self) -> str:
"""Return configuration URL."""
return self._configuration_url

@property
def host(self) -> str:
"""Return hostname."""
@@ -371,6 +377,7 @@ class AsusWrtHttpBridge(AsusWrtBridge):
# get main router properties
if mac := _identity.mac:
self._label_mac = format_mac(mac)
self._configuration_url = self._api.webpanel
self._firmware = str(_identity.firmware)
self._model = _identity.model
self._model_id = _identity.product_id

@@ -388,13 +388,13 @@ class AsusWrtRouter:
def device_info(self) -> DeviceInfo:
"""Return the device information."""
info = DeviceInfo(
configuration_url=self._api.configuration_url,
identifiers={(DOMAIN, self._entry.unique_id or "AsusWRT")},
name=self.host,
model=self._api.model or "Asus Router",
model_id=self._api.model_id,
serial_number=self._api.serial_number,
manufacturer="Asus",
configuration_url=f"http://{self.host}",
)
if self._api.firmware:
info["sw_version"] = self._api.firmware

@@ -2,13 +2,12 @@

from __future__ import annotations

from collections.abc import Callable, Coroutine
import logging
from typing import Any

from aiohttp import ClientResponseError
from yalexs.activity import ActivityType, ActivityTypes
from yalexs.lock import Lock, LockStatus
from yalexs.activity import ActivityType
from yalexs.lock import Lock, LockOperation, LockStatus
from yalexs.util import get_latest_activity, update_lock_detail_from_activity

from homeassistant.components.lock import ATTR_CHANGED_BY, LockEntity, LockEntityFeature
@@ -50,30 +49,25 @@ class AugustLock(AugustEntity, RestoreEntity, LockEntity):

async def async_lock(self, **kwargs: Any) -> None:
"""Lock the device."""
if self._data.push_updates_connected:
await self._data.async_lock_async(self._device_id, self._hyper_bridge)
return
await self._call_lock_operation(self._data.async_lock)
await self._perform_lock_operation(LockOperation.LOCK)

async def async_open(self, **kwargs: Any) -> None:
"""Open/unlatch the device."""
if self._data.push_updates_connected:
await self._data.async_unlatch_async(self._device_id, self._hyper_bridge)
return
await self._call_lock_operation(self._data.async_unlatch)
await self._perform_lock_operation(LockOperation.OPEN)

async def async_unlock(self, **kwargs: Any) -> None:
"""Unlock the device."""
if self._data.push_updates_connected:
await self._data.async_unlock_async(self._device_id, self._hyper_bridge)
return
await self._call_lock_operation(self._data.async_unlock)
await self._perform_lock_operation(LockOperation.UNLOCK)

async def _call_lock_operation(
self, lock_operation: Callable[[str], Coroutine[Any, Any, list[ActivityTypes]]]
) -> None:
async def _perform_lock_operation(self, operation: LockOperation) -> None:
"""Perform a lock operation."""
try:
activities = await lock_operation(self._device_id)
activities = await self._data.async_operate_lock(
self._device_id,
operation,
self._data.push_updates_connected,
self._hyper_bridge,
)
except ClientResponseError as err:
if err.status == LOCK_JAMMED_ERR:
self._detail.lock_status = LockStatus.JAMMED

@@ -29,5 +29,5 @@
"documentation": "https://www.home-assistant.io/integrations/august",
"iot_class": "cloud_push",
"loggers": ["pubnub", "yalexs"],
"requirements": ["yalexs==9.0.1", "yalexs-ble==3.1.2"]
"requirements": ["yalexs==9.2.0", "yalexs-ble==3.1.2"]
}

@@ -92,7 +92,11 @@ from homeassistant.components.http.ban import (
from homeassistant.components.http.data_validator import RequestDataValidator
from homeassistant.components.http.view import HomeAssistantView
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.network import is_cloud_connection
from homeassistant.helpers.network import (
NoURLAvailableError,
get_url,
is_cloud_connection,
)
from homeassistant.util.network import is_local

from . import indieauth
@@ -125,11 +129,18 @@ class WellKnownOAuthInfoView(HomeAssistantView):

async def get(self, request: web.Request) -> web.Response:
"""Return the well known OAuth2 authorization info."""
hass = request.app[KEY_HASS]
# Some applications require absolute urls, so we prefer using the
# current requests url if possible, with fallback to a relative url.
try:
url_prefix = get_url(hass, require_current_request=True)
except NoURLAvailableError:
url_prefix = ""
return self.json(
{
"authorization_endpoint": "/auth/authorize",
"token_endpoint": "/auth/token",
"revocation_endpoint": "/auth/revoke",
"authorization_endpoint": f"{url_prefix}/auth/authorize",
"token_endpoint": f"{url_prefix}/auth/token",
"revocation_endpoint": f"{url_prefix}/auth/revoke",
"response_types_supported": ["code"],
"service_documentation": (
"https://developers.home-assistant.io/docs/auth_api"

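For illustration only: with an external URL available (the hostname below is an assumed example), the well-known document now carries absolute endpoints; when get_url raises NoURLAvailableError the prefix stays empty and the endpoints remain relative, matching the old behavior.

url_prefix = "https://example.duckdns.org:8123"  # assumed value, not from the change
payload = {
    "authorization_endpoint": f"{url_prefix}/auth/authorize",
    "token_endpoint": f"{url_prefix}/auth/token",
    "revocation_endpoint": f"{url_prefix}/auth/revoke",
    "response_types_supported": ["code"],
}
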
@@ -8,7 +8,7 @@ import threading
from typing import IO, cast

from aiohttp import BodyPartReader
from aiohttp.hdrs import CONTENT_DISPOSITION
from aiohttp.hdrs import CONTENT_DISPOSITION, CONTENT_TYPE
from aiohttp.web import FileResponse, Request, Response, StreamResponse
from multidict import istr

@@ -76,7 +76,8 @@ class DownloadBackupView(HomeAssistantView):
return Response(status=HTTPStatus.NOT_FOUND)

headers = {
CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar"
CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar",
CONTENT_TYPE: "application/x-tar",
}

try:

@@ -14,15 +14,15 @@
},
"automatic_backup_failed_addons": {
"title": "Not all add-ons could be included in automatic backup",
"description": "Add-ons {failed_addons} could not be included in automatic backup. Please check the supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
"description": "Add-ons {failed_addons} could not be included in automatic backup. Please check the Supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
},
"automatic_backup_failed_agents_addons_folders": {
"title": "Automatic backup was created with errors",
"description": "The automatic backup was created with errors:\n* Locations which the backup could not be uploaded to: {failed_agents}\n* Add-ons which could not be backed up: {failed_addons}\n* Folders which could not be backed up: {failed_folders}\n\nPlease check the core and supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
"description": "The automatic backup was created with errors:\n* Locations which the backup could not be uploaded to: {failed_agents}\n* Add-ons which could not be backed up: {failed_addons}\n* Folders which could not be backed up: {failed_folders}\n\nPlease check the Core and Supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
},
"automatic_backup_failed_folders": {
"title": "Not all folders could be included in automatic backup",
"description": "Folders {failed_folders} could not be included in automatic backup. Please check the supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
"description": "Folders {failed_folders} could not be included in automatic backup. Please check the Supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
}
},
"services": {

@@ -497,16 +497,18 @@ class BayesianBinarySensor(BinarySensorEntity):
_LOGGER.debug(
(
"Observation for entity '%s' returned None, it will not be used"
" for Bayesian updating"
" for updating Bayesian sensor '%s'"
),
observation.entity_id,
self.entity_id,
)
continue
_LOGGER.debug(
(
"Observation for template entity returned None rather than a valid"
" boolean, it will not be used for Bayesian updating"
" boolean, it will not be used for updating Bayesian sensor '%s'"
),
self.entity_id,
)
# the prior has been updated and is now the posterior
return prior

@@ -18,9 +18,9 @@
"bleak==1.0.1",
"bleak-retry-connector==4.4.3",
"bluetooth-adapters==2.1.0",
"bluetooth-auto-recovery==1.5.2",
"bluetooth-auto-recovery==1.5.3",
"bluetooth-data-tools==1.28.2",
"dbus-fast==2.44.3",
"habluetooth==5.6.2"
"habluetooth==5.6.4"
]
}

@@ -18,8 +18,10 @@ async def async_get_config_entry_diagnostics(
coordinator = config_entry.runtime_data

device_info = await coordinator.client.get_system_info()
command_list = await coordinator.client.get_command_list()

return {
"remote_command_list": command_list,
"config_entry": async_redact_data(config_entry.as_dict(), TO_REDACT),
"device_info": async_redact_data(device_info, TO_REDACT),
}

@@ -2,28 +2,40 @@

from __future__ import annotations

import logging

from brother import Brother, SnmpError

from homeassistant.components.snmp import async_get_snmp_engine
from homeassistant.const import CONF_HOST, CONF_TYPE, Platform
from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady

from .const import DOMAIN
from .const import (
CONF_COMMUNITY,
DEFAULT_COMMUNITY,
DEFAULT_PORT,
DOMAIN,
SECTION_ADVANCED_SETTINGS,
)
from .coordinator import BrotherConfigEntry, BrotherDataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

PLATFORMS = [Platform.SENSOR]


async def async_setup_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> bool:
"""Set up Brother from a config entry."""
host = entry.data[CONF_HOST]
port = entry.data[SECTION_ADVANCED_SETTINGS][CONF_PORT]
community = entry.data[SECTION_ADVANCED_SETTINGS][CONF_COMMUNITY]
printer_type = entry.data[CONF_TYPE]

snmp_engine = await async_get_snmp_engine(hass)
try:
brother = await Brother.create(
host, printer_type=printer_type, snmp_engine=snmp_engine
host, port, community, printer_type=printer_type, snmp_engine=snmp_engine
)
except (ConnectionError, SnmpError, TimeoutError) as error:
raise ConfigEntryNotReady(
@@ -48,3 +60,22 @@ async def async_setup_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> b
async def async_unload_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


async def async_migrate_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> bool:
"""Migrate an old entry."""
if entry.version == 1 and entry.minor_version < 2:
new_data = entry.data.copy()
new_data[SECTION_ADVANCED_SETTINGS] = {
CONF_PORT: DEFAULT_PORT,
CONF_COMMUNITY: DEFAULT_COMMUNITY,
}
hass.config_entries.async_update_entry(entry, data=new_data, minor_version=2)

_LOGGER.info(
"Migration to configuration version %s.%s successful",
entry.version,
entry.minor_version,
)

return True

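A sketch of the migration's effect on stored entry data (the host value is assumed; the keys and defaults are the ones defined in const.py further down):

before = {"host": "192.168.1.50", "type": "laser"}
after = {
    "host": "192.168.1.50",
    "type": "laser",
    "advanced_settings": {"port": 161, "community": "public"},
}
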
@@ -9,21 +9,65 @@ import voluptuous as vol

from homeassistant.components.snmp import async_get_snmp_engine
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST, CONF_TYPE
from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import section
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
from homeassistant.util.network import is_host_valid

from .const import DOMAIN, PRINTER_TYPES
from .const import (
CONF_COMMUNITY,
DEFAULT_COMMUNITY,
DEFAULT_PORT,
DOMAIN,
PRINTER_TYPES,
SECTION_ADVANCED_SETTINGS,
)

DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST): str,
vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
vol.Required(SECTION_ADVANCED_SETTINGS): section(
vol.Schema(
{
vol.Required(CONF_PORT, default=DEFAULT_PORT): int,
vol.Required(CONF_COMMUNITY, default=DEFAULT_COMMUNITY): str,
},
),
{"collapsed": True},
),
}
)
ZEROCONF_SCHEMA = vol.Schema(
{
vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
vol.Required(SECTION_ADVANCED_SETTINGS): section(
vol.Schema(
{
vol.Required(CONF_PORT, default=DEFAULT_PORT): int,
vol.Required(CONF_COMMUNITY, default=DEFAULT_COMMUNITY): str,
},
),
{"collapsed": True},
),
}
)
RECONFIGURE_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST): str,
vol.Required(SECTION_ADVANCED_SETTINGS): section(
vol.Schema(
{
vol.Required(CONF_PORT, default=DEFAULT_PORT): int,
vol.Required(CONF_COMMUNITY, default=DEFAULT_COMMUNITY): str,
},
),
{"collapsed": True},
),
}
)
RECONFIGURE_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str})


async def validate_input(
@@ -35,7 +79,12 @@ async def validate_input(

snmp_engine = await async_get_snmp_engine(hass)

brother = await Brother.create(user_input[CONF_HOST], snmp_engine=snmp_engine)
brother = await Brother.create(
user_input[CONF_HOST],
user_input[SECTION_ADVANCED_SETTINGS][CONF_PORT],
user_input[SECTION_ADVANCED_SETTINGS][CONF_COMMUNITY],
snmp_engine=snmp_engine,
)
await brother.async_update()

if expected_mac is not None and brother.serial.lower() != expected_mac:
@@ -48,6 +97,7 @@ class BrotherConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Brother Printer."""

VERSION = 1
MINOR_VERSION = 2

def __init__(self) -> None:
"""Initialize."""
@@ -126,13 +176,11 @@ class BrotherConfigFlow(ConfigFlow, domain=DOMAIN):
title = f"{self.brother.model} {self.brother.serial}"
return self.async_create_entry(
title=title,
data={CONF_HOST: self.host, CONF_TYPE: user_input[CONF_TYPE]},
data={CONF_HOST: self.host, **user_input},
)
return self.async_show_form(
step_id="zeroconf_confirm",
data_schema=vol.Schema(
{vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES)}
),
data_schema=ZEROCONF_SCHEMA,
description_placeholders={
"serial_number": self.brother.serial,
"model": self.brother.model,
@@ -160,7 +208,7 @@ class BrotherConfigFlow(ConfigFlow, domain=DOMAIN):
else:
return self.async_update_reload_and_abort(
entry,
data_updates={CONF_HOST: user_input[CONF_HOST]},
data_updates=user_input,
)

return self.async_show_form(

@@ -10,3 +10,10 @@ DOMAIN: Final = "brother"
PRINTER_TYPES: Final = ["laser", "ink"]

UPDATE_INTERVAL = timedelta(seconds=30)

SECTION_ADVANCED_SETTINGS = "advanced_settings"

CONF_COMMUNITY = "community"

DEFAULT_COMMUNITY = "public"
DEFAULT_PORT = 161

@@ -8,7 +8,21 @@
"type": "Type of the printer"
},
"data_description": {
"host": "The hostname or IP address of the Brother printer to control."
"host": "The hostname or IP address of the Brother printer to control.",
"type": "Brother printer type: ink or laser."
},
"sections": {
"advanced_settings": {
"name": "Advanced settings",
"data": {
"port": "[%key:common::config_flow::data::port%]",
"community": "SNMP Community"
},
"data_description": {
"port": "The SNMP port of the Brother printer.",
"community": "A simple password for devices to communicate to each other."
}
}
}
},
"zeroconf_confirm": {
@@ -16,6 +30,22 @@
"title": "Discovered Brother Printer",
"data": {
"type": "[%key:component::brother::config::step::user::data::type%]"
},
"data_description": {
"type": "[%key:component::brother::config::step::user::data_description::type%]"
},
"sections": {
"advanced_settings": {
"name": "Advanced settings",
"data": {
"port": "[%key:common::config_flow::data::port%]",
"community": "SNMP Community"
},
"data_description": {
"port": "The SNMP port of the Brother printer.",
"community": "A simple password for devices to communicate to each other."
}
}
}
},
"reconfigure": {
@@ -25,6 +55,19 @@
},
"data_description": {
"host": "[%key:component::brother::config::step::user::data_description::host%]"
},
"sections": {
"advanced_settings": {
"name": "Advanced settings",
"data": {
"port": "[%key:common::config_flow::data::port%]",
"community": "SNMP Community"
},
"data_description": {
"port": "The SNMP port of the Brother printer.",
"community": "A simple password for devices to communicate to each other."
}
}
}
}
},

@@ -20,5 +20,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/bthome",
"iot_class": "local_push",
"requirements": ["bthome-ble==3.13.1"]
"requirements": ["bthome-ble==3.14.2"]
}

@@ -25,6 +25,7 @@ from homeassistant.const import (
DEGREE,
LIGHT_LUX,
PERCENTAGE,
REVOLUTIONS_PER_MINUTE,
SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
EntityCategory,
UnitOfConductivity,
@@ -269,6 +270,15 @@ SENSOR_DESCRIPTIONS = {
native_unit_of_measurement=DEGREE,
state_class=SensorStateClass.MEASUREMENT,
),
# Rotational speed (rpm)
(
BTHomeExtendedSensorDeviceClass.ROTATIONAL_SPEED,
Units.REVOLUTIONS_PER_MINUTE,
): SensorEntityDescription(
key=f"{BTHomeExtendedSensorDeviceClass.ROTATIONAL_SPEED}_{Units.REVOLUTIONS_PER_MINUTE}",
native_unit_of_measurement=REVOLUTIONS_PER_MINUTE,
state_class=SensorStateClass.MEASUREMENT,
),
# Signal Strength (RSSI) (dB)
(
BTHomeSensorDeviceClass.SIGNAL_STRENGTH,

@@ -37,6 +37,10 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.loader import (
async_get_custom_components,
async_get_loaded_integration,
)
from homeassistant.util.location import async_detect_location_info

from .alexa_config import entity_supported as entity_supported_by_alexa
@@ -431,6 +435,79 @@ class DownloadSupportPackageView(HomeAssistantView):
url = "/api/cloud/support_package"
name = "api:cloud:support_package"

async def _get_integration_info(self, hass: HomeAssistant) -> dict[str, Any]:
"""Collect information about active and custom integrations."""
# Get loaded components from hass.config.components
loaded_components = hass.config.components.copy()

# Get custom integrations
custom_domains = set()
with suppress(Exception):
custom_domains = set(await async_get_custom_components(hass))

# Separate built-in and custom integrations
builtin_integrations = []
custom_integrations = []

for domain in sorted(loaded_components):
try:
integration = async_get_loaded_integration(hass, domain)
except Exception:  # noqa: BLE001
# Broad exception catch for robustness in support package
# generation. If we can't get integration info,
# just add the domain
if domain in custom_domains:
custom_integrations.append(
{
"domain": domain,
"name": "Unknown",
"version": "Unknown",
"documentation": "Unknown",
}
)
else:
builtin_integrations.append(
{
"domain": domain,
"name": "Unknown",
}
)
else:
if domain in custom_domains:
# This is a custom integration
# include version and documentation link
version = (
str(integration.version) if integration.version else "Unknown"
)
if not (documentation := integration.documentation):
documentation = "Unknown"

custom_integrations.append(
{
"domain": domain,
"name": integration.name,
"version": version,
"documentation": documentation,
}
)
else:
# This is a built-in integration.
# No version needed, as it is always the same as the
# Home Assistant version
builtin_integrations.append(
{
"domain": domain,
"name": integration.name,
}
)

return {
"builtin_count": len(builtin_integrations),
"builtin_integrations": builtin_integrations,
"custom_count": len(custom_integrations),
"custom_integrations": custom_integrations,
}

async def _generate_markdown(
self,
hass: HomeAssistant,
@@ -453,6 +530,38 @@ class DownloadSupportPackageView(HomeAssistantView):
markdown = "## System Information\n\n"
markdown += get_domain_table_markdown(hass_info)

# Add integration information
try:
integration_info = await self._get_integration_info(hass)
except Exception:  # noqa: BLE001
# Broad exception catch for robustness in support package generation
# If there's any error getting integration info, just note it
markdown += "## Active integrations\n\n"
markdown += "Unable to collect integration information\n\n"
else:
markdown += "## Active Integrations\n\n"
markdown += f"Built-in integrations: {integration_info['builtin_count']}\n"
markdown += f"Custom integrations: {integration_info['custom_count']}\n\n"

# Built-in integrations
if integration_info["builtin_integrations"]:
markdown += "<details><summary>Built-in integrations</summary>\n\n"
markdown += "Domain | Name\n"
markdown += "--- | ---\n"
for integration in integration_info["builtin_integrations"]:
markdown += f"{integration['domain']} | {integration['name']}\n"
markdown += "\n</details>\n\n"

# Custom integrations
if integration_info["custom_integrations"]:
markdown += "<details><summary>Custom integrations</summary>\n\n"
markdown += "Domain | Name | Version | Documentation\n"
markdown += "--- | --- | --- | ---\n"
for integration in integration_info["custom_integrations"]:
doc_url = integration.get("documentation") or "N/A"
markdown += f"{integration['domain']} | {integration['name']} | {integration['version']} | {doc_url}\n"
markdown += "\n</details>\n\n"

for domain, domain_info in domains_info.items():
domain_info_md = get_domain_table_markdown(domain_info)
markdown += (

@@ -13,6 +13,6 @@
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==1.1.0"],
"requirements": ["hass-nabucasa==1.1.1"],
"single_config_entry": true
}

@@ -2,7 +2,7 @@

from abc import abstractmethod
from datetime import timedelta
from typing import TypeVar
from typing import Any, TypeVar

from aiocomelit.api import (
AlarmDataObject,
@@ -13,7 +13,16 @@ from aiocomelit.api import (
ComelitVedoAreaObject,
ComelitVedoZoneObject,
)
from aiocomelit.const import BRIDGE, VEDO
from aiocomelit.const import (
BRIDGE,
CLIMATE,
COVER,
IRRIGATION,
LIGHT,
OTHER,
SCENARIO,
VEDO,
)
from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData
from aiohttp import ClientSession

@@ -111,6 +120,32 @@ class ComelitBaseCoordinator(DataUpdateCoordinator[T]):
async def _async_update_system_data(self) -> T:
"""Class method for updating data."""

async def _async_remove_stale_devices(
self,
previous_list: dict[int, Any],
current_list: dict[int, Any],
dev_type: str,
) -> None:
"""Remove stale devices."""
device_registry = dr.async_get(self.hass)

for i in previous_list:
if i not in current_list:
_LOGGER.debug(
"Detected change in %s devices: index %s removed",
dev_type,
i,
)
identifier = f"{self.config_entry.entry_id}-{dev_type}-{i}"
device = device_registry.async_get_device(
identifiers={(DOMAIN, identifier)}
)
if device:
device_registry.async_update_device(
device_id=device.id,
remove_config_entry_id=self.config_entry.entry_id,
)


class ComelitSerialBridge(
ComelitBaseCoordinator[dict[str, dict[int, ComelitSerialBridgeObject]]]
@@ -137,7 +172,15 @@ class ComelitSerialBridge(
self,
) -> dict[str, dict[int, ComelitSerialBridgeObject]]:
"""Specific method for updating data."""
return await self.api.get_all_devices()
data = await self.api.get_all_devices()

if self.data:
for dev_type in (CLIMATE, COVER, LIGHT, IRRIGATION, OTHER, SCENARIO):
await self._async_remove_stale_devices(
self.data[dev_type], data[dev_type], dev_type
)

return data


class ComelitVedoSystem(ComelitBaseCoordinator[AlarmDataObject]):
@@ -163,4 +206,14 @@ class ComelitVedoSystem(ComelitBaseCoordinator[AlarmDataObject]):
self,
) -> AlarmDataObject:
"""Specific method for updating data."""
return await self.api.get_all_areas_and_zones()
data = await self.api.get_all_areas_and_zones()

if self.data:
for obj_type in ("alarm_areas", "alarm_zones"):
await self._async_remove_stale_devices(
self.data[obj_type],
data[obj_type],
"area" if obj_type == "alarm_areas" else "zone",
)

return data

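A pure-Python sketch of the stale-device comparison above (device names are assumed for illustration): indexes present in the previous poll but absent from the current one are the ones removed from the device registry.

previous = {1: "light A", 2: "light B"}
current = {1: "light A"}
stale = [i for i in previous if i not in current]
assert stale == [2]
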
@@ -72,9 +72,7 @@ rules:
repair-issues:
status: exempt
comment: no known use cases for repair issues or flows, yet
stale-devices:
status: todo
comment: missing implementation
stale-devices: done

# Platinum
async-dependency: done

homeassistant/components/compit/__init__.py (new file, 45 lines)
@@ -0,0 +1,45 @@
"""The Compit integration."""

from compit_inext_api import CannotConnect, CompitApiConnector, InvalidAuth

from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator

PLATFORMS = [
Platform.CLIMATE,
]


async def async_setup_entry(hass: HomeAssistant, entry: CompitConfigEntry) -> bool:
"""Set up Compit from a config entry."""

session = async_get_clientsession(hass)
connector = CompitApiConnector(session)
try:
connected = await connector.init(
entry.data[CONF_EMAIL], entry.data[CONF_PASSWORD], hass.config.language
)
except CannotConnect as e:
raise ConfigEntryNotReady(f"Error while connecting to Compit: {e}") from e
except InvalidAuth as e:
raise ConfigEntryAuthFailed(
f"Invalid credentials for {entry.data[CONF_EMAIL]}"
) from e

if not connected:
raise ConfigEntryAuthFailed("Authentication API error")

coordinator = CompitDataUpdateCoordinator(hass, entry, connector)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True


async def async_unload_entry(hass: HomeAssistant, entry: CompitConfigEntry) -> bool:
"""Unload an entry for the Compit integration."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

homeassistant/components/compit/climate.py (new file, 264 lines)
@@ -0,0 +1,264 @@
"""Module contains the CompitClimate class for controlling climate entities."""

import logging
from typing import Any

from compit_inext_api import Param, Parameter
from compit_inext_api.consts import (
CompitFanMode,
CompitHVACMode,
CompitParameter,
CompitPresetMode,
)
from propcache.api import cached_property

from homeassistant.components.climate import (
FAN_AUTO,
FAN_HIGH,
FAN_LOW,
FAN_MEDIUM,
FAN_OFF,
PRESET_AWAY,
PRESET_ECO,
PRESET_HOME,
PRESET_NONE,
ClimateEntity,
ClimateEntityFeature,
HVACMode,
)
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN, MANUFACTURER_NAME
from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator

_LOGGER: logging.Logger = logging.getLogger(__name__)

# Device class for climate devices in Compit system
CLIMATE_DEVICE_CLASS = 10
PARALLEL_UPDATES = 0

COMPIT_MODE_MAP = {
CompitHVACMode.COOL: HVACMode.COOL,
CompitHVACMode.HEAT: HVACMode.HEAT,
CompitHVACMode.OFF: HVACMode.OFF,
}

COMPIT_FANSPEED_MAP = {
CompitFanMode.OFF: FAN_OFF,
CompitFanMode.AUTO: FAN_AUTO,
CompitFanMode.LOW: FAN_LOW,
CompitFanMode.MEDIUM: FAN_MEDIUM,
CompitFanMode.HIGH: FAN_HIGH,
CompitFanMode.HOLIDAY: FAN_AUTO,
}

COMPIT_PRESET_MAP = {
CompitPresetMode.AUTO: PRESET_HOME,
CompitPresetMode.HOLIDAY: PRESET_ECO,
CompitPresetMode.MANUAL: PRESET_NONE,
CompitPresetMode.AWAY: PRESET_AWAY,
}

HVAC_MODE_TO_COMPIT_MODE = {v: k for k, v in COMPIT_MODE_MAP.items()}
FAN_MODE_TO_COMPIT_FAN_MODE = {v: k for k, v in COMPIT_FANSPEED_MAP.items()}
PRESET_MODE_TO_COMPIT_PRESET_MODE = {v: k for k, v in COMPIT_PRESET_MAP.items()}


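A round-trip sketch for the maps above (illustrative, using only names defined in this file). Note that the fan reversal is not one-to-one: both CompitFanMode.AUTO and CompitFanMode.HOLIDAY map to FAN_AUTO, so the dict comprehension keeps the later entry and FAN_AUTO reverses to HOLIDAY.

# HVAC modes reverse cleanly; FAN_AUTO resolves to the last duplicate.
assert COMPIT_MODE_MAP[HVAC_MODE_TO_COMPIT_MODE[HVACMode.HEAT]] == HVACMode.HEAT
assert FAN_MODE_TO_COMPIT_FAN_MODE[FAN_AUTO] == CompitFanMode.HOLIDAY
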
async def async_setup_entry(
hass: HomeAssistant,
entry: CompitConfigEntry,
async_add_devices: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the CompitClimate platform from a config entry."""

coordinator = entry.runtime_data
climate_entities = []
for device_id in coordinator.connector.devices:
device = coordinator.connector.devices[device_id]

if device.definition.device_class == CLIMATE_DEVICE_CLASS:
climate_entities.append(
CompitClimate(
coordinator,
device_id,
{
parameter.parameter_code: parameter
for parameter in device.definition.parameters
},
device.definition.name,
)
)

async_add_devices(climate_entities)


class CompitClimate(CoordinatorEntity[CompitDataUpdateCoordinator], ClimateEntity):
"""Representation of a Compit climate device."""

_attr_temperature_unit = UnitOfTemperature.CELSIUS
_attr_hvac_modes = [*COMPIT_MODE_MAP.values()]
_attr_name = None
_attr_has_entity_name = True
_attr_supported_features = (
ClimateEntityFeature.TARGET_TEMPERATURE
| ClimateEntityFeature.FAN_MODE
| ClimateEntityFeature.PRESET_MODE
)

def __init__(
self,
coordinator: CompitDataUpdateCoordinator,
device_id: int,
parameters: dict[str, Parameter],
device_name: str,
) -> None:
"""Initialize the climate device."""
super().__init__(coordinator)
self._attr_unique_id = f"{device_name}_{device_id}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, str(device_id))},
name=device_name,
manufacturer=MANUFACTURER_NAME,
model=device_name,
)

self.parameters = parameters
self.device_id = device_id
self.available_presets: Parameter | None = self.parameters.get(
CompitParameter.PRESET_MODE.value
)
self.available_fan_modes: Parameter | None = self.parameters.get(
CompitParameter.FAN_MODE.value
)

@property
def available(self) -> bool:
"""Return if entity is available."""
return (
super().available and self.device_id in self.coordinator.connector.devices
)

@property
def current_temperature(self) -> float | None:
"""Return the current temperature."""
value = self.get_parameter_value(CompitParameter.CURRENT_TEMPERATURE)
if value is None:
return None
return float(value.value)

@property
def target_temperature(self) -> float | None:
"""Return the temperature we try to reach."""
value = self.get_parameter_value(CompitParameter.SET_TARGET_TEMPERATURE)
if value is None:
return None
return float(value.value)

@cached_property
def preset_modes(self) -> list[str] | None:
"""Return the available preset modes."""
if self.available_presets is None or self.available_presets.details is None:
return []

preset_modes = []
for item in self.available_presets.details:
if item is not None:
ha_preset = COMPIT_PRESET_MAP.get(CompitPresetMode(item.state))
if ha_preset and ha_preset not in preset_modes:
preset_modes.append(ha_preset)

return preset_modes

@cached_property
def fan_modes(self) -> list[str] | None:
"""Return the available fan modes."""
if self.available_fan_modes is None or self.available_fan_modes.details is None:
return []

fan_modes = []
for item in self.available_fan_modes.details:
if item is not None:
ha_fan_mode = COMPIT_FANSPEED_MAP.get(CompitFanMode(item.state))
if ha_fan_mode and ha_fan_mode not in fan_modes:
fan_modes.append(ha_fan_mode)

return fan_modes

@property
def preset_mode(self) -> str | None:
"""Return the current preset mode."""
preset_mode = self.get_parameter_value(CompitParameter.PRESET_MODE)

if preset_mode:
compit_preset_mode = CompitPresetMode(preset_mode.value)
return COMPIT_PRESET_MAP.get(compit_preset_mode)
return None

@property
def fan_mode(self) -> str | None:
"""Return the current fan mode."""
fan_mode = self.get_parameter_value(CompitParameter.FAN_MODE)
if fan_mode:
compit_fan_mode = CompitFanMode(fan_mode.value)
return COMPIT_FANSPEED_MAP.get(compit_fan_mode)
return None

@property
def hvac_mode(self) -> HVACMode | None:
"""Return the current HVAC mode."""
hvac_mode = self.get_parameter_value(CompitParameter.HVAC_MODE)
if hvac_mode:
compit_hvac_mode = CompitHVACMode(hvac_mode.value)
return COMPIT_MODE_MAP.get(compit_hvac_mode)
return None

async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
temp = kwargs.get(ATTR_TEMPERATURE)
if temp is None:
raise ServiceValidationError("Temperature argument missing")
await self.set_parameter_value(CompitParameter.SET_TARGET_TEMPERATURE, temp)

async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set new target HVAC mode."""

if not (mode := HVAC_MODE_TO_COMPIT_MODE.get(hvac_mode)):
raise ServiceValidationError(f"Invalid hvac mode {hvac_mode}")

await self.set_parameter_value(CompitParameter.HVAC_MODE, mode.value)

async def async_set_preset_mode(self, preset_mode: str) -> None:
"""Set new target preset mode."""

compit_preset = PRESET_MODE_TO_COMPIT_PRESET_MODE.get(preset_mode)
if compit_preset is None:
raise ServiceValidationError(f"Invalid preset mode: {preset_mode}")

await self.set_parameter_value(CompitParameter.PRESET_MODE, compit_preset.value)

async def async_set_fan_mode(self, fan_mode: str) -> None:
"""Set new target fan mode."""

compit_fan_mode = FAN_MODE_TO_COMPIT_FAN_MODE.get(fan_mode)
if compit_fan_mode is None:
raise ServiceValidationError(f"Invalid fan mode: {fan_mode}")

await self.set_parameter_value(CompitParameter.FAN_MODE, compit_fan_mode.value)

async def set_parameter_value(self, parameter: CompitParameter, value: int) -> None:
"""Call the API to set a parameter to a new value."""
await self.coordinator.connector.set_device_parameter(
self.device_id, parameter, value
)
self.async_write_ha_state()

def get_parameter_value(self, parameter: CompitParameter) -> Param | None:
"""Get the parameter value from the device state."""
return self.coordinator.connector.get_device_parameter(
self.device_id, parameter
)

homeassistant/components/compit/config_flow.py (new file, 110 lines)
@@ -0,0 +1,110 @@
"""Config flow for Compit integration."""

from __future__ import annotations

from collections.abc import Mapping
import logging
from typing import Any

from compit_inext_api import CannotConnect, CompitApiConnector, InvalidAuth
import voluptuous as vol

from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from homeassistant.helpers.aiohttp_client import async_create_clientsession

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_EMAIL): str,
vol.Required(CONF_PASSWORD): str,
}
)

STEP_REAUTH_SCHEMA = vol.Schema(
{
vol.Required(CONF_PASSWORD): str,
}
)


class CompitConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Compit."""

VERSION = 1

async def async_step_user(
self,
user_input: dict[str, Any] | None = None,
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
session = async_create_clientsession(self.hass)
api = CompitApiConnector(session)
success = False
try:
success = await api.init(
user_input[CONF_EMAIL],
user_input[CONF_PASSWORD],
self.hass.config.language,
)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
if not success:
# Api returned unexpected result but no exception
_LOGGER.error("Compit api returned unexpected result")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(user_input[CONF_EMAIL])

if self.source == SOURCE_REAUTH:
self._abort_if_unique_id_mismatch()
return self.async_update_reload_and_abort(
self._get_reauth_entry(), data_updates=user_input
)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=user_input[CONF_EMAIL], data=user_input
)

return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)

async def async_step_reauth(self, data: Mapping[str, Any]) -> ConfigFlowResult:
"""Handle re-auth."""
return await self.async_step_reauth_confirm()

async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm re-authentication."""
errors: dict[str, str] = {}
reauth_entry = self._get_reauth_entry()
reauth_entry_data = reauth_entry.data

if user_input:
# Reuse async_step_user with combined credentials
return await self.async_step_user(
{
CONF_EMAIL: reauth_entry_data[CONF_EMAIL],
CONF_PASSWORD: user_input[CONF_PASSWORD],
}
)

return self.async_show_form(
step_id="reauth_confirm",
data_schema=STEP_REAUTH_SCHEMA,
description_placeholders={CONF_EMAIL: reauth_entry_data[CONF_EMAIL]},
errors=errors,
)

homeassistant/components/compit/const.py (new file, 4 lines)
@@ -0,0 +1,4 @@
"""Constants for the Compit integration."""

DOMAIN = "compit"
MANUFACTURER_NAME = "Compit"

homeassistant/components/compit/coordinator.py (new file, 43 lines)
@@ -0,0 +1,43 @@
"""Define an object to manage fetching Compit data."""

from datetime import timedelta
import logging

from compit_inext_api import CompitApiConnector, DeviceInstance

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from .const import DOMAIN

SCAN_INTERVAL = timedelta(seconds=30)
_LOGGER: logging.Logger = logging.getLogger(__name__)

type CompitConfigEntry = ConfigEntry[CompitDataUpdateCoordinator]


class CompitDataUpdateCoordinator(DataUpdateCoordinator[dict[int, DeviceInstance]]):
"""Class to manage fetching data from the API."""

def __init__(
self,
hass: HomeAssistant,
config_entry: ConfigEntry,
connector: CompitApiConnector,
) -> None:
"""Initialize."""
self.connector = connector

super().__init__(
hass,
_LOGGER,
name=DOMAIN,
update_interval=SCAN_INTERVAL,
config_entry=config_entry,
)

async def _async_update_data(self) -> dict[int, DeviceInstance]:
"""Update data via library."""
await self.connector.update_state(device_id=None)  # Update all devices
return self.connector.devices

homeassistant/components/compit/manifest.json (new file, 12 lines)
@@ -0,0 +1,12 @@
{
"domain": "compit",
"name": "Compit",
"codeowners": ["@Przemko92"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/compit",
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["compit"],
"quality_scale": "bronze",
"requirements": ["compit-inext-api==0.2.1"]
}

homeassistant/components/compit/quality_scale.yaml (new file, 86 lines)
@@ -0,0 +1,86 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
This integration does not provide additional actions.
appropriate-polling: done
brands: done
common-modules:
status: exempt
comment: |
This integration does not use any common modules.
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: |
This integration does not provide additional actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: |
Entities of this integration do not explicitly subscribe to events.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done

# Silver
action-exceptions:
status: exempt
comment: |
This integration does not provide additional actions.
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: |
This integration does not have an options flow.
docs-installation-parameters: done
entity-unavailable: todo
integration-owner: done
log-when-unavailable: todo
parallel-updates: done
reauthentication-flow: done
test-coverage: todo

# Gold
devices: done
diagnostics: todo
discovery-update-info:
status: exempt
comment: |
This integration is a cloud service and does not support discovery.
discovery: todo
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: done
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices: todo
entity-category: done
entity-device-class: done
entity-disabled-by-default:
status: exempt
comment: |
This integration does not have any entities that should be disabled by default.
entity-translations: done
exception-translations: todo
icon-translations:
status: exempt
comment: |
There is no need for icon translations.
reconfiguration-flow: todo
repair-issues: todo
stale-devices: todo
# Platinum
async-dependency: done
inject-websession: todo
strict-typing: done

homeassistant/components/compit/strings.json (new file, 35 lines)
@@ -0,0 +1,35 @@
{
"config": {
"step": {
"user": {
"description": "Please enter your https://inext.compit.pl/ credentials.",
"title": "Connect to Compit iNext",
"data": {
"email": "[%key:common::config_flow::data::email%]",
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"email": "The email address of your inext.compit.pl account",
"password": "The password of your inext.compit.pl account"
}
},
"reauth_confirm": {
"description": "Please update your password for {email}",
"data": {
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"password": "[%key:component::compit::config::step::user::data_description::password%]"
}
}
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
}
}
}

@@ -50,14 +50,13 @@ from .const import (
ATTR_LANGUAGE,
ATTR_TEXT,
DATA_COMPONENT,
DATA_DEFAULT_ENTITY,
DOMAIN,
HOME_ASSISTANT_AGENT,
SERVICE_PROCESS,
SERVICE_RELOAD,
ConversationEntityFeature,
)
from .default_agent import DefaultAgent, async_setup_default_agent
from .default_agent import async_setup_default_agent
from .entity import ConversationEntity
from .http import async_setup as async_setup_conversation_http
from .models import AbstractConversationAgent, ConversationInput, ConversationResult
@@ -142,7 +141,7 @@ def async_unset_agent(
hass: HomeAssistant,
config_entry: ConfigEntry,
) -> None:
"""Set the agent to handle the conversations."""
"""Unset the agent to handle the conversations."""
get_agent_manager(hass).async_unset_agent(config_entry.entry_id)


@@ -241,10 +240,10 @@ async def async_handle_sentence_triggers(

Returns None if no match occurred.
"""
default_agent = async_get_agent(hass)
assert isinstance(default_agent, DefaultAgent)
agent = get_agent_manager(hass).default_agent
assert agent is not None

return await default_agent.async_handle_sentence_triggers(user_input)
return await agent.async_handle_sentence_triggers(user_input)


async def async_handle_intents(
@@ -257,12 +256,10 @@ async def async_handle_intents(

Returns None if no match occurred.
"""
default_agent = async_get_agent(hass)
assert isinstance(default_agent, DefaultAgent)
agent = get_agent_manager(hass).default_agent
assert agent is not None

return await default_agent.async_handle_intents(
user_input, intent_filter=intent_filter
)
return await agent.async_handle_intents(user_input, intent_filter=intent_filter)


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
@@ -298,9 +295,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

async def handle_reload(service: ServiceCall) -> None:
"""Reload intents."""
await hass.data[DATA_DEFAULT_ENTITY].async_reload(
language=service.data.get(ATTR_LANGUAGE)
)
agent = get_agent_manager(hass).default_agent
if agent is not None:
await agent.async_reload(language=service.data.get(ATTR_LANGUAGE))

hass.services.async_register(
DOMAIN,

@@ -4,7 +4,7 @@ from __future__ import annotations
 
 import dataclasses
 import logging
-from typing import Any
+from typing import TYPE_CHECKING, Any
 
 import voluptuous as vol
 
@@ -12,7 +12,7 @@ from homeassistant.core import Context, HomeAssistant, async_get_hass, callback
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import config_validation as cv, intent, singleton
 
-from .const import DATA_COMPONENT, DATA_DEFAULT_ENTITY, HOME_ASSISTANT_AGENT
+from .const import DATA_COMPONENT, HOME_ASSISTANT_AGENT
 from .entity import ConversationEntity
 from .models import (
     AbstractConversationAgent,
@@ -28,6 +28,9 @@ from .trace import (
 
 _LOGGER = logging.getLogger(__name__)
 
+if TYPE_CHECKING:
+    from .default_agent import DefaultAgent
+
 
 @singleton.singleton("conversation_agent")
 @callback
@@ -49,8 +52,10 @@ def async_get_agent(
     hass: HomeAssistant, agent_id: str | None = None
 ) -> AbstractConversationAgent | ConversationEntity | None:
     """Get specified agent."""
+    manager = get_agent_manager(hass)
+
     if agent_id is None or agent_id == HOME_ASSISTANT_AGENT:
-        return hass.data[DATA_DEFAULT_ENTITY]
+        return manager.default_agent
 
     if "." in agent_id:
         return hass.data[DATA_COMPONENT].get_entity(agent_id)
@@ -134,6 +139,7 @@ class AgentManager:
         """Initialize the conversation agents."""
         self.hass = hass
         self._agents: dict[str, AbstractConversationAgent] = {}
+        self.default_agent: DefaultAgent | None = None
 
     @callback
     def async_get_agent(self, agent_id: str) -> AbstractConversationAgent | None:
@@ -182,3 +188,7 @@ class AgentManager:
     def async_unset_agent(self, agent_id: str) -> None:
         """Unset the agent."""
         self._agents.pop(agent_id, None)
+
+    async def async_setup_default_agent(self, agent: DefaultAgent) -> None:
+        """Set up the default agent."""
+        self.default_agent = agent
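The hunks above are the heart of the simplification: the default conversation agent moves off hass.data[DATA_DEFAULT_ENTITY] and onto the AgentManager singleton. A minimal, dependency-free sketch of the resulting access pattern; the class and method names follow the diff, but the module-global singleton and the bodies are illustrative stand-ins, not Home Assistant's implementation:

    from __future__ import annotations

    import asyncio


    class DefaultAgent:
        """Stand-in for conversation.default_agent.DefaultAgent."""


    class AgentManager:
        """Stand-in for the manager returned by get_agent_manager(hass)."""

        def __init__(self) -> None:
            self.default_agent: DefaultAgent | None = None

        async def async_setup_default_agent(self, agent: DefaultAgent) -> None:
            self.default_agent = agent


    _MANAGER = AgentManager()  # the real helper caches per hass via helpers.singleton


    def get_agent_manager() -> AgentManager:
        return _MANAGER


    async def main() -> None:
        await get_agent_manager().async_setup_default_agent(DefaultAgent())
        # Call sites now do a None check instead of isinstance(..., DefaultAgent):
        agent = get_agent_manager().default_agent
        assert agent is not None


    asyncio.run(main())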
@@ -10,11 +10,9 @@ from homeassistant.util.hass_dict import HassKey
 if TYPE_CHECKING:
     from homeassistant.helpers.entity_component import EntityComponent
 
-    from .default_agent import DefaultAgent
     from .entity import ConversationEntity
 
 DOMAIN = "conversation"
-DEFAULT_EXPOSED_ATTRIBUTES = {"device_class"}
 HOME_ASSISTANT_AGENT = "conversation.home_assistant"
 
 ATTR_TEXT = "text"
@@ -26,7 +24,6 @@ SERVICE_PROCESS = "process"
 SERVICE_RELOAD = "reload"
 
 DATA_COMPONENT: HassKey[EntityComponent[ConversationEntity]] = HassKey(DOMAIN)
-DATA_DEFAULT_ENTITY: HassKey[DefaultAgent] = HassKey(f"{DOMAIN}_default_entity")
 
 
 class ConversationEntityFeature(IntFlag):
@@ -68,13 +68,9 @@ from homeassistant.helpers.event import async_track_state_added_domain
 from homeassistant.util import language as language_util
 from homeassistant.util.json import JsonObjectType, json_loads_object
 
+from .agent_manager import get_agent_manager
 from .chat_log import AssistantContent, ChatLog
-from .const import (
-    DATA_DEFAULT_ENTITY,
-    DEFAULT_EXPOSED_ATTRIBUTES,
-    DOMAIN,
-    ConversationEntityFeature,
-)
+from .const import DOMAIN, ConversationEntityFeature
 from .entity import ConversationEntity
 from .models import ConversationInput, ConversationResult
 from .trace import ConversationTraceEventType, async_conversation_trace_append
@@ -83,6 +79,8 @@ _LOGGER = logging.getLogger(__name__)
 _DEFAULT_ERROR_TEXT = "Sorry, I couldn't understand that"
 _ENTITY_REGISTRY_UPDATE_FIELDS = ["aliases", "name", "original_name"]
 
+_DEFAULT_EXPOSED_ATTRIBUTES = {"device_class"}
+
 REGEX_TYPE = type(re.compile(""))
 TRIGGER_CALLBACK_TYPE = Callable[
     [ConversationInput, RecognizeResult], Awaitable[str | None]
@@ -209,9 +207,9 @@ async def async_setup_default_agent(
     config_intents: dict[str, Any],
 ) -> None:
     """Set up entity registry listener for the default agent."""
-    entity = DefaultAgent(hass, config_intents)
-    await entity_component.async_add_entities([entity])
-    hass.data[DATA_DEFAULT_ENTITY] = entity
+    agent = DefaultAgent(hass, config_intents)
+    await entity_component.async_add_entities([agent])
+    await get_agent_manager(hass).async_setup_default_agent(agent)
 
 
 @core.callback
 def async_entity_state_listener(
@@ -846,7 +844,7 @@ class DefaultAgent(ConversationEntity):
         context = {"domain": state.domain}
         if state.attributes:
             # Include some attributes
-            for attr in DEFAULT_EXPOSED_ATTRIBUTES:
+            for attr in _DEFAULT_EXPOSED_ATTRIBUTES:
                 if attr not in state.attributes:
                     continue
                 context[attr] = state.attributes[attr]
@@ -25,7 +25,7 @@ from .agent_manager import (
     async_get_agent,
     get_agent_manager,
 )
-from .const import DATA_COMPONENT, DATA_DEFAULT_ENTITY
+from .const import DATA_COMPONENT
 from .default_agent import (
     METADATA_CUSTOM_FILE,
     METADATA_CUSTOM_SENTENCE,
@@ -169,7 +169,8 @@ async def websocket_list_sentences(
     hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
 ) -> None:
     """List custom registered sentences."""
-    agent = hass.data[DATA_DEFAULT_ENTITY]
+    agent = get_agent_manager(hass).default_agent
+    assert agent is not None
 
     sentences = []
     for trigger_data in agent.trigger_sentences:
@@ -191,7 +192,8 @@ async def websocket_hass_agent_debug(
     hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
 ) -> None:
     """Return intents that would be matched by the default agent for a list of sentences."""
-    agent = hass.data[DATA_DEFAULT_ENTITY]
+    agent = get_agent_manager(hass).default_agent
+    assert agent is not None
 
     # Return results for each sentence in the same order as the input.
     result_dicts: list[dict[str, Any] | None] = []
@@ -1,4 +1,9 @@
 {
+  "entity_component": {
+    "_": {
+      "default": "mdi:forum-outline"
+    }
+  },
   "services": {
     "process": {
       "service": "mdi:message-processing"
@@ -4,7 +4,7 @@
   "codeowners": ["@home-assistant/core", "@synesthesiam", "@arturpragacz"],
   "dependencies": ["http", "intent"],
   "documentation": "https://www.home-assistant.io/integrations/conversation",
-  "integration_type": "system",
+  "integration_type": "entity",
   "quality_scale": "internal",
   "requirements": ["hassil==3.2.0", "home-assistant-intents==2025.9.3"]
 }
@@ -20,7 +20,8 @@ from homeassistant.helpers.script import ScriptRunResult
 from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo
 from homeassistant.helpers.typing import UNDEFINED, ConfigType
 
-from .const import DATA_DEFAULT_ENTITY, DOMAIN
+from .agent_manager import get_agent_manager
+from .const import DOMAIN
 from .models import ConversationInput
 
 
@@ -123,4 +124,6 @@ async def async_attach_trigger(
         # two trigger copies for who will provide a response.
         return None
 
-    return hass.data[DATA_DEFAULT_ENTITY].register_trigger(sentences, call_action)
+    agent = get_agent_manager(hass).default_agent
+    assert agent is not None
+    return agent.register_trigger(sentences, call_action)
@@ -19,6 +19,7 @@
     "ssdp",
     "stream",
     "sun",
+    "usage_prediction",
     "usb",
     "webhook",
     "zeroconf"
@@ -43,3 +43,5 @@ class DelugeSensorType(enum.StrEnum):
     UPLOAD_SPEED_SENSOR = "upload_speed"
     PROTOCOL_TRAFFIC_UPLOAD_SPEED_SENSOR = "protocol_traffic_upload_speed"
     PROTOCOL_TRAFFIC_DOWNLOAD_SPEED_SENSOR = "protocol_traffic_download_speed"
+    DOWNLOADING_COUNT_SENSOR = "downloading_count"
+    SEEDING_COUNT_SENSOR = "seeding_count"
@@ -2,6 +2,7 @@
 
 from __future__ import annotations
 
+from collections import Counter
 from datetime import timedelta
 from ssl import SSLError
 from typing import Any
@@ -14,11 +15,22 @@ from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
 
-from .const import LOGGER, DelugeGetSessionStatusKeys
+from .const import LOGGER, DelugeGetSessionStatusKeys, DelugeSensorType
 
 type DelugeConfigEntry = ConfigEntry[DelugeDataUpdateCoordinator]
 
 
+def count_states(data: dict[str, Any]) -> dict[str, int]:
+    """Count the states of the provided torrents."""
+
+    counts = Counter(torrent[b"state"].decode() for torrent in data.values())
+
+    return {
+        DelugeSensorType.DOWNLOADING_COUNT_SENSOR.value: counts.get("Downloading", 0),
+        DelugeSensorType.SEEDING_COUNT_SENSOR.value: counts.get("Seeding", 0),
+    }
+
+
 class DelugeDataUpdateCoordinator(
     DataUpdateCoordinator[dict[Platform, dict[str, Any]]]
 ):
@@ -39,19 +51,22 @@ class DelugeDataUpdateCoordinator(
         )
         self.api = api
 
-    async def _async_update_data(self) -> dict[Platform, dict[str, Any]]:
-        """Get the latest data from Deluge and updates the state."""
+    def _get_deluge_data(self):
+        """Get the latest data from Deluge."""
+
         data = {}
         try:
-            _data = await self.hass.async_add_executor_job(
-                self.api.call,
+            data["session_status"] = self.api.call(
                 "core.get_session_status",
                 [iter_member.value for iter_member in list(DelugeGetSessionStatusKeys)],
             )
-            data[Platform.SENSOR] = {k.decode(): v for k, v in _data.items()}
-            data[Platform.SWITCH] = await self.hass.async_add_executor_job(
-                self.api.call, "core.get_torrents_status", {}, ["paused"]
+            data["torrents_status_state"] = self.api.call(
+                "core.get_torrents_status", {}, ["state"]
+            )
+            data["torrents_status_paused"] = self.api.call(
+                "core.get_torrents_status", {}, ["paused"]
             )
 
         except (
             ConnectionRefusedError,
             TimeoutError,
@@ -66,4 +81,18 @@ class DelugeDataUpdateCoordinator(
             ) from ex
             LOGGER.error("Unknown error connecting to Deluge: %s", ex)
             raise
+
+        return data
+
+    async def _async_update_data(self) -> dict[Platform, dict[str, Any]]:
+        """Get the latest data from Deluge and updates the state."""
+
+        deluge_data = await self.hass.async_add_executor_job(self._get_deluge_data)
+
+        data = {}
+        data[Platform.SENSOR] = {
+            k.decode(): v for k, v in deluge_data["session_status"].items()
+        }
+        data[Platform.SENSOR].update(count_states(deluge_data["torrents_status_state"]))
+        data[Platform.SWITCH] = deluge_data["torrents_status_paused"]
+        return data
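The new count_states helper condenses Deluge's raw torrent-status payload (bytes keys and values from the RPC) into the two new count sensors. A quick usage sketch against a fabricated payload; the b"state" shape matches the diff, while the torrent ids and plain-string result keys are illustrative:

    from collections import Counter
    from typing import Any


    def count_states(data: dict[str, Any]) -> dict[str, int]:
        # Same logic as the coordinator helper, with plain strings standing in
        # for the DelugeSensorType enum values.
        counts = Counter(torrent[b"state"].decode() for torrent in data.values())
        return {
            "downloading_count": counts.get("Downloading", 0),
            "seeding_count": counts.get("Seeding", 0),
        }


    # Fabricated core.get_torrents_status(..., ["state"]) response:
    payload = {
        b"t1": {b"state": b"Downloading"},
        b"t2": {b"state": b"Seeding"},
        b"t3": {b"state": b"Seeding"},
        b"t4": {b"state": b"Paused"},  # states other than the two are ignored
    }
    print(count_states(payload))  # {'downloading_count': 1, 'seeding_count': 2}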
homeassistant/components/deluge/icons.json (new file, 12 lines)
@@ -0,0 +1,12 @@
{
  "entity": {
    "sensor": {
      "downloading_count": {
        "default": "mdi:download"
      },
      "seeding_count": {
        "default": "mdi:upload"
      }
    }
  }
}
@@ -110,6 +110,18 @@ SENSOR_TYPES: tuple[DelugeSensorEntityDescription, ...] = (
             data, DelugeSensorType.PROTOCOL_TRAFFIC_DOWNLOAD_SPEED_SENSOR.value
         ),
     ),
+    DelugeSensorEntityDescription(
+        key=DelugeSensorType.DOWNLOADING_COUNT_SENSOR.value,
+        translation_key=DelugeSensorType.DOWNLOADING_COUNT_SENSOR.value,
+        state_class=SensorStateClass.TOTAL,
+        value=lambda data: data[DelugeSensorType.DOWNLOADING_COUNT_SENSOR.value],
+    ),
+    DelugeSensorEntityDescription(
+        key=DelugeSensorType.SEEDING_COUNT_SENSOR.value,
+        translation_key=DelugeSensorType.SEEDING_COUNT_SENSOR.value,
+        state_class=SensorStateClass.TOTAL,
+        value=lambda data: data[DelugeSensorType.SEEDING_COUNT_SENSOR.value],
+    ),
 )
 
 
@@ -36,6 +36,10 @@
         "idle": "[%key:common::state::idle%]"
       }
     },
+    "downloading_count": {
+      "name": "Downloading count",
+      "unit_of_measurement": "torrents"
+    },
     "download_speed": {
       "name": "Download speed"
     },
@@ -45,6 +49,10 @@
     "protocol_traffic_upload_speed": {
       "name": "Protocol traffic upload speed"
     },
+    "seeding_count": {
+      "name": "Seeding count",
+      "unit_of_measurement": "[%key:component::deluge::entity::sensor::downloading_count::unit_of_measurement%]"
+    },
     "upload_speed": {
       "name": "Upload speed"
     }
homeassistant/components/derivative/diagnostics.py (new file, 23 lines)
@@ -0,0 +1,23 @@
"""Diagnostics support for derivative."""

from __future__ import annotations

from typing import Any

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, config_entry: ConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""

    registry = er.async_get(hass)
    entities = registry.entities.get_entries_for_config_entry_id(config_entry.entry_id)

    return {
        "config_entry": config_entry.as_dict(),
        "entity": [entity.extended_dict for entity in entities],
    }
@@ -6,5 +6,6 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/derivative",
   "integration_type": "helper",
-  "iot_class": "calculated"
+  "iot_class": "calculated",
+  "quality_scale": "internal"
 }
@@ -227,15 +227,28 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
             weight = calculate_weight(start, end, current_time)
             derivative = derivative + (value * Decimal(weight))
 
+        _LOGGER.debug(
+            "%s: Calculated new derivative as %f from %d segments",
+            self.entity_id,
+            derivative,
+            len(self._state_list),
+        )
+
         return derivative
 
     def _prune_state_list(self, current_time: datetime) -> None:
         # filter out all derivatives older than `time_window` from our window list
+        old_len = len(self._state_list)
         self._state_list = [
             (time_start, time_end, state)
             for time_start, time_end, state in self._state_list
             if (current_time - time_end).total_seconds() < self._time_window
         ]
+        _LOGGER.debug(
+            "%s: Pruned %d elements from state list",
+            self.entity_id,
+            old_len - len(self._state_list),
+        )
 
     def _handle_invalid_source_state(self, state: State | None) -> bool:
         # Check the source state for unknown/unavailable condition. If unusable, write unknown/unavailable state and return false.
@@ -292,6 +305,10 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
         ) -> None:
             """Calculate derivative based on time and reschedule."""
 
+            _LOGGER.debug(
+                "%s: Recalculating derivative due to max_sub_interval time elapsed",
+                self.entity_id,
+            )
             self._prune_state_list(now)
             derivative = self._calc_derivative_from_state_list(now)
             self._write_native_value(derivative)
@@ -300,6 +317,11 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
             if derivative != 0:
                 schedule_max_sub_interval_exceeded(source_state)
 
+            _LOGGER.debug(
+                "%s: Scheduling max_sub_interval_callback in %s",
+                self.entity_id,
+                self._max_sub_interval,
+            )
             self._cancel_max_sub_interval_exceeded_callback = async_call_later(
                 self.hass,
                 self._max_sub_interval,
@@ -309,6 +331,9 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
         @callback
         def on_state_reported(event: Event[EventStateReportedData]) -> None:
             """Handle constant sensor state."""
+            _LOGGER.debug(
+                "%s: New state reported event: %s", self.entity_id, event.data
+            )
             self._cancel_max_sub_interval_exceeded_callback()
             new_state = event.data["new_state"]
             if not self._handle_invalid_source_state(new_state):
@@ -330,6 +355,7 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
         @callback
         def on_state_changed(event: Event[EventStateChangedData]) -> None:
             """Handle changed sensor state."""
+            _LOGGER.debug("%s: New state changed event: %s", self.entity_id, event.data)
            self._cancel_max_sub_interval_exceeded_callback()
             new_state = event.data["new_state"]
             if not self._handle_invalid_source_state(new_state):
@@ -382,15 +408,32 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
                     / Decimal(self._unit_prefix)
                     * Decimal(self._unit_time)
                 )
+                _LOGGER.debug(
+                    "%s: Calculated new derivative segment as %f / %f / %f * %f = %f",
+                    self.entity_id,
+                    delta_value,
+                    elapsed_time,
+                    self._unit_prefix,
+                    self._unit_time,
+                    new_derivative,
+                )
+
             except ValueError as err:
-                _LOGGER.warning("While calculating derivative: %s", err)
+                _LOGGER.warning(
+                    "%s: While calculating derivative: %s", self.entity_id, err
+                )
             except DecimalException as err:
                 _LOGGER.warning(
-                    "Invalid state (%s > %s): %s", old_value, new_state.state, err
+                    "%s: Invalid state (%s > %s): %s",
+                    self.entity_id,
+                    old_value,
+                    new_state.state,
+                    err,
                 )
             except AssertionError as err:
-                _LOGGER.error("Could not calculate derivative: %s", err)
+                _LOGGER.error(
+                    "%s: Could not calculate derivative: %s", self.entity_id, err
+                )
 
             # For total increasing sensors, the value is expected to continuously increase.
             # A negative derivative for a total increasing sensor likely indicates the
@@ -400,6 +443,10 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
                 == SensorStateClass.TOTAL_INCREASING
                 and new_derivative < 0
             ):
+                _LOGGER.debug(
+                    "%s: Dropping sample as source total_increasing sensor decreased",
+                    self.entity_id,
+                )
                 return
 
             # add latest derivative to the window list
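For context on what the new debug lines report: the derivative sensor keeps a window of (start, end, value) segments, prunes segments whose end time has left the window, and sums the remainder with per-segment weights. A toy recreation, assuming calculate_weight() weighs each segment by the share of the time window it covers; the real weighting lives in the integration and may differ:

    from datetime import datetime, timedelta

    TIME_WINDOW = 60.0  # seconds, mirrors self._time_window


    def prune(segments, now):
        """Drop segments that fell out of the window (cf. _prune_state_list)."""
        return [
            (start, end, value)
            for start, end, value in segments
            if (now - end).total_seconds() < TIME_WINDOW
        ]


    def windowed_derivative(segments, now):
        """Weighted sum over the kept segments (cf. _calc_derivative_from_state_list)."""
        total = 0.0
        for start, end, value in segments:
            weight = (end - start).total_seconds() / TIME_WINDOW  # assumed weighting
            total += value * weight
        return total


    now = datetime(2025, 1, 1, 12, 0, 0)
    segments = [
        (now - timedelta(seconds=90), now - timedelta(seconds=70), 5.0),  # pruned
        (now - timedelta(seconds=40), now - timedelta(seconds=10), 2.0),  # kept
    ]
    segments = prune(segments, now)
    print(windowed_derivative(segments, now))  # 2.0 * (30 / 60) = 1.0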
@@ -152,24 +152,28 @@ ECOWITT_SENSORS_MAPPING: Final = {
         native_unit_of_measurement=UnitOfPrecipitationDepth.MILLIMETERS,
         device_class=SensorDeviceClass.PRECIPITATION,
         state_class=SensorStateClass.TOTAL_INCREASING,
+        suggested_display_precision=1,
     ),
     EcoWittSensorTypes.RAIN_COUNT_INCHES: SensorEntityDescription(
         key="RAIN_COUNT_INCHES",
         native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
         device_class=SensorDeviceClass.PRECIPITATION,
         state_class=SensorStateClass.TOTAL_INCREASING,
+        suggested_display_precision=2,
     ),
     EcoWittSensorTypes.RAIN_RATE_MM: SensorEntityDescription(
         key="RAIN_RATE_MM",
         native_unit_of_measurement=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
         state_class=SensorStateClass.MEASUREMENT,
         device_class=SensorDeviceClass.PRECIPITATION_INTENSITY,
+        suggested_display_precision=1,
     ),
     EcoWittSensorTypes.RAIN_RATE_INCHES: SensorEntityDescription(
         key="RAIN_RATE_INCHES",
         native_unit_of_measurement=UnitOfVolumetricFlux.INCHES_PER_HOUR,
         state_class=SensorStateClass.MEASUREMENT,
         device_class=SensorDeviceClass.PRECIPITATION_INTENSITY,
+        suggested_display_precision=2,
     ),
     EcoWittSensorTypes.LIGHTNING_DISTANCE_KM: SensorEntityDescription(
         key="LIGHTNING_DISTANCE_KM",
@@ -230,6 +234,17 @@ ECOWITT_SENSORS_MAPPING: Final = {
         native_unit_of_measurement=PERCENTAGE,
         state_class=SensorStateClass.MEASUREMENT,
     ),
+    EcoWittSensorTypes.DISTANCE_MM: SensorEntityDescription(
+        key="DISTANCE_MM",
+        device_class=SensorDeviceClass.DISTANCE,
+        native_unit_of_measurement=UnitOfLength.MILLIMETERS,
+        state_class=SensorStateClass.MEASUREMENT,
+    ),
+    EcoWittSensorTypes.HEAT_COUNT: SensorEntityDescription(
+        key="HEAT_COUNT",
+        state_class=SensorStateClass.TOTAL_INCREASING,
+        entity_category=EntityCategory.DIAGNOSTIC,
+    ),
     EcoWittSensorTypes.PM1: SensorEntityDescription(
         key="PM1",
         device_class=SensorDeviceClass.PM1,
@@ -238,6 +253,7 @@ ECOWITT_SENSORS_MAPPING: Final = {
     ),
     EcoWittSensorTypes.PM4: SensorEntityDescription(
         key="PM4",
+        device_class=SensorDeviceClass.PM4,
         native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
         state_class=SensorStateClass.MEASUREMENT,
     ),
@@ -120,6 +120,14 @@ def _make_url_from_data(data: dict[str, str]) -> str:
     return f"{protocol}{address}"
 
 
+def _get_protocol_from_url(url: str) -> str:
+    """Get protocol from URL. Returns the configured protocol from URL or the default secure protocol."""
+    return next(
+        (k for k, v in PROTOCOL_MAP.items() if url.startswith(v)),
+        DEFAULT_SECURE_PROTOCOL,
+    )
+
+
 def _placeholders_from_device(device: ElkSystem) -> dict[str, str]:
     return {
         "mac_address": _short_mac(device.mac_address),
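The _get_protocol_from_url helper added above is a reverse lookup over the integration's protocol-to-URL-prefix table, falling back to the secure default when nothing matches. A standalone sketch; the PROTOCOL_MAP entries and the DEFAULT_SECURE_PROTOCOL value below are illustrative stand-ins, and only the next()-over-a-generator shape mirrors the diff:

    # Assumed table shape: protocol name -> URL prefix.
    PROTOCOL_MAP = {
        "secure": "elks://",
        "non-secure": "elk://",
        "serial": "serial://",
    }
    DEFAULT_SECURE_PROTOCOL = "secure"


    def get_protocol_from_url(url: str) -> str:
        """Return the first protocol whose prefix matches, else the secure default."""
        return next(
            (k for k, v in PROTOCOL_MAP.items() if url.startswith(v)),
            DEFAULT_SECURE_PROTOCOL,
        )


    print(get_protocol_from_url("elk://192.168.1.2"))  # non-secure
    print(get_protocol_from_url("192.168.1.2"))        # secure (fallback)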
@@ -205,6 +213,78 @@ class Elkm1ConfigFlow(ConfigFlow, domain=DOMAIN):
             )
             return await self.async_step_discovered_connection()
 
+    async def async_step_reconfigure(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Handle reconfiguration of the integration."""
+        errors: dict[str, str] = {}
+        reconfigure_entry = self._get_reconfigure_entry()
+        existing_data = reconfigure_entry.data
+
+        if user_input is not None:
+            validate_input_data = dict(user_input)
+            validate_input_data[CONF_PREFIX] = existing_data.get(CONF_PREFIX, "")
+
+            try:
+                info = await validate_input(
+                    validate_input_data, reconfigure_entry.unique_id
+                )
+            except TimeoutError:
+                errors["base"] = "cannot_connect"
+            except InvalidAuth:
+                errors[CONF_PASSWORD] = "invalid_auth"
+            except Exception:
+                _LOGGER.exception("Unexpected exception during reconfiguration")
+                errors["base"] = "unknown"
+            else:
+                # Discover the device at the provided address to obtain its MAC (unique_id)
+                device = await async_discover_device(
+                    self.hass, validate_input_data[CONF_ADDRESS]
+                )
+                if device is not None and device.mac_address:
+                    await self.async_set_unique_id(dr.format_mac(device.mac_address))
+                    self._abort_if_unique_id_mismatch()  # aborts if user tried to switch devices
+                else:
+                    # If we cannot confirm identity, keep existing behavior (don't block reconfigure)
+                    await self.async_set_unique_id(reconfigure_entry.unique_id)
+
+                return self.async_update_reload_and_abort(
+                    reconfigure_entry,
+                    data_updates={
+                        **reconfigure_entry.data,
+                        CONF_HOST: info[CONF_HOST],
+                        CONF_USERNAME: validate_input_data[CONF_USERNAME],
+                        CONF_PASSWORD: validate_input_data[CONF_PASSWORD],
+                        CONF_PREFIX: info[CONF_PREFIX],
+                    },
+                    reason="reconfigure_successful",
+                )
+
+        return self.async_show_form(
+            step_id="reconfigure",
+            data_schema=vol.Schema(
+                {
+                    vol.Optional(
+                        CONF_USERNAME,
+                        default=existing_data.get(CONF_USERNAME, ""),
+                    ): str,
+                    vol.Optional(
+                        CONF_PASSWORD,
+                        default="",
+                    ): str,
+                    vol.Required(
+                        CONF_ADDRESS,
+                        default=hostname_from_url(existing_data[CONF_HOST]),
+                    ): str,
+                    vol.Required(
+                        CONF_PROTOCOL,
+                        default=_get_protocol_from_url(existing_data[CONF_HOST]),
+                    ): vol.In(ALL_PROTOCOLS),
+                }
+            ),
+            errors=errors,
+        )
+
     async def async_step_user(
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
@@ -249,12 +329,14 @@ class Elkm1ConfigFlow(ConfigFlow, domain=DOMAIN):
 
         try:
             info = await validate_input(user_input, self.unique_id)
-        except TimeoutError:
+        except TimeoutError as ex:
+            _LOGGER.debug("Connection timed out: %s", ex)
             return {"base": "cannot_connect"}, None
-        except InvalidAuth:
+        except InvalidAuth as ex:
+            _LOGGER.debug("Invalid auth for %s: %s", user_input.get(CONF_HOST), ex)
             return {CONF_PASSWORD: "invalid_auth"}, None
         except Exception:
-            _LOGGER.exception("Unexpected exception")
+            _LOGGER.exception("Unexpected error validating input")
             return {"base": "unknown"}, None
 
         if importing:
@@ -14,7 +14,11 @@ from elkm1_lib.util import pretty_const
 from elkm1_lib.zones import Zone
 import voluptuous as vol
 
-from homeassistant.components.sensor import SensorEntity
+from homeassistant.components.sensor import (
+    SensorDeviceClass,
+    SensorEntity,
+    SensorStateClass,
+)
 from homeassistant.const import EntityCategory, UnitOfElectricPotential
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
@@ -32,6 +36,16 @@ SERVICE_SENSOR_ZONE_BYPASS = "sensor_zone_bypass"
 SERVICE_SENSOR_ZONE_TRIGGER = "sensor_zone_trigger"
 UNDEFINED_TEMPERATURE = -40
 
+_DEVICE_CLASS_MAP: dict[ZoneType, SensorDeviceClass] = {
+    ZoneType.TEMPERATURE: SensorDeviceClass.TEMPERATURE,
+    ZoneType.ANALOG_ZONE: SensorDeviceClass.VOLTAGE,
+}
+
+_STATE_CLASS_MAP: dict[ZoneType, SensorStateClass] = {
+    ZoneType.TEMPERATURE: SensorStateClass.MEASUREMENT,
+    ZoneType.ANALOG_ZONE: SensorStateClass.MEASUREMENT,
+}
+
 ELK_SET_COUNTER_SERVICE_SCHEMA: VolDictType = {
     vol.Required(ATTR_VALUE): vol.All(vol.Coerce(int), vol.Range(0, 65535))
 }
@@ -248,6 +262,16 @@ class ElkZone(ElkSensor):
             return self._temperature_unit
         return None
 
+    @property
+    def device_class(self) -> SensorDeviceClass | None:
+        """Return the device class of the sensor."""
+        return _DEVICE_CLASS_MAP.get(self._element.definition)
+
+    @property
+    def state_class(self) -> SensorStateClass | None:
+        """Return the state class of the sensor."""
+        return _STATE_CLASS_MAP.get(self._element.definition)
+
     @property
     def native_unit_of_measurement(self) -> str | None:
         """Return the unit of measurement."""
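The ElkZone properties above replace per-zone-type conditionals with two module-level lookup tables; unmapped zone types simply fall through to None. A condensed sketch of that dispatch, where the ZoneType values and class body are stand-ins and only the dict.get() pattern mirrors the diff:

    import enum


    class ZoneType(enum.Enum):  # stand-in for elkm1_lib's ZoneType
        TEMPERATURE = 1
        ANALOG_ZONE = 2
        BURGLAR = 3


    _DEVICE_CLASS_MAP = {
        ZoneType.TEMPERATURE: "temperature",
        ZoneType.ANALOG_ZONE: "voltage",
    }


    class ElkZone:
        def __init__(self, definition: ZoneType) -> None:
            self.definition = definition

        @property
        def device_class(self):
            # .get() returns None for unmapped types instead of raising.
            return _DEVICE_CLASS_MAP.get(self.definition)


    print(ElkZone(ZoneType.TEMPERATURE).device_class)  # temperature
    print(ElkZone(ZoneType.BURGLAR).device_class)      # None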
@@ -17,8 +17,8 @@
         "address": "The IP address or domain or serial port if connecting via serial.",
         "username": "[%key:common::config_flow::data::username%]",
         "password": "[%key:common::config_flow::data::password%]",
-        "prefix": "A unique prefix (leave blank if you only have one ElkM1).",
-        "temperature_unit": "The temperature unit ElkM1 uses."
+        "prefix": "A unique prefix (leave blank if you only have one Elk-M1).",
+        "temperature_unit": "The temperature unit Elk-M1 uses."
       }
     },
     "discovered_connection": {
@@ -30,6 +30,16 @@
         "password": "[%key:common::config_flow::data::password%]",
         "temperature_unit": "[%key:component::elkm1::config::step::manual_connection::data::temperature_unit%]"
       }
     },
+    "reconfigure": {
+      "title": "Reconfigure Elk-M1 Control",
+      "description": "[%key:component::elkm1::config::step::manual_connection::description%]",
+      "data": {
+        "protocol": "[%key:component::elkm1::config::step::manual_connection::data::protocol%]",
+        "address": "[%key:component::elkm1::config::step::manual_connection::data::address%]",
+        "username": "[%key:common::config_flow::data::username%]",
+        "password": "[%key:common::config_flow::data::password%]"
+      }
+    }
   },
   "error": {
@@ -42,8 +52,10 @@
     "unknown": "[%key:common::config_flow::error::unknown%]",
     "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
     "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
-    "already_configured": "An ElkM1 with this prefix is already configured",
-    "address_already_configured": "An ElkM1 with this address is already configured"
+    "already_configured": "An Elk-M1 with this prefix is already configured",
+    "address_already_configured": "An Elk-M1 with this address is already configured",
+    "reconfigure_successful": "Successfully reconfigured Elk-M1 integration",
+    "unique_id_mismatch": "Reconfigure should be used for the same device not a new one"
   }
 },
 "services": {
@@ -69,7 +81,7 @@
     },
     "alarm_arm_home_instant": {
       "name": "Alarm arm home instant",
-      "description": "Arms the ElkM1 in home instant mode.",
+      "description": "Arms the Elk-M1 in home instant mode.",
       "fields": {
         "code": {
           "name": "Code",
@@ -79,7 +91,7 @@
     },
     "alarm_arm_night_instant": {
       "name": "Alarm arm night instant",
-      "description": "Arms the ElkM1 in night instant mode.",
+      "description": "Arms the Elk-M1 in night instant mode.",
       "fields": {
         "code": {
           "name": "Code",
@@ -89,7 +101,7 @@
     },
     "alarm_arm_vacation": {
       "name": "Alarm arm vacation",
-      "description": "Arms the ElkM1 in vacation mode.",
+      "description": "Arms the Elk-M1 in vacation mode.",
       "fields": {
         "code": {
           "name": "Code",
@@ -99,7 +111,7 @@
     },
     "alarm_display_message": {
       "name": "Alarm display message",
-      "description": "Displays a message on all of the ElkM1 keypads for an area.",
+      "description": "Displays a message on all of the Elk-M1 keypads for an area.",
       "fields": {
         "clear": {
           "name": "Clear",
@@ -135,7 +147,7 @@
     },
     "speak_phrase": {
       "name": "Speak phrase",
-      "description": "Speaks a phrase. See list of phrases in ElkM1 ASCII Protocol documentation.",
+      "description": "Speaks a phrase. See list of phrases in Elk-M1 ASCII Protocol documentation.",
       "fields": {
         "number": {
           "name": "Phrase number",
@@ -149,7 +161,7 @@
     },
     "speak_word": {
       "name": "Speak word",
-      "description": "Speaks a word. See list of words in ElkM1 ASCII Protocol documentation.",
+      "description": "Speaks a word. See list of words in Elk-M1 ASCII Protocol documentation.",
       "fields": {
         "number": {
           "name": "Word number",
@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/emoncms",
   "iot_class": "local_polling",
-  "requirements": ["pyemoncms==0.1.2"]
+  "requirements": ["pyemoncms==0.1.3"]
 }
@@ -5,5 +5,5 @@
   "documentation": "https://www.home-assistant.io/integrations/emoncms_history",
   "iot_class": "local_polling",
   "quality_scale": "legacy",
-  "requirements": ["pyemoncms==0.1.2"]
+  "requirements": ["pyemoncms==0.1.3"]
 }
@@ -1,7 +1,7 @@
 {
   "domain": "enocean",
   "name": "EnOcean",
-  "codeowners": ["@bdurrer"],
+  "codeowners": [],
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/enocean",
   "iot_class": "local_push",
@@ -52,7 +52,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: Eq3ConfigEntry) -> bool:
             f"[{eq3_config.mac_address}] Device could not be found"
         )
 
-    thermostat = Thermostat(mac_address=device)  # type: ignore[arg-type]
+    thermostat = Thermostat(device)
 
     entry.runtime_data = Eq3ConfigEntryData(
         eq3_config=eq3_config, thermostat=thermostat
@@ -22,5 +22,5 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["eq3btsmart"],
-  "requirements": ["eq3btsmart==2.1.0", "bleak-esphome==3.3.0"]
+  "requirements": ["eq3btsmart==2.3.0"]
 }
@@ -127,27 +127,39 @@ class EsphomeAssistSatellite(
             available_wake_words=[], active_wake_words=[], max_active_wake_words=1
         )
 
-    @property
-    def pipeline_entity_id(self) -> str | None:
-        """Return the entity ID of the pipeline to use for the next conversation."""
-        assert self._entry_data.device_info is not None
+        self._active_pipeline_index = 0
+
+    def _get_entity_id(self, suffix: str) -> str | None:
+        """Return the entity id for pipeline select, etc."""
+        if self._entry_data.device_info is None:
+            return None
+
         ent_reg = er.async_get(self.hass)
         return ent_reg.async_get_entity_id(
             Platform.SELECT,
             DOMAIN,
-            f"{self._entry_data.device_info.mac_address}-pipeline",
+            f"{self._entry_data.device_info.mac_address}-{suffix}",
         )
 
+    @property
+    def pipeline_entity_id(self) -> str | None:
+        """Return the entity ID of the primary pipeline to use for the next conversation."""
+        return self.get_pipeline_entity(self._active_pipeline_index)
+
+    def get_pipeline_entity(self, index: int) -> str | None:
+        """Return the entity ID of a pipeline by index."""
+        id_suffix = "" if index < 1 else f"_{index + 1}"
+        return self._get_entity_id(f"pipeline{id_suffix}")
+
+    def get_wake_word_entity(self, index: int) -> str | None:
+        """Return the entity ID of a wake word by index."""
+        id_suffix = "" if index < 1 else f"_{index + 1}"
+        return self._get_entity_id(f"wake_word{id_suffix}")
+
     @property
     def vad_sensitivity_entity_id(self) -> str | None:
         """Return the entity ID of the VAD sensitivity to use for the next conversation."""
-        assert self._entry_data.device_info is not None
-        ent_reg = er.async_get(self.hass)
-        return ent_reg.async_get_entity_id(
-            Platform.SELECT,
-            DOMAIN,
-            f"{self._entry_data.device_info.mac_address}-vad_sensitivity",
-        )
+        return self._get_entity_id("vad_sensitivity")
 
     @callback
     def async_get_configuration(
@@ -235,6 +247,7 @@ class EsphomeAssistSatellite(
             )
         )
 
+        assert self._attr_supported_features is not None
         if feature_flags & VoiceAssistantFeature.ANNOUNCE:
             # Device supports announcements
             self._attr_supported_features |= (
@@ -257,8 +270,8 @@ class EsphomeAssistSatellite(
 
         # Update wake word select when config is updated
         self.async_on_remove(
-            self._entry_data.async_register_assist_satellite_set_wake_word_callback(
-                self.async_set_wake_word
+            self._entry_data.async_register_assist_satellite_set_wake_words_callback(
+                self.async_set_wake_words
             )
         )
 
@@ -482,8 +495,31 @@ class EsphomeAssistSatellite(
             # ANNOUNCEMENT format from media player
             self._update_tts_format()
 
-        # Run the pipeline
-        _LOGGER.debug("Running pipeline from %s to %s", start_stage, end_stage)
+        # Run the appropriate pipeline.
+        self._active_pipeline_index = 0
+
+        maybe_pipeline_index = 0
+        while True:
+            if not (ww_entity_id := self.get_wake_word_entity(maybe_pipeline_index)):
+                break
+
+            if not (ww_state := self.hass.states.get(ww_entity_id)):
+                continue
+
+            if ww_state.state == wake_word_phrase:
+                # First match
+                self._active_pipeline_index = maybe_pipeline_index
+                break
+
+            # Try next wake word select
+            maybe_pipeline_index += 1
+
+        _LOGGER.debug(
+            "Running pipeline %s from %s to %s",
+            self._active_pipeline_index + 1,
+            start_stage,
+            end_stage,
+        )
         self._pipeline_task = self.config_entry.async_create_background_task(
             self.hass,
             self.async_accept_pipeline_from_satellite(
@@ -514,6 +550,7 @@ class EsphomeAssistSatellite(
     def handle_pipeline_finished(self) -> None:
         """Handle when pipeline has finished running."""
         self._stop_udp_server()
+        self._active_pipeline_index = 0
         _LOGGER.debug("Pipeline finished")
 
     def handle_timer_event(
@@ -542,15 +579,15 @@ class EsphomeAssistSatellite(
             self.tts_response_finished()
 
     @callback
-    def async_set_wake_word(self, wake_word_id: str) -> None:
-        """Set active wake word and update config on satellite."""
-        self._satellite_config.active_wake_words = [wake_word_id]
+    def async_set_wake_words(self, wake_word_ids: list[str]) -> None:
+        """Set active wake words and update config on satellite."""
+        self._satellite_config.active_wake_words = wake_word_ids
         self.config_entry.async_create_background_task(
             self.hass,
             self.async_set_configuration(self._satellite_config),
            "esphome_voice_assistant_set_config",
         )
-        _LOGGER.debug("Setting active wake word: %s", wake_word_id)
+        _LOGGER.debug("Setting active wake word(s): %s", wake_word_ids)
 
     def _update_tts_format(self) -> None:
         """Update the TTS format from the first media player."""
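The wake-word matching above leans on a naming convention for the satellite's select entities: the first pipeline/wake-word select keeps the bare unique-id suffix, and later ones get _2, _3, and so on. A condensed sketch of that suffix scheme, mirroring get_pipeline_entity and get_wake_word_entity with the entity-registry plumbing omitted:

    def select_suffix(base: str, index: int) -> str:
        """Unique-id suffix for the Nth pipeline or wake-word select."""
        return base if index < 1 else f"{base}_{index + 1}"


    for i in range(3):
        print(select_suffix("pipeline", i), select_suffix("wake_word", i))
    # pipeline wake_word
    # pipeline_2 wake_word_2
    # pipeline_3 wake_word_3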
@@ -25,3 +25,5 @@ PROJECT_URLS = {
 # ESPHome always uses .0 for the changelog URL
 STABLE_BLE_URL_VERSION = f"{STABLE_BLE_VERSION.major}.{STABLE_BLE_VERSION.minor}.0"
 DEFAULT_URL = f"https://esphome.io/changelog/{STABLE_BLE_URL_VERSION}.html"
+
+NO_WAKE_WORD: Final[str] = "no_wake_word"
@@ -177,9 +177,10 @@ class RuntimeEntryData:
     assist_satellite_config_update_callbacks: list[
         Callable[[AssistSatelliteConfiguration], None]
     ] = field(default_factory=list)
-    assist_satellite_set_wake_word_callbacks: list[Callable[[str], None]] = field(
-        default_factory=list
+    assist_satellite_set_wake_words_callbacks: list[Callable[[list[str]], None]] = (
+        field(default_factory=list)
     )
+    assist_satellite_wake_words: dict[int, str] = field(default_factory=dict)
     device_id_to_name: dict[int, str] = field(default_factory=dict)
     entity_removal_callbacks: dict[EntityInfoKey, list[CALLBACK_TYPE]] = field(
         default_factory=dict
@@ -501,19 +502,28 @@ class RuntimeEntryData:
             callback_(config)
 
     @callback
-    def async_register_assist_satellite_set_wake_word_callback(
+    def async_register_assist_satellite_set_wake_words_callback(
         self,
-        callback_: Callable[[str], None],
+        callback_: Callable[[list[str]], None],
     ) -> CALLBACK_TYPE:
         """Register to receive callbacks when the Assist satellite's wake word is set."""
-        self.assist_satellite_set_wake_word_callbacks.append(callback_)
-        return partial(self.assist_satellite_set_wake_word_callbacks.remove, callback_)
+        self.assist_satellite_set_wake_words_callbacks.append(callback_)
+        return partial(self.assist_satellite_set_wake_words_callbacks.remove, callback_)
 
     @callback
-    def async_assist_satellite_set_wake_word(self, wake_word_id: str) -> None:
-        """Notify listeners that the Assist satellite wake word has been set."""
-        for callback_ in self.assist_satellite_set_wake_word_callbacks.copy():
-            callback_(wake_word_id)
+    def async_assist_satellite_set_wake_word(
+        self, wake_word_index: int, wake_word_id: str | None
+    ) -> None:
+        """Notify listeners that the Assist satellite wake words have been set."""
+        if wake_word_id:
+            self.assist_satellite_wake_words[wake_word_index] = wake_word_id
+        else:
+            self.assist_satellite_wake_words.pop(wake_word_index, None)
+
+        wake_word_ids = list(self.assist_satellite_wake_words.values())
+
+        for callback_ in self.assist_satellite_set_wake_words_callbacks.copy():
+            callback_(wake_word_ids)
 
     @callback
     def async_register_entity_removal_callback(
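RuntimeEntryData now tracks one wake word per index in a dict and always hands listeners the full current list, so clearing a slot (a wake_word_id of None) naturally drops it from what satellites receive. A plain-Python sketch of that bookkeeping, without the Home Assistant callback machinery:

    wake_words: dict[int, str] = {}
    listeners = [lambda ids: print("active:", ids)]


    def set_wake_word(index: int, wake_word_id: str | None) -> None:
        """Store or clear one slot, then notify listeners with the full list."""
        if wake_word_id:
            wake_words[index] = wake_word_id
        else:
            wake_words.pop(index, None)
        ids = list(wake_words.values())
        for callback in list(listeners):
            callback(ids)


    set_wake_word(0, "okay_nabu")   # active: ['okay_nabu']
    set_wake_word(1, "hey_jarvis")  # active: ['okay_nabu', 'hey_jarvis']
    set_wake_word(0, None)          # active: ['hey_jarvis']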
Some files were not shown because too many files have changed in this diff.