Mirror of https://github.com/home-assistant/core.git (synced 2025-11-14 21:40:16 +00:00)

Compare commits: immich/add ... llm-task-p
115 Commits
Commits in this comparison (SHA1 only; author and date columns were not captured):

17a5815ca1 a8d4caab01 2be6acec03 fa21269f0d 5f5869ffc6
7a2d99a450 6b669ce40c fdf4ed2aa5 1361d10cd7 8c7ba11493
29ce17abf4 c988d1ce36 ec02f6d010 9f19c4250a d7b583ae51
152e5254e2 3f8f7cd578 ed3fb62ffc 1d14e1f018 2ac8901a0d
6204fd5363 ce52ef64db 059c12798d 56aa809074 3d2dca5f0c
cdb2b407be 186ed451a9 761a0877e6 91bc56b15c d1e2c62433
524c16fbe1 2fdd3d66bc 6a1e3b60ee 434cd95a66 1a5bc2c7e0
a66e9a1a2c d880ce6bb4 c96023dcae 2f8ad4d5bf 038a848d53
ff17d79e73 a8201009f3 a349653282 355ee1178e 30c5df3eaa
10874af19a 704118b3d0 7c575d0316 ab3f11bfe7 f0357539ad
e70a2dd257 5ef99a15a5 6421973cd6 7201171eb5 1fb438fa6c
89ae68c5af c78b66d5d5 d756cf91ce 8d13bf93ab e86e793842
7e6bb021ce 680b70aa29 8eebebc586 48e4624ba0 b0cf974b34
171f7c5f81 8807c530a9 28bd90aeb0 af1eccabce afc0a2789d
78ed1097c4 2991726d35 c34596e54d 74a92e2cd8 e19f178864
9dfbccf0cb 64e503bc27 9d1e60cf7e 4160521349 14c30ef2df
e14cf8a5b9 30dbd5a900 25e6eab008 8bf562b7b6 7cb3c397b2
f44f2522ef 8c9acf5a4d e46e7f5a81 c01f521199 4a15f12a0b
8d24d775f1 aca0e69081 f4e5036275 59aba339d8 864e440685
2f6fcb5801 bdb6124aa3 613e2fd4b3 0e71ef3861 5076c10959
ab2fc4e9a6 e39edcc234 54c8e59bcd c806555879 4836930cb1
4a8faad62e ba69301dda 724c349194 9346f8d658 0af41d9cb1
b02c0419b4 0bc6408137 3f1d2b1b71 bcfdee23e3 4a50f4ffc1
.github/workflows/builder.yml (vendored, 2 changed lines)

@@ -531,7 +531,7 @@ jobs:
       - name: Generate artifact attestation
         if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
-        uses: actions/attest-build-provenance@db473fddc028af60658334401dc6fa3ffd8669fd # v2.3.0
+        uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
         with:
           subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
           subject-digest: ${{ steps.push.outputs.digest }}
.github/workflows/codeql.yml (vendored, 4 changed lines)

@@ -24,11 +24,11 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3.28.19
+        uses: github/codeql-action/init@v3.29.0
         with:
           languages: python

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3.28.19
+        uses: github/codeql-action/analyze@v3.29.0
         with:
           category: "/language:python"
.github/workflows/detect-duplicate-issues.yml (vendored, new file, 385 lines)

@@ -0,0 +1,385 @@
name: Auto-detect duplicate issues

# yamllint disable-line rule:truthy
on:
  issues:
    types: [labeled]

permissions:
  issues: write
  models: read

jobs:
  detect-duplicates:
    runs-on: ubuntu-latest

    steps:
      - name: Check if integration label was added and extract details
        id: extract
        uses: actions/github-script@v7.0.1
        with:
          script: |
            // Debug: Log the event payload
            console.log('Event name:', context.eventName);
            console.log('Event action:', context.payload.action);
            console.log('Event payload keys:', Object.keys(context.payload));

            // Check the specific label that was added
            const addedLabel = context.payload.label;
            if (!addedLabel) {
              console.log('No label found in labeled event payload');
              core.setOutput('should_continue', 'false');
              return;
            }

            console.log(`Label added: ${addedLabel.name}`);

            if (!addedLabel.name.startsWith('integration:')) {
              console.log('Added label is not an integration label, skipping duplicate detection');
              core.setOutput('should_continue', 'false');
              return;
            }

            console.log(`Integration label added: ${addedLabel.name}`);

            let currentIssue;
            let integrationLabels = [];

            try {
              const issue = await github.rest.issues.get({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: context.payload.issue.number
              });

              currentIssue = issue.data;

              // Check if potential-duplicate label already exists
              const hasPotentialDuplicateLabel = currentIssue.labels
                .some(label => label.name === 'potential-duplicate');

              if (hasPotentialDuplicateLabel) {
                console.log('Issue already has potential-duplicate label, skipping duplicate detection');
                core.setOutput('should_continue', 'false');
                return;
              }

              integrationLabels = currentIssue.labels
                .filter(label => label.name.startsWith('integration:'))
                .map(label => label.name);
            } catch (error) {
              core.error(`Failed to fetch issue #${context.payload.issue.number}:`, error.message);
              core.setOutput('should_continue', 'false');
              return;
            }

            // Check if we've already posted a duplicate detection comment recently
            let comments;
            try {
              comments = await github.rest.issues.listComments({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: context.payload.issue.number,
                per_page: 10
              });
            } catch (error) {
              core.error('Failed to fetch comments:', error.message);
              // Continue anyway, worst case we might post a duplicate comment
              comments = { data: [] };
            }

            // Check if we've already posted a duplicate detection comment
            const recentDuplicateComment = comments.data.find(comment =>
              comment.user && comment.user.login === 'github-actions[bot]' &&
              comment.body.includes('<!-- workflow: detect-duplicate-issues -->')
            );

            if (recentDuplicateComment) {
              console.log('Already posted duplicate detection comment, skipping');
              core.setOutput('should_continue', 'false');
              return;
            }

            core.setOutput('should_continue', 'true');
            core.setOutput('current_number', currentIssue.number);
            core.setOutput('current_title', currentIssue.title);
            core.setOutput('current_body', currentIssue.body);
            core.setOutput('current_url', currentIssue.html_url);
            core.setOutput('integration_labels', JSON.stringify(integrationLabels));

            console.log(`Current issue: #${currentIssue.number}`);
            console.log(`Integration labels: ${integrationLabels.join(', ')}`);

      - name: Fetch similar issues
        id: fetch_similar
        if: steps.extract.outputs.should_continue == 'true'
        uses: actions/github-script@v7.0.1
        env:
          INTEGRATION_LABELS: ${{ steps.extract.outputs.integration_labels }}
          CURRENT_NUMBER: ${{ steps.extract.outputs.current_number }}
        with:
          script: |
            const integrationLabels = JSON.parse(process.env.INTEGRATION_LABELS);
            const currentNumber = parseInt(process.env.CURRENT_NUMBER);

            if (integrationLabels.length === 0) {
              console.log('No integration labels found, skipping duplicate detection');
              core.setOutput('has_similar', 'false');
              return;
            }

            // Use GitHub search API to find issues with matching integration labels
            console.log(`Searching for issues with integration labels: ${integrationLabels.join(', ')}`);

            // Build search query for issues with any of the current integration labels
            const labelQueries = integrationLabels.map(label => `label:"${label}"`);

            // Calculate date 6 months ago
            const sixMonthsAgo = new Date();
            sixMonthsAgo.setMonth(sixMonthsAgo.getMonth() - 6);
            const dateFilter = `created:>=${sixMonthsAgo.toISOString().split('T')[0]}`;

            let searchQuery;

            if (labelQueries.length === 1) {
              searchQuery = `repo:${context.repo.owner}/${context.repo.repo} is:issue ${labelQueries[0]} ${dateFilter}`;
            } else {
              searchQuery = `repo:${context.repo.owner}/${context.repo.repo} is:issue (${labelQueries.join(' OR ')}) ${dateFilter}`;
            }

            console.log(`Search query: ${searchQuery}`);

            let result;
            try {
              result = await github.rest.search.issuesAndPullRequests({
                q: searchQuery,
                per_page: 15,
                sort: 'updated',
                order: 'desc'
              });
            } catch (error) {
              core.error('Failed to search for similar issues:', error.message);
              if (error.status === 403 && error.message.includes('rate limit')) {
                core.error('GitHub API rate limit exceeded');
              }
              core.setOutput('has_similar', 'false');
              return;
            }

            // Filter out the current issue, pull requests, and newer issues (higher numbers)
            const similarIssues = result.data.items
              .filter(item =>
                item.number !== currentNumber &&
                !item.pull_request &&
                item.number < currentNumber // Only include older issues (lower numbers)
              )
              .map(item => ({
                number: item.number,
                title: item.title,
                body: item.body,
                url: item.html_url,
                state: item.state,
                createdAt: item.created_at,
                updatedAt: item.updated_at,
                comments: item.comments,
                labels: item.labels.map(l => l.name)
              }));

            console.log(`Found ${similarIssues.length} issues with matching integration labels`);
            console.log('Raw similar issues:', JSON.stringify(similarIssues.slice(0, 3), null, 2));

            if (similarIssues.length === 0) {
              console.log('No similar issues found, setting has_similar to false');
              core.setOutput('has_similar', 'false');
              return;
            }

            console.log('Similar issues found, setting has_similar to true');
            core.setOutput('has_similar', 'true');

            // Clean the issue data to prevent JSON parsing issues
            const cleanedIssues = similarIssues.slice(0, 15).map(item => {
              // Handle body with improved truncation and null handling
              let cleanBody = '';
              if (item.body && typeof item.body === 'string') {
                // Remove control characters
                const cleaned = item.body.replace(/[\u0000-\u001F\u007F-\u009F]/g, '');
                // Truncate to 1000 characters and add ellipsis if needed
                cleanBody = cleaned.length > 1000
                  ? cleaned.substring(0, 1000) + '...'
                  : cleaned;
              }

              return {
                number: item.number,
                title: item.title.replace(/[\u0000-\u001F\u007F-\u009F]/g, ''), // Remove control characters
                body: cleanBody,
                url: item.url,
                state: item.state,
                createdAt: item.createdAt,
                updatedAt: item.updatedAt,
                comments: item.comments,
                labels: item.labels
              };
            });

            console.log(`Cleaned issues count: ${cleanedIssues.length}`);
            console.log('First cleaned issue:', JSON.stringify(cleanedIssues[0], null, 2));

            core.setOutput('similar_issues', JSON.stringify(cleanedIssues));

      - name: Detect duplicates using AI
        id: ai_detection
        if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
        uses: actions/ai-inference@v1.1.0
        with:
          model: openai/gpt-4o
          system-prompt: |
            You are a Home Assistant issue duplicate detector. Your task is to identify TRUE DUPLICATES - issues that report the EXACT SAME problem, not just similar or related issues.

            CRITICAL: An issue is ONLY a duplicate if:
            - It describes the SAME problem with the SAME root cause
            - Issues about the same integration but different problems are NOT duplicates
            - Issues with similar symptoms but different causes are NOT duplicates

            Important considerations:
            - Open issues are more relevant than closed ones for duplicate detection
            - Recently updated issues may indicate ongoing work or discussion
            - Issues with more comments are generally more relevant and active
            - Older closed issues might be resolved differently than newer approaches
            - Consider the time between issues - very old issues may have different contexts

            Rules:
            1. ONLY mark as duplicate if the issues describe IDENTICAL problems
            2. Look for issues that report the same problem or request the same functionality
            3. Different error messages = NOT a duplicate (even if same integration)
            4. For CLOSED issues, only mark as duplicate if they describe the EXACT same problem
            5. For OPEN issues, use a lower threshold (90%+ similarity)
            6. Prioritize issues with higher comment counts as they indicate more activity/relevance
            7. When in doubt, do NOT mark as duplicate
            8. Return ONLY a JSON array of issue numbers that are duplicates
            9. If no duplicates are found, return an empty array: []
            10. Maximum 5 potential duplicates, prioritize open issues with comments
            11. Consider the age of issues - prefer recent duplicates over very old ones

            Example response format:
            [1234, 5678, 9012]

          prompt: |
            Current issue (just created):
            Title: ${{ steps.extract.outputs.current_title }}
            Body: ${{ steps.extract.outputs.current_body }}

            Other issues to compare against (each includes state, creation date, last update, and comment count):
            ${{ steps.fetch_similar.outputs.similar_issues }}

            Analyze these issues and identify which ones describe IDENTICAL problems and thus are duplicates of the current issue. When sorting them, consider their state (open/closed), how recently they were updated, and their comment count (higher = more relevant).

          max-tokens: 100

      - name: Post duplicate detection results
        id: post_results
        if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
        uses: actions/github-script@v7.0.1
        env:
          AI_RESPONSE: ${{ steps.ai_detection.outputs.response }}
          SIMILAR_ISSUES: ${{ steps.fetch_similar.outputs.similar_issues }}
        with:
          script: |
            const aiResponse = process.env.AI_RESPONSE;

            console.log('Raw AI response:', JSON.stringify(aiResponse));

            let duplicateNumbers = [];
            try {
              // Clean the response of any potential control characters
              const cleanResponse = aiResponse.trim().replace(/[\u0000-\u001F\u007F-\u009F]/g, '');
              console.log('Cleaned AI response:', cleanResponse);

              duplicateNumbers = JSON.parse(cleanResponse);

              // Ensure it's an array and contains only numbers
              if (!Array.isArray(duplicateNumbers)) {
                console.log('AI response is not an array, trying to extract numbers');
                const numberMatches = cleanResponse.match(/\d+/g);
                duplicateNumbers = numberMatches ? numberMatches.map(n => parseInt(n)) : [];
              }

              // Filter to only valid numbers
              duplicateNumbers = duplicateNumbers.filter(n => typeof n === 'number' && !isNaN(n));

            } catch (error) {
              console.log('Failed to parse AI response as JSON:', error.message);
              console.log('Raw response:', aiResponse);

              // Fallback: try to extract numbers from the response
              const numberMatches = aiResponse.match(/\d+/g);
              duplicateNumbers = numberMatches ? numberMatches.map(n => parseInt(n)) : [];
              console.log('Extracted numbers as fallback:', duplicateNumbers);
            }

            if (!Array.isArray(duplicateNumbers) || duplicateNumbers.length === 0) {
              console.log('No duplicates detected by AI');
              return;
            }

            console.log(`AI detected ${duplicateNumbers.length} potential duplicates: ${duplicateNumbers.join(', ')}`);

            // Get details of detected duplicates
            const similarIssues = JSON.parse(process.env.SIMILAR_ISSUES);
            const duplicates = similarIssues.filter(issue => duplicateNumbers.includes(issue.number));

            if (duplicates.length === 0) {
              console.log('No matching issues found for detected numbers');
              return;
            }

            // Create comment with duplicate detection results
            const duplicateLinks = duplicates.map(issue => `- [#${issue.number}: ${issue.title}](${issue.url})`).join('\n');

            const commentBody = [
              '<!-- workflow: detect-duplicate-issues -->',
              '### 🔍 **Potential duplicate detection**',
              '',
              'I\'ve analyzed similar issues and found the following potential duplicates:',
              '',
              duplicateLinks,
              '',
              '**What to do next:**',
              '1. Please review these issues to see if they match your issue',
              '2. If you find an existing issue that covers your problem:',
              '   - Consider closing this issue',
              '   - Add your findings or 👍 on the existing issue instead',
              '3. If your issue is different or adds new aspects, please clarify how it differs',
              '',
              'This helps keep our issues organized and ensures similar issues are consolidated for better visibility.',
              '',
              '*This message was generated automatically by our duplicate detection system.*'
            ].join('\n');

            try {
              await github.rest.issues.createComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: context.payload.issue.number,
                body: commentBody
              });

              console.log(`Posted duplicate detection comment with ${duplicates.length} potential duplicates`);

              // Add the potential-duplicate label
              await github.rest.issues.addLabels({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: context.payload.issue.number,
                labels: ['potential-duplicate']
              });

              console.log('Added potential-duplicate label to the issue');
            } catch (error) {
              core.error('Failed to post duplicate detection comment or add label:', error.message);
              if (error.status === 403) {
                core.error('Permission denied or rate limit exceeded');
              }
              // Don't throw - we've done the analysis, just couldn't post the result
            }
.github/workflows/detect-non-english-issues.yml (vendored, new file, 193 lines)

@@ -0,0 +1,193 @@
name: Auto-detect non-English issues

# yamllint disable-line rule:truthy
on:
  issues:
    types: [opened]

permissions:
  issues: write
  models: read

jobs:
  detect-language:
    runs-on: ubuntu-latest

    steps:
      - name: Check issue language
        id: detect_language
        uses: actions/github-script@v7.0.1
        env:
          ISSUE_NUMBER: ${{ github.event.issue.number }}
          ISSUE_TITLE: ${{ github.event.issue.title }}
          ISSUE_BODY: ${{ github.event.issue.body }}
          ISSUE_USER_TYPE: ${{ github.event.issue.user.type }}
        with:
          script: |
            // Get the issue details from environment variables
            const issueNumber = process.env.ISSUE_NUMBER;
            const issueTitle = process.env.ISSUE_TITLE || '';
            const issueBody = process.env.ISSUE_BODY || '';
            const userType = process.env.ISSUE_USER_TYPE;

            // Skip language detection for bot users
            if (userType === 'Bot') {
              console.log('Skipping language detection for bot user');
              core.setOutput('should_continue', 'false');
              return;
            }

            console.log(`Checking language for issue #${issueNumber}`);
            console.log(`Title: ${issueTitle}`);

            // Combine title and body for language detection
            const fullText = `${issueTitle}\n\n${issueBody}`;

            // Check if the text is too short to reliably detect language
            if (fullText.trim().length < 20) {
              console.log('Text too short for reliable language detection');
              core.setOutput('should_continue', 'false'); // Skip processing for very short text
              return;
            }

            core.setOutput('issue_number', issueNumber);
            core.setOutput('issue_text', fullText);
            core.setOutput('should_continue', 'true');

      - name: Detect language using AI
        id: ai_language_detection
        if: steps.detect_language.outputs.should_continue == 'true'
        uses: actions/ai-inference@v1.1.0
        with:
          model: openai/gpt-4o-mini
          system-prompt: |
            You are a language detection system. Your task is to determine if the provided text is written in English or another language.

            Rules:
            1. Analyze the text and determine the primary language of the USER'S DESCRIPTION only
            2. IGNORE markdown headers (lines starting with #, ##, ###, etc.) as these are from issue templates, not user input
            3. IGNORE all code blocks (text between ``` or ` markers) as they may contain system-generated error messages in other languages
            4. IGNORE error messages, logs, and system output even if not in code blocks - these often appear in the user's system language
            5. Consider technical terms, code snippets, URLs, and file paths as neutral (they don't indicate non-English)
            6. Focus ONLY on the actual sentences and descriptions written by the user explaining their issue
            7. If the user's explanation/description is in English but includes non-English error messages or logs, consider it ENGLISH
            8. Return ONLY a JSON object with two fields:
               - "is_english": boolean (true if the user's description is primarily in English, false otherwise)
               - "detected_language": string (the name of the detected language, e.g., "English", "Spanish", "Chinese", etc.)
            9. Be lenient - if the user's explanation is in English with non-English system output, it's still English
            10. Common programming terms, error messages, and technical jargon should not be considered as non-English
            11. If you cannot reliably determine the language, set detected_language to "undefined"

            Example response:
            {"is_english": false, "detected_language": "Spanish"}

          prompt: |
            Please analyze the following issue text and determine if it is written in English:

            ${{ steps.detect_language.outputs.issue_text }}

          max-tokens: 50

      - name: Process non-English issues
        if: steps.detect_language.outputs.should_continue == 'true'
        uses: actions/github-script@v7.0.1
        env:
          AI_RESPONSE: ${{ steps.ai_language_detection.outputs.response }}
          ISSUE_NUMBER: ${{ steps.detect_language.outputs.issue_number }}
        with:
          script: |
            const issueNumber = parseInt(process.env.ISSUE_NUMBER);
            const aiResponse = process.env.AI_RESPONSE;

            console.log('AI language detection response:', aiResponse);

            let languageResult;
            try {
              languageResult = JSON.parse(aiResponse.trim());

              // Validate the response structure
              if (!languageResult || typeof languageResult.is_english !== 'boolean') {
                throw new Error('Invalid response structure');
              }
            } catch (error) {
              core.error(`Failed to parse AI response: ${error.message}`);
              console.log('Raw AI response:', aiResponse);

              // Log more details for debugging
              core.warning('Defaulting to English due to parsing error');

              // Default to English if we can't parse the response
              return;
            }

            if (languageResult.is_english) {
              console.log('Issue is in English, no action needed');
              return;
            }

            // If language is undefined or not detected, skip processing
            if (!languageResult.detected_language || languageResult.detected_language === 'undefined') {
              console.log('Language could not be determined, skipping processing');
              return;
            }

            console.log(`Issue detected as non-English: ${languageResult.detected_language}`);

            // Post comment explaining the language requirement
            const commentBody = [
              '<!-- workflow: detect-non-english-issues -->',
              '### 🌐 Non-English issue detected',
              '',
              `This issue appears to be written in **${languageResult.detected_language}** rather than English.`,
              '',
              'The Home Assistant project uses English as the primary language for issues to ensure that everyone in our international community can participate and help resolve issues. This allows any of our thousands of contributors to jump in and provide assistance.',
              '',
              '**What to do:**',
              '1. Re-create the issue using the English language',
              '2. If you need help with translation, consider using:',
              '   - Translation tools like Google Translate',
              '   - AI assistants like ChatGPT or Claude',
              '',
              'This helps our community provide the best possible support and ensures your issue gets the attention it deserves from our global contributor base.',
              '',
              'Thank you for your understanding! 🙏'
            ].join('\n');

            try {
              // Add comment
              await github.rest.issues.createComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: issueNumber,
                body: commentBody
              });

              console.log('Posted language requirement comment');

              // Add non-english label
              await github.rest.issues.addLabels({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: issueNumber,
                labels: ['non-english']
              });

              console.log('Added non-english label');

              // Close the issue
              await github.rest.issues.update({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: issueNumber,
                state: 'closed',
                state_reason: 'not_planned'
              });

              console.log('Closed the issue');

            } catch (error) {
              core.error('Failed to process non-English issue:', error.message);
              if (error.status === 403) {
                core.error('Permission denied or rate limit exceeded');
              }
            }
@@ -65,8 +65,8 @@ homeassistant.components.aladdin_connect.*
 homeassistant.components.alarm_control_panel.*
 homeassistant.components.alert.*
 homeassistant.components.alexa.*
+homeassistant.components.alexa_devices.*
 homeassistant.components.alpha_vantage.*
-homeassistant.components.amazon_devices.*
 homeassistant.components.amazon_polly.*
 homeassistant.components.amberelectric.*
 homeassistant.components.ambient_network.*
CODEOWNERS (generated, 10 changed lines)

@@ -57,6 +57,8 @@ build.json @home-assistant/supervisor
 /tests/components/aemet/ @Noltari
 /homeassistant/components/agent_dvr/ @ispysoftware
 /tests/components/agent_dvr/ @ispysoftware
+/homeassistant/components/ai_task/ @home-assistant/core
+/tests/components/ai_task/ @home-assistant/core
 /homeassistant/components/air_quality/ @home-assistant/core
 /tests/components/air_quality/ @home-assistant/core
 /homeassistant/components/airgradient/ @airgradienthq @joostlek
@@ -89,8 +91,8 @@ build.json @home-assistant/supervisor
 /tests/components/alert/ @home-assistant/core @frenck
 /homeassistant/components/alexa/ @home-assistant/cloud @ochlocracy @jbouwh
 /tests/components/alexa/ @home-assistant/cloud @ochlocracy @jbouwh
-/homeassistant/components/amazon_devices/ @chemelli74
-/tests/components/amazon_devices/ @chemelli74
+/homeassistant/components/alexa_devices/ @chemelli74
+/tests/components/alexa_devices/ @chemelli74
 /homeassistant/components/amazon_polly/ @jschlyter
 /homeassistant/components/amberelectric/ @madpilot
 /tests/components/amberelectric/ @madpilot
@@ -1274,8 +1276,8 @@ build.json @home-assistant/supervisor
 /tests/components/rehlko/ @bdraco @peterager
 /homeassistant/components/remote/ @home-assistant/core
 /tests/components/remote/ @home-assistant/core
-/homeassistant/components/remote_calendar/ @Thomas55555
-/tests/components/remote_calendar/ @Thomas55555
+/homeassistant/components/remote_calendar/ @Thomas55555 @allenporter
+/tests/components/remote_calendar/ @Thomas55555 @allenporter
 /homeassistant/components/renault/ @epenet
 /tests/components/renault/ @epenet
 /homeassistant/components/renson/ @jimmyd-be
Deleted file (29 lines):

@@ -1,29 +0,0 @@
"""Enum backports from standard lib.

This file contained the backport of the StrEnum of Python 3.11.

Since we have dropped support for Python 3.10, we can remove this backport.
This file is kept for now to avoid breaking custom components that might
import it.
"""

from __future__ import annotations

from enum import StrEnum as _StrEnum
from functools import partial

from homeassistant.helpers.deprecation import (
    DeprecatedAlias,
    all_with_deprecated_constants,
    check_if_deprecated_constant,
    dir_with_deprecated_constants,
)

# StrEnum deprecated as of 2024.5 use enum.StrEnum instead.
_DEPRECATED_StrEnum = DeprecatedAlias(_StrEnum, "enum.StrEnum", "2025.5")

__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
    dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())
Deleted file (31 lines):

@@ -1,31 +0,0 @@
"""Functools backports from standard lib.

This file contained the backport of the cached_property implementation of Python 3.12.

Since we have dropped support for Python 3.11, we can remove this backport.
This file is kept for now to avoid breaking custom components that might
import it.
"""

from __future__ import annotations

# pylint: disable-next=hass-deprecated-import
from functools import cached_property as _cached_property, partial

from homeassistant.helpers.deprecation import (
    DeprecatedAlias,
    all_with_deprecated_constants,
    check_if_deprecated_constant,
    dir_with_deprecated_constants,
)

# cached_property deprecated as of 2024.5 use functools.cached_property instead.
_DEPRECATED_cached_property = DeprecatedAlias(
    _cached_property, "functools.cached_property", "2025.5"
)

__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
    dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())
@@ -3,7 +3,7 @@
   "name": "Amazon",
   "integrations": [
     "alexa",
-    "amazon_devices",
+    "alexa_devices",
     "amazon_polly",
     "aws",
     "aws_s3",
@@ -6,7 +6,7 @@ from jaraco.abode.exceptions import Exception as AbodeException
 import voluptuous as vol

 from homeassistant.const import ATTR_ENTITY_ID
-from homeassistant.core import HomeAssistant, ServiceCall
+from homeassistant.core import HomeAssistant, ServiceCall, callback
 from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.dispatcher import dispatcher_send
@@ -70,6 +70,7 @@ def _trigger_automation(call: ServiceCall) -> None:
     dispatcher_send(call.hass, signal)


+@callback
 def async_setup_services(hass: HomeAssistant) -> None:
     """Home Assistant services."""
homeassistant/components/ai_task/__init__.py (new file, 99 lines)

@@ -0,0 +1,99 @@
"""Integration to offer AI tasks to Home Assistant."""

import logging

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, storage
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.typing import UNDEFINED, ConfigType, UndefinedType

from .const import DATA_COMPONENT, DATA_PREFERENCES, DOMAIN
from .entity import AITaskEntity
from .http import async_setup as async_setup_conversation_http
from .task import GenTextTask, GenTextTaskResult, async_generate_text

__all__ = [
    "DOMAIN",
    "AITaskEntity",
    "GenTextTask",
    "GenTextTaskResult",
    "async_generate_text",
    "async_setup",
    "async_setup_entry",
    "async_unload_entry",
]

_LOGGER = logging.getLogger(__name__)

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Register the process service."""
    entity_component = EntityComponent[AITaskEntity](_LOGGER, DOMAIN, hass)
    hass.data[DATA_COMPONENT] = entity_component
    hass.data[DATA_PREFERENCES] = AITaskPreferences(hass)
    await hass.data[DATA_PREFERENCES].async_load()
    async_setup_conversation_http(hass)
    return True


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up a config entry."""
    return await hass.data[DATA_COMPONENT].async_setup_entry(entry)


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.data[DATA_COMPONENT].async_unload_entry(entry)


class AITaskPreferences:
    """AI Task preferences."""

    gen_text_entity_id: str | None = None

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize the preferences."""
        self._store: storage.Store[dict[str, str | None]] = storage.Store(
            hass, 1, DOMAIN
        )

    async def async_load(self) -> None:
        """Load the data from the store."""
        data = await self._store.async_load()
        if data is None:
            return
        self.gen_text_entity_id = data.get("gen_text_entity_id")

    @callback
    def async_set_preferences(
        self,
        *,
        gen_text_entity_id: str | None | UndefinedType = UNDEFINED,
    ) -> None:
        """Set the preferences."""
        changed = False
        for key, value in (("gen_text_entity_id", gen_text_entity_id),):
            if value is not UNDEFINED:
                if getattr(self, key) != value:
                    setattr(self, key, value)
                    changed = True

        if not changed:
            return

        self._store.async_delay_save(
            lambda: {
                "gen_text_entity_id": self.gen_text_entity_id,
            },
            10,
        )

    @callback
    def as_dict(self) -> dict[str, str | None]:
        """Get the current preferences."""
        return {
            "gen_text_entity_id": self.gen_text_entity_id,
        }
homeassistant/components/ai_task/const.py (new file, 21 lines)

@@ -0,0 +1,21 @@
"""Constants for the AI Task integration."""

from __future__ import annotations

from typing import TYPE_CHECKING

from homeassistant.util.hass_dict import HassKey

if TYPE_CHECKING:
    from homeassistant.helpers.entity_component import EntityComponent

    from . import AITaskPreferences
    from .entity import AITaskEntity

DOMAIN = "ai_task"
DATA_COMPONENT: HassKey[EntityComponent[AITaskEntity]] = HassKey(DOMAIN)
DATA_PREFERENCES: HassKey[AITaskPreferences] = HassKey(f"{DOMAIN}_preferences")

DEFAULT_SYSTEM_PROMPT = (
    "You are a Home Assistant expert and help users with their tasks."
)
homeassistant/components/ai_task/entity.py (new file, 95 lines)

@@ -0,0 +1,95 @@
"""Entity for the AI Task integration."""

from collections.abc import AsyncGenerator
import contextlib
from typing import final

from homeassistant.components.conversation import (
    ChatLog,
    UserContent,
    async_get_chat_log,
)
from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
from homeassistant.helpers import llm
from homeassistant.helpers.chat_session import async_get_chat_session
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.util import dt as dt_util

from .const import DEFAULT_SYSTEM_PROMPT, DOMAIN
from .task import GenTextTask, GenTextTaskResult


class AITaskEntity(RestoreEntity):
    """Entity that supports conversations."""

    _attr_should_poll = False
    __last_activity: str | None = None

    @property
    @final
    def state(self) -> str | None:
        """Return the state of the entity."""
        if self.__last_activity is None:
            return None
        return self.__last_activity

    async def async_internal_added_to_hass(self) -> None:
        """Call when the entity is added to hass."""
        await super().async_internal_added_to_hass()
        state = await self.async_get_last_state()
        if (
            state is not None
            and state.state is not None
            and state.state not in (STATE_UNAVAILABLE, STATE_UNKNOWN)
        ):
            self.__last_activity = state.state

    @final
    @contextlib.asynccontextmanager
    async def _async_get_ai_task_chat_log(
        self,
        task: GenTextTask,
    ) -> AsyncGenerator[ChatLog]:
        """Context manager used to manage the ChatLog used during an AI Task."""
        # pylint: disable-next=contextmanager-generator-missing-cleanup
        with (
            async_get_chat_session(self.hass) as session,
            async_get_chat_log(
                self.hass,
                session,
                None,
            ) as chat_log,
        ):
            await chat_log.async_provide_llm_data(
                llm.LLMContext(
                    platform=self.platform.domain,
                    context=None,
                    language=None,
                    assistant=DOMAIN,
                    device_id=None,
                ),
                user_llm_prompt=DEFAULT_SYSTEM_PROMPT,
            )

            chat_log.async_add_user_content(UserContent(task.instructions))

            yield chat_log

    @final
    async def internal_async_generate_text(
        self,
        task: GenTextTask,
    ) -> GenTextTaskResult:
        """Run a gen text task."""
        self.__last_activity = dt_util.utcnow().isoformat()
        self.async_write_ha_state()
        async with self._async_get_ai_task_chat_log(task) as chat_log:
            return await self._async_generate_text(task, chat_log)

    async def _async_generate_text(
        self,
        task: GenTextTask,
        chat_log: ChatLog,
    ) -> GenTextTaskResult:
        """Handle a gen text task."""
        raise NotImplementedError
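For orientation only: a platform that wants to offer an AI Task entity subclasses AITaskEntity and implements _async_generate_text; the base class above handles state tracking and chat-log preparation. The sketch below is illustrative, not part of this changeset — the ExampleTaskEntity class and its echo reply are hypothetical.

# Minimal, hypothetical AI Task platform entity (illustration only).
from homeassistant.components.ai_task import AITaskEntity, GenTextTask, GenTextTaskResult
from homeassistant.components.conversation import ChatLog


class ExampleTaskEntity(AITaskEntity):
    """Hypothetical entity that echoes the task instructions back."""

    _attr_name = "Example AI Task"

    async def _async_generate_text(
        self, task: GenTextTask, chat_log: ChatLog
    ) -> GenTextTaskResult:
        # A real implementation would send the prepared chat log (system prompt
        # plus task.instructions) to an LLM; here we only echo the instructions.
        return GenTextTaskResult(
            conversation_id=chat_log.conversation_id,
            result=f"Echo: {task.instructions}",
        )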
homeassistant/components/ai_task/http.py (new file, 82 lines)

@@ -0,0 +1,82 @@
"""HTTP endpoint for AI Task integration."""

from typing import Any

import voluptuous as vol

from homeassistant.components import websocket_api
from homeassistant.core import HomeAssistant, callback

from .const import DATA_PREFERENCES
from .task import async_generate_text


@callback
def async_setup(hass: HomeAssistant) -> None:
    """Set up the HTTP API for the conversation integration."""
    websocket_api.async_register_command(hass, websocket_generate_text)
    websocket_api.async_register_command(hass, websocket_get_preferences)
    websocket_api.async_register_command(hass, websocket_set_preferences)


@websocket_api.websocket_command(
    {
        vol.Required("type"): "ai_task/generate_text",
        vol.Required("task_name"): str,
        vol.Optional("entity_id"): str,
        vol.Required("instructions"): str,
    }
)
@websocket_api.require_admin
@websocket_api.async_response
async def websocket_generate_text(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Run a generate text task."""
    msg.pop("type")
    msg_id = msg.pop("id")
    try:
        result = await async_generate_text(hass=hass, **msg)
    except ValueError as err:
        connection.send_error(msg_id, websocket_api.const.ERR_UNKNOWN_ERROR, str(err))
        return
    connection.send_result(msg_id, result.as_dict())


@websocket_api.websocket_command(
    {
        vol.Required("type"): "ai_task/preferences/get",
    }
)
@callback
def websocket_get_preferences(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Get AI task preferences."""
    preferences = hass.data[DATA_PREFERENCES]
    connection.send_result(msg["id"], preferences.as_dict())


@websocket_api.websocket_command(
    {
        vol.Required("type"): "ai_task/preferences/set",
        vol.Optional("gen_text_entity_id"): vol.Any(str, None),
    }
)
@websocket_api.require_admin
@callback
def websocket_set_preferences(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Set AI task preferences."""
    preferences = hass.data[DATA_PREFERENCES]
    msg.pop("type")
    msg_id = msg.pop("id")
    preferences.async_set_preferences(**msg)
    connection.send_result(msg_id, preferences.as_dict())
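As a quick sketch of how a WebSocket client would exercise the commands registered above: the command types and field names come from the schemas in this file, while the "id" values, entity id, and instruction text are invented for illustration.

# Hypothetical client-side payloads (illustration only).
generate_text_msg = {
    "id": 1,
    "type": "ai_task/generate_text",
    "task_name": "summarize_doorbell_event",
    "instructions": "Summarize the last doorbell event in one sentence.",
    # "entity_id" is optional; when omitted, the stored gen_text preference is used.
}

set_preferences_msg = {
    "id": 2,
    "type": "ai_task/preferences/set",
    "gen_text_entity_id": "ai_task.example_agent",  # or None to clear the preference
}

get_preferences_msg = {"id": 3, "type": "ai_task/preferences/get"}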
homeassistant/components/ai_task/manifest.json (new file, 9 lines)

@@ -0,0 +1,9 @@
{
  "domain": "ai_task",
  "name": "AI Task",
  "codeowners": ["@home-assistant/core"],
  "dependencies": ["conversation"],
  "documentation": "https://www.home-assistant.io/integrations/ai_task",
  "integration_type": "system",
  "quality_scale": "internal"
}
homeassistant/components/ai_task/task.py (new file, 68 lines)

@@ -0,0 +1,68 @@
"""AI tasks to be handled by agents."""

from __future__ import annotations

from dataclasses import dataclass

from homeassistant.core import HomeAssistant

from .const import DATA_COMPONENT, DATA_PREFERENCES


async def async_generate_text(
    hass: HomeAssistant,
    *,
    task_name: str,
    entity_id: str | None = None,
    instructions: str,
) -> GenTextTaskResult:
    """Run a task in the AI Task integration."""
    if entity_id is None:
        entity_id = hass.data[DATA_PREFERENCES].gen_text_entity_id

    if entity_id is None:
        raise ValueError("No entity_id provided and no preferred entity set")

    entity = hass.data[DATA_COMPONENT].get_entity(entity_id)
    if entity is None:
        raise ValueError(f"AI Task entity {entity_id} not found")

    return await entity.internal_async_generate_text(
        GenTextTask(
            name=task_name,
            instructions=instructions,
        )
    )


@dataclass(slots=True)
class GenTextTask:
    """Gen text task to be processed."""

    name: str
    """Name of the task."""

    instructions: str
    """Instructions on what needs to be done."""

    def __str__(self) -> str:
        """Return task as a string."""
        return f"<GenTextTask {self.name}: {id(self)}>"


@dataclass(slots=True)
class GenTextTaskResult:
    """Result of gen text task."""

    conversation_id: str
    """Unique identifier for the conversation."""

    result: str
    """Result of the task."""

    def as_dict(self) -> dict[str, str]:
        """Return result as a dict."""
        return {
            "conversation_id": self.conversation_id,
            "result": self.result,
        }
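To tie the pieces together, code running inside Home Assistant (for example another integration) could call the helper above roughly as sketched below; the entity id, task name, and instruction text are placeholders, not values defined by this changeset.

# Hypothetical caller inside Home Assistant (all literal values are placeholders).
from homeassistant.components.ai_task import async_generate_text
from homeassistant.core import HomeAssistant


async def summarize_day(hass: HomeAssistant) -> str:
    """Ask the preferred (or an explicit) AI Task entity for a short text."""
    result = await async_generate_text(
        hass,
        task_name="daily_summary",
        entity_id="ai_task.example_agent",  # optional; omit to use the stored preference
        instructions="Summarize today's noteworthy events in two sentences.",
    )
    # GenTextTaskResult exposes the generated text and the conversation id it belongs to.
    return result.result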
@@ -37,30 +37,35 @@ SENSORS: dict[str, SensorEntityDescription] = {
         key="radonShortTermAvg",
         native_unit_of_measurement="Bq/m³",
         translation_key="radon",
+        suggested_display_precision=0,
     ),
     "temp": SensorEntityDescription(
         key="temp",
         device_class=SensorDeviceClass.TEMPERATURE,
         native_unit_of_measurement=UnitOfTemperature.CELSIUS,
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=1,
     ),
     "humidity": SensorEntityDescription(
         key="humidity",
         device_class=SensorDeviceClass.HUMIDITY,
         native_unit_of_measurement=PERCENTAGE,
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=0,
     ),
     "pressure": SensorEntityDescription(
         key="pressure",
         device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE,
         native_unit_of_measurement=UnitOfPressure.MBAR,
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=1,
     ),
     "sla": SensorEntityDescription(
         key="sla",
         device_class=SensorDeviceClass.SOUND_PRESSURE,
         native_unit_of_measurement=UnitOfSoundPressure.WEIGHTED_DECIBEL_A,
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=0,
     ),
     "battery": SensorEntityDescription(
         key="battery",
@@ -68,40 +73,47 @@ SENSORS: dict[str, SensorEntityDescription] = {
         native_unit_of_measurement=PERCENTAGE,
         entity_category=EntityCategory.DIAGNOSTIC,
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=0,
     ),
     "co2": SensorEntityDescription(
         key="co2",
         device_class=SensorDeviceClass.CO2,
         native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=0,
     ),
     "voc": SensorEntityDescription(
         key="voc",
         device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS_PARTS,
         native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=0,
     ),
     "light": SensorEntityDescription(
         key="light",
         native_unit_of_measurement=PERCENTAGE,
         translation_key="light",
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=0,
     ),
     "lux": SensorEntityDescription(
         key="lux",
         device_class=SensorDeviceClass.ILLUMINANCE,
         native_unit_of_measurement=LIGHT_LUX,
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=0,
     ),
     "virusRisk": SensorEntityDescription(
         key="virusRisk",
         translation_key="virus_risk",
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=0,
     ),
     "mold": SensorEntityDescription(
         key="mold",
         translation_key="mold",
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=0,
     ),
     "rssi": SensorEntityDescription(
         key="rssi",
@@ -110,18 +122,21 @@ SENSORS: dict[str, SensorEntityDescription] = {
         entity_registry_enabled_default=False,
         entity_category=EntityCategory.DIAGNOSTIC,
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=0,
     ),
     "pm1": SensorEntityDescription(
         key="pm1",
         native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
         device_class=SensorDeviceClass.PM1,
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=0,
     ),
     "pm25": SensorEntityDescription(
         key="pm25",
         native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
         device_class=SensorDeviceClass.PM25,
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=0,
     ),
 }
@@ -1,4 +1,4 @@
-"""Amazon Devices integration."""
+"""Alexa Devices integration."""

 from homeassistant.const import Platform
 from homeassistant.core import HomeAssistant
@@ -13,7 +13,7 @@ PLATFORMS = [

 async def async_setup_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bool:
-    """Set up Amazon Devices platform."""
+    """Set up Alexa Devices platform."""

     coordinator = AmazonDevicesCoordinator(hass, entry)

@@ -26,7 +26,7 @@ PARALLEL_UPDATES = 0

 @dataclass(frozen=True, kw_only=True)
 class AmazonBinarySensorEntityDescription(BinarySensorEntityDescription):
-    """Amazon Devices binary sensor entity description."""
+    """Alexa Devices binary sensor entity description."""

     is_on_fn: Callable[[AmazonDevice], bool]

@@ -52,7 +52,7 @@ async def async_setup_entry(
     entry: AmazonConfigEntry,
     async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
-    """Set up Amazon Devices binary sensors based on a config entry."""
+    """Set up Alexa Devices binary sensors based on a config entry."""

     coordinator = entry.runtime_data

@@ -1,4 +1,4 @@
-"""Config flow for Amazon Devices integration."""
+"""Config flow for Alexa Devices integration."""

 from __future__ import annotations

@@ -17,7 +17,7 @@ from .const import CONF_LOGIN_DATA, DOMAIN

 class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
-    """Handle a config flow for Amazon Devices."""
+    """Handle a config flow for Alexa Devices."""

     async def async_step_user(
         self, user_input: dict[str, Any] | None = None
@@ -1,8 +1,8 @@
-"""Amazon Devices constants."""
+"""Alexa Devices constants."""

 import logging

 _LOGGER = logging.getLogger(__package__)

-DOMAIN = "amazon_devices"
+DOMAIN = "alexa_devices"
 CONF_LOGIN_DATA = "login_data"
@@ -1,4 +1,4 @@
-"""Support for Amazon Devices."""
+"""Support for Alexa Devices."""

 from datetime import timedelta

@@ -23,7 +23,7 @@ type AmazonConfigEntry = ConfigEntry[AmazonDevicesCoordinator]

 class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
-    """Base coordinator for Amazon Devices."""
+    """Base coordinator for Alexa Devices."""

     config_entry: AmazonConfigEntry

@@ -1,4 +1,4 @@
-"""Diagnostics support for Amazon Devices integration."""
+"""Diagnostics support for Alexa Devices integration."""

 from __future__ import annotations

@@ -1,4 +1,4 @@
-"""Defines a base Amazon Devices entity."""
+"""Defines a base Alexa Devices entity."""

 from aioamazondevices.api import AmazonDevice
 from aioamazondevices.const import SPEAKER_GROUP_MODEL
@@ -12,7 +12,7 @@ from .coordinator import AmazonDevicesCoordinator

 class AmazonEntity(CoordinatorEntity[AmazonDevicesCoordinator]):
-    """Defines a base Amazon Devices entity."""
+    """Defines a base Alexa Devices entity."""

     _attr_has_entity_name = True

@@ -1,12 +1,12 @@
 {
-  "domain": "amazon_devices",
-  "name": "Amazon Devices",
+  "domain": "alexa_devices",
+  "name": "Alexa Devices",
   "codeowners": ["@chemelli74"],
   "config_flow": true,
-  "documentation": "https://www.home-assistant.io/integrations/amazon_devices",
+  "documentation": "https://www.home-assistant.io/integrations/alexa_devices",
   "integration_type": "hub",
   "iot_class": "cloud_polling",
   "loggers": ["aioamazondevices"],
   "quality_scale": "bronze",
-  "requirements": ["aioamazondevices==3.0.6"]
+  "requirements": ["aioamazondevices==3.1.4"]
 }
@@ -7,6 +7,7 @@ from dataclasses import dataclass
 from typing import Any, Final

 from aioamazondevices.api import AmazonDevice, AmazonEchoApi
+from aioamazondevices.const import SPEAKER_GROUP_FAMILY

 from homeassistant.components.notify import NotifyEntity, NotifyEntityDescription
 from homeassistant.core import HomeAssistant
@@ -20,8 +21,9 @@ PARALLEL_UPDATES = 1

 @dataclass(frozen=True, kw_only=True)
 class AmazonNotifyEntityDescription(NotifyEntityDescription):
-    """Amazon Devices notify entity description."""
+    """Alexa Devices notify entity description."""

+    is_supported: Callable[[AmazonDevice], bool] = lambda _device: True
     method: Callable[[AmazonEchoApi, AmazonDevice, str], Awaitable[None]]
     subkey: str

@@ -31,6 +33,7 @@ NOTIFY: Final = (
         key="speak",
         translation_key="speak",
         subkey="AUDIO_PLAYER",
+        is_supported=lambda _device: _device.device_family != SPEAKER_GROUP_FAMILY,
         method=lambda api, device, message: api.call_alexa_speak(device, message),
     ),
     AmazonNotifyEntityDescription(
@@ -49,7 +52,7 @@ async def async_setup_entry(
     entry: AmazonConfigEntry,
     async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
-    """Set up Amazon Devices notification entity based on a config entry."""
+    """Set up Alexa Devices notification entity based on a config entry."""

     coordinator = entry.runtime_data

@@ -58,6 +61,7 @@ async def async_setup_entry(
         for sensor_desc in NOTIFY
         for serial_num in coordinator.data
         if sensor_desc.subkey in coordinator.data[serial_num].capabilities
+        and sensor_desc.is_supported(coordinator.data[serial_num])
     )

@@ -12,16 +12,16 @@
     "step": {
       "user": {
         "data": {
-          "country": "[%key:component::amazon_devices::common::data_country%]",
+          "country": "[%key:component::alexa_devices::common::data_country%]",
           "username": "[%key:common::config_flow::data::username%]",
           "password": "[%key:common::config_flow::data::password%]",
-          "code": "[%key:component::amazon_devices::common::data_description_code%]"
+          "code": "[%key:component::alexa_devices::common::data_description_code%]"
         },
         "data_description": {
-          "country": "[%key:component::amazon_devices::common::data_description_country%]",
-          "username": "[%key:component::amazon_devices::common::data_description_username%]",
-          "password": "[%key:component::amazon_devices::common::data_description_password%]",
-          "code": "[%key:component::amazon_devices::common::data_description_code%]"
+          "country": "[%key:component::alexa_devices::common::data_description_country%]",
+          "username": "[%key:component::alexa_devices::common::data_description_username%]",
+          "password": "[%key:component::alexa_devices::common::data_description_password%]",
+          "code": "[%key:component::alexa_devices::common::data_description_code%]"
         }
       }
     },
@@ -20,7 +20,7 @@ PARALLEL_UPDATES = 1

 @dataclass(frozen=True, kw_only=True)
 class AmazonSwitchEntityDescription(SwitchEntityDescription):
-    """Amazon Devices switch entity description."""
+    """Alexa Devices switch entity description."""

     is_on_fn: Callable[[AmazonDevice], bool]
     subkey: str
@@ -43,7 +43,7 @@ async def async_setup_entry(
     entry: AmazonConfigEntry,
     async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
-    """Set up Amazon Devices switches based on a config entry."""
+    """Set up Alexa Devices switches based on a config entry."""

     coordinator = entry.runtime_data
@@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
from homeassistant.auth.models import User
|
||||
from homeassistant.auth.permissions.const import POLICY_CONTROL
|
||||
from homeassistant.const import ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import Unauthorized, UnknownUser
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.helpers.service import async_extract_entity_ids
|
||||
@@ -15,6 +15,7 @@ from .const import CAMERAS, DATA_AMCREST, DOMAIN
|
||||
from .helpers import service_signal
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up the Amcrest IP Camera services."""
|
||||
|
||||
|
||||
@@ -366,15 +366,35 @@ class AnthropicConversationEntity(
|
||||
options = self.entry.options
|
||||
|
||||
try:
|
||||
await chat_log.async_update_llm_data(
|
||||
DOMAIN,
|
||||
user_input,
|
||||
await chat_log.async_provide_llm_data(
|
||||
user_input.as_llm_context(DOMAIN),
|
||||
options.get(CONF_LLM_HASS_API),
|
||||
options.get(CONF_PROMPT),
|
||||
user_input.extra_system_prompt,
|
||||
)
|
||||
except conversation.ConverseError as err:
|
||||
return err.as_conversation_result()
|
||||
|
||||
await self._async_handle_chat_log(chat_log)
|
||||
|
||||
response_content = chat_log.content[-1]
|
||||
if not isinstance(response_content, conversation.AssistantContent):
|
||||
raise TypeError("Last message must be an assistant message")
|
||||
intent_response = intent.IntentResponse(language=user_input.language)
|
||||
intent_response.async_set_speech(response_content.content or "")
|
||||
return conversation.ConversationResult(
|
||||
response=intent_response,
|
||||
conversation_id=chat_log.conversation_id,
|
||||
continue_conversation=chat_log.continue_conversation,
|
||||
)
|
||||
|
||||
async def _async_handle_chat_log(
|
||||
self,
|
||||
chat_log: conversation.ChatLog,
|
||||
) -> None:
|
||||
"""Generate an answer for the chat log."""
|
||||
options = self.entry.options
|
||||
|
||||
tools: list[ToolParam] | None = None
|
||||
if chat_log.llm_api:
|
||||
tools = [
|
||||
@@ -424,7 +444,7 @@ class AnthropicConversationEntity(
|
||||
[
|
||||
content
|
||||
async for content in chat_log.async_add_delta_content_stream(
|
||||
user_input.agent_id,
|
||||
self.entity_id,
|
||||
_transform_stream(chat_log, stream, messages),
|
||||
)
|
||||
if not isinstance(content, conversation.AssistantContent)
|
||||
@@ -435,17 +455,6 @@ class AnthropicConversationEntity(
|
||||
if not chat_log.unresponded_tool_results:
|
||||
break
|
||||
|
||||
response_content = chat_log.content[-1]
|
||||
if not isinstance(response_content, conversation.AssistantContent):
|
||||
raise TypeError("Last message must be an assistant message")
|
||||
intent_response = intent.IntentResponse(language=user_input.language)
|
||||
intent_response.async_set_speech(response_content.content or "")
|
||||
return conversation.ConversationResult(
|
||||
response=intent_response,
|
||||
conversation_id=chat_log.conversation_id,
|
||||
continue_conversation=chat_log.continue_conversation,
|
||||
)
|
||||
|
||||
async def _async_entry_update_listener(
|
||||
self, hass: HomeAssistant, entry: ConfigEntry
|
||||
) -> None:
|
||||
|
||||
@@ -89,7 +89,7 @@ class ArubaDeviceScanner(DeviceScanner):
|
||||
def get_aruba_data(self) -> dict[str, dict[str, str]] | None:
|
||||
"""Retrieve data from Aruba Access Point and return parsed result."""
|
||||
|
||||
connect = f"ssh {self.username}@{self.host} -o HostKeyAlgorithms=ssh-rsa"
|
||||
connect = f"ssh {self.username}@{self.host}"
|
||||
ssh: pexpect.spawn[str] = pexpect.spawn(connect, encoding="utf-8")
|
||||
query = ssh.expect(
|
||||
[
|
||||
|
||||
@@ -25,7 +25,7 @@ from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, PLATFORMS
|
||||
from .coordinator import BlinkConfigEntry, BlinkUpdateCoordinator
|
||||
from .services import setup_services
|
||||
from .services import async_setup_services
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -72,7 +72,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: BlinkConfigEntry) -> b
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up Blink."""
|
||||
|
||||
setup_services(hass)
|
||||
async_setup_services(hass)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.const import CONF_PIN
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
|
||||
@@ -21,34 +21,36 @@ SERVICE_SEND_PIN_SCHEMA = vol.Schema(
|
||||
)
|
||||
|
||||
|
||||
def setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up the services for the Blink integration."""
|
||||
|
||||
async def send_pin(call: ServiceCall):
|
||||
"""Call blink to send new pin."""
|
||||
config_entry: BlinkConfigEntry | None
|
||||
for entry_id in call.data[ATTR_CONFIG_ENTRY_ID]:
|
||||
if not (config_entry := hass.config_entries.async_get_entry(entry_id)):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="integration_not_found",
|
||||
translation_placeholders={"target": DOMAIN},
|
||||
)
|
||||
if config_entry.state != ConfigEntryState.LOADED:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="not_loaded",
|
||||
translation_placeholders={"target": config_entry.title},
|
||||
)
|
||||
coordinator = config_entry.runtime_data
|
||||
await coordinator.api.auth.send_auth_key(
|
||||
coordinator.api,
|
||||
call.data[CONF_PIN],
|
||||
async def _send_pin(call: ServiceCall) -> None:
|
||||
"""Call blink to send new pin."""
|
||||
config_entry: BlinkConfigEntry | None
|
||||
for entry_id in call.data[ATTR_CONFIG_ENTRY_ID]:
|
||||
if not (config_entry := call.hass.config_entries.async_get_entry(entry_id)):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="integration_not_found",
|
||||
translation_placeholders={"target": DOMAIN},
|
||||
)
|
||||
if config_entry.state != ConfigEntryState.LOADED:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="not_loaded",
|
||||
translation_placeholders={"target": config_entry.title},
|
||||
)
|
||||
coordinator = config_entry.runtime_data
|
||||
await coordinator.api.auth.send_auth_key(
|
||||
coordinator.api,
|
||||
call.data[CONF_PIN],
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up the services for the Blink integration."""
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_SEND_PIN,
|
||||
send_pin,
|
||||
_send_pin,
|
||||
schema=SERVICE_SEND_PIN_SCHEMA,
|
||||
)
|
||||
|
||||
@@ -27,7 +27,6 @@ from homeassistant.helpers.entity import Entity, EntityDescription
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
from homeassistant.helpers.temperature import display_temp as show_temp
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.loader import async_suggest_report_issue
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
from homeassistant.util.unit_conversion import TemperatureConverter
|
||||
|
||||
@@ -535,26 +534,6 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
return
|
||||
modes_str: str = ", ".join(modes) if modes else ""
|
||||
translation_key = f"not_valid_{mode_type}_mode"
|
||||
if mode_type == "hvac":
|
||||
report_issue = async_suggest_report_issue(
|
||||
self.hass,
|
||||
integration_domain=self.platform.platform_name,
|
||||
module=type(self).__module__,
|
||||
)
|
||||
_LOGGER.warning(
|
||||
(
|
||||
"%s::%s sets the hvac_mode %s which is not "
|
||||
"valid for this entity with modes: %s. "
|
||||
"This will stop working in 2025.4 and raise an error instead. "
|
||||
"Please %s"
|
||||
),
|
||||
self.platform.platform_name,
|
||||
self.__class__.__name__,
|
||||
mode,
|
||||
modes_str,
|
||||
report_issue,
|
||||
)
|
||||
return
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key=translation_key,
|
||||
|
||||
@@ -258,6 +258,9 @@
|
||||
"not_valid_preset_mode": {
|
||||
"message": "Preset mode {mode} is not valid. Valid preset modes are: {modes}."
|
||||
},
|
||||
"not_valid_hvac_mode": {
|
||||
"message": "HVAC mode {mode} is not valid. Valid HVAC modes are: {modes}."
|
||||
},
|
||||
"not_valid_swing_mode": {
|
||||
"message": "Swing mode {mode} is not valid. Valid swing modes are: {modes}."
|
||||
},
|
||||
|
||||
@@ -9,12 +9,11 @@ from typing import Any
from homeassistant.components.notify import BaseNotificationService
from homeassistant.const import CONF_COMMAND
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import TemplateError
from homeassistant.helpers.template import Template
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util.process import kill_subprocess

from .const import CONF_COMMAND_TIMEOUT, LOGGER
from .utils import render_template_args

_LOGGER = logging.getLogger(__name__)

@@ -45,28 +44,10 @@ class CommandLineNotificationService(BaseNotificationService):

def send_message(self, message: str = "", **kwargs: Any) -> None:
"""Send a message to a command line."""
command = self.command
if " " not in command:
prog = command
args = None
args_compiled = None
else:
prog, args = command.split(" ", 1)
args_compiled = Template(args, self.hass)
if not (command := render_template_args(self.hass, self.command)):
return

rendered_args = None
if args_compiled:
args_to_render = {"arguments": args}
try:
rendered_args = args_compiled.async_render(args_to_render)
except TemplateError as ex:
LOGGER.exception("Error rendering command template: %s", ex)
return

if rendered_args != args:
command = f"{prog} {rendered_args}"

LOGGER.debug("Running command: %s, with message: %s", command, message)
LOGGER.debug("Running with message: %s", message)

with subprocess.Popen( # noqa: S602 # shell by design
command,

@@ -19,7 +19,6 @@ from homeassistant.const import (
CONF_VALUE_TEMPLATE,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import TemplateError
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.template import Template
@@ -37,7 +36,7 @@ from .const import (
LOGGER,
TRIGGER_ENTITY_OPTIONS,
)
from .utils import async_check_output_or_log
from .utils import async_check_output_or_log, render_template_args

DEFAULT_NAME = "Command Sensor"

@@ -222,32 +221,6 @@ class CommandSensorData:

async def async_update(self) -> None:
"""Get the latest data with a shell command."""
command = self.command

if " " not in command:
prog = command
args = None
args_compiled = None
else:
prog, args = command.split(" ", 1)
args_compiled = Template(args, self.hass)

if args_compiled:
try:
args_to_render = {"arguments": args}
rendered_args = args_compiled.async_render(args_to_render)
except TemplateError as ex:
LOGGER.exception("Error rendering command template: %s", ex)
return
else:
rendered_args = None

if rendered_args == args:
# No template used. default behavior
pass
else:
# Template used. Construct the string used in the shell
command = f"{prog} {rendered_args}"

LOGGER.debug("Running command: %s", command)
if not (command := render_template_args(self.hass, self.command)):
return
self.value = await async_check_output_or_log(command, self.timeout)

@@ -3,9 +3,13 @@
from __future__ import annotations

import asyncio
import logging

_LOGGER = logging.getLogger(__name__)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import TemplateError
from homeassistant.helpers.template import Template

from .const import LOGGER

_EXEC_FAILED_CODE = 127

@@ -18,7 +22,7 @@ async def async_call_shell_with_timeout(
return code is returned.
"""
try:
_LOGGER.debug("Running command: %s", command)
LOGGER.debug("Running command: %s", command)
proc = await asyncio.create_subprocess_shell( # shell by design
command,
close_fds=False, # required for posix_spawn
@@ -26,14 +30,14 @@
async with asyncio.timeout(timeout):
await proc.communicate()
except TimeoutError:
_LOGGER.error("Timeout for command: %s", command)
LOGGER.error("Timeout for command: %s", command)
return -1

return_code = proc.returncode
if return_code == _EXEC_FAILED_CODE:
_LOGGER.error("Error trying to exec command: %s", command)
LOGGER.error("Error trying to exec command: %s", command)
elif log_return_code and return_code != 0:
_LOGGER.error(
LOGGER.error(
"Command failed (with return code %s): %s",
proc.returncode,
command,
@@ -53,12 +57,39 @@ async def async_check_output_or_log(command: str, timeout: int) -> str | None:
stdout, _ = await proc.communicate()

if proc.returncode != 0:
_LOGGER.error(
LOGGER.error(
"Command failed (with return code %s): %s", proc.returncode, command
)
else:
return stdout.strip().decode("utf-8")
except TimeoutError:
_LOGGER.error("Timeout for command: %s", command)
LOGGER.error("Timeout for command: %s", command)

return None

def render_template_args(hass: HomeAssistant, command: str) -> str | None:
"""Render template arguments for command line utilities."""
if " " not in command:
prog = command
args = None
args_compiled = None
else:
prog, args = command.split(" ", 1)
args_compiled = Template(args, hass)

rendered_args = None
if args_compiled:
args_to_render = {"arguments": args}
try:
rendered_args = args_compiled.async_render(args_to_render)
except TemplateError as ex:
LOGGER.exception("Error rendering command template: %s", ex)
return None

if rendered_args != args:
command = f"{prog} {rendered_args}"

LOGGER.debug("Running command: %s", command)

return command

@@ -14,12 +14,11 @@ import voluptuous as vol

from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError, TemplateError
from homeassistant.helpers import chat_session, intent, llm, template
from homeassistant.helpers import chat_session, frame, intent, llm, template
from homeassistant.util.hass_dict import HassKey
from homeassistant.util.json import JsonObjectType

from . import trace
from .const import DOMAIN
from .models import ConversationInput, ConversationResult

DATA_CHAT_LOGS: HassKey[dict[str, ChatLog]] = HassKey("conversation_chat_logs")
@@ -359,7 +358,7 @@ class ChatLog:
self,
llm_context: llm.LLMContext,
prompt: str,
language: str,
language: str | None,
user_name: str | None = None,
) -> str:
try:
@@ -373,7 +372,7 @@ class ChatLog:
)
except TemplateError as err:
LOGGER.error("Error rendering prompt: %s", err)
intent_response = intent.IntentResponse(language=language)
intent_response = intent.IntentResponse(language=language or "")
intent_response.async_set_error(
intent.IntentResponseErrorCode.UNKNOWN,
"Sorry, I had a problem with my template",
@@ -392,15 +391,25 @@ class ChatLog:
user_llm_prompt: str | None = None,
) -> None:
"""Set the LLM system prompt."""
llm_context = llm.LLMContext(
platform=conversing_domain,
context=user_input.context,
user_prompt=user_input.text,
language=user_input.language,
assistant=DOMAIN,
device_id=user_input.device_id,
frame.report_usage(
"ChatLog.async_update_llm_data",
breaks_in_ha_version="2026.1",
)
return await self.async_provide_llm_data(
llm_context=user_input.as_llm_context(conversing_domain),
user_llm_hass_api=user_llm_hass_api,
user_llm_prompt=user_llm_prompt,
user_extra_system_prompt=user_input.extra_system_prompt,
)

async def async_provide_llm_data(
self,
llm_context: llm.LLMContext,
user_llm_hass_api: str | list[str] | None = None,
user_llm_prompt: str | None = None,
user_extra_system_prompt: str | None = None,
) -> None:
"""Set the LLM system prompt."""
llm_api: llm.APIInstance | None = None

if user_llm_hass_api:
@@ -414,10 +423,12 @@ class ChatLog:
LOGGER.error(
"Error getting LLM API %s for %s: %s",
user_llm_hass_api,
conversing_domain,
llm_context.platform,
err,
)
intent_response = intent.IntentResponse(language=user_input.language)
intent_response = intent.IntentResponse(
language=llm_context.language or ""
)
intent_response.async_set_error(
intent.IntentResponseErrorCode.UNKNOWN,
"Error preparing LLM API",
@@ -431,10 +442,10 @@ class ChatLog:
user_name: str | None = None

if (
user_input.context
and user_input.context.user_id
llm_context.context
and llm_context.context.user_id
and (
user := await self.hass.auth.async_get_user(user_input.context.user_id)
user := await self.hass.auth.async_get_user(llm_context.context.user_id)
)
):
user_name = user.name
@@ -444,7 +455,7 @@ class ChatLog:
await self._async_expand_prompt_template(
llm_context,
(user_llm_prompt or llm.DEFAULT_INSTRUCTIONS_PROMPT),
user_input.language,
llm_context.language,
user_name,
)
)
@@ -456,14 +467,14 @@ class ChatLog:
await self._async_expand_prompt_template(
llm_context,
llm.BASE_PROMPT,
user_input.language,
llm_context.language,
user_name,
)
)

if extra_system_prompt := (
# Take new system prompt if one was given
user_input.extra_system_prompt or self.extra_system_prompt
user_extra_system_prompt or self.extra_system_prompt
):
prompt_parts.append(extra_system_prompt)

@@ -7,7 +7,9 @@ from dataclasses import dataclass
from typing import Any, Literal

from homeassistant.core import Context
from homeassistant.helpers import intent
from homeassistant.helpers import intent, llm

from .const import DOMAIN

@dataclass(frozen=True)
@@ -56,6 +58,16 @@ class ConversationInput:
"extra_system_prompt": self.extra_system_prompt,
}

def as_llm_context(self, conversing_domain: str) -> llm.LLMContext:
"""Return input as an LLM context."""
return llm.LLMContext(
platform=conversing_domain,
context=self.context,
language=self.language,
assistant=DOMAIN,
device_id=self.device_id,
)

@dataclass(slots=True)
class ConversationResult:

@@ -164,8 +164,6 @@ class DeconzThermostat(DeconzDevice[Thermostat], ClimateEntity):

async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set new target hvac mode."""
if hvac_mode not in self._attr_hvac_modes:
raise ValueError(f"Unsupported HVAC mode {hvac_mode}")

if len(self._attr_hvac_modes) == 2: # Only allow turn on and off thermostat
await self.hub.api.sensors.thermostat.set_config(

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/dnsip",
"iot_class": "cloud_polling",
"requirements": ["aiodns==3.4.0"]
"requirements": ["aiodns==3.5.0"]
}

@@ -10,7 +10,7 @@ import threading
import requests
import voluptuous as vol

from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.service import async_register_admin_service
from homeassistant.util import raise_if_invalid_filename, raise_if_invalid_path
@@ -141,6 +141,7 @@ def download_file(service: ServiceCall) -> None:
threading.Thread(target=do_download).start()

@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Register the services for the downloader component."""
async_register_admin_service(

@@ -2,7 +2,13 @@
"config": {
"step": {
"pick_implementation": {
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
"data": {
"implementation": "[%key:common::config_flow::data::implementation%]"
},
"data_description": {
"implementation": "[%key:common::config_flow::description::implementation%]"
}
},
"reauth_confirm": {
"title": "[%key:common::config_flow::title::reauth%]",

@@ -63,6 +63,7 @@ def _set_time_service(service: ServiceCall) -> None:
_async_get_elk_panel(service).set_time(dt_util.now())

@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Create ElkM1 services."""

@@ -6,7 +6,6 @@ from typing import TYPE_CHECKING

from eq3btsmart import Thermostat
from eq3btsmart.exceptions import Eq3Exception
from eq3btsmart.thermostat_config import ThermostatConfig

from homeassistant.components import bluetooth
from homeassistant.config_entries import ConfigEntry
@@ -53,12 +52,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: Eq3ConfigEntry) -> bool:
f"[{eq3_config.mac_address}] Device could not be found"
)

thermostat = Thermostat(
thermostat_config=ThermostatConfig(
mac_address=mac_address,
),
ble_device=device,
)
thermostat = Thermostat(mac_address=device) # type: ignore[arg-type]

entry.runtime_data = Eq3ConfigEntryData(
eq3_config=eq3_config, thermostat=thermostat

@@ -2,7 +2,6 @@

from collections.abc import Callable
from dataclasses import dataclass
from typing import TYPE_CHECKING

from eq3btsmart.models import Status

@@ -80,7 +79,4 @@ class Eq3BinarySensorEntity(Eq3Entity, BinarySensorEntity):
def is_on(self) -> bool:
"""Return the state of the binary sensor."""

if TYPE_CHECKING:
assert self._thermostat.status is not None

return self.entity_description.value_func(self._thermostat.status)
@@ -1,9 +1,16 @@
"""Platform for eQ-3 climate entities."""

from datetime import timedelta
import logging
from typing import Any

from eq3btsmart.const import EQ3BT_MAX_TEMP, EQ3BT_OFF_TEMP, Eq3Preset, OperationMode
from eq3btsmart.const import (
EQ3_DEFAULT_AWAY_TEMP,
EQ3_MAX_TEMP,
EQ3_OFF_TEMP,
Eq3OperationMode,
Eq3Preset,
)
from eq3btsmart.exceptions import Eq3Exception

from homeassistant.components.climate import (
@@ -20,9 +27,11 @@ from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
import homeassistant.util.dt as dt_util

from . import Eq3ConfigEntry
from .const import (
DEFAULT_AWAY_HOURS,
EQ_TO_HA_HVAC,
HA_TO_EQ_HVAC,
CurrentTemperatureSelector,
@@ -57,8 +66,8 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
| ClimateEntityFeature.TURN_ON
)
_attr_temperature_unit = UnitOfTemperature.CELSIUS
_attr_min_temp = EQ3BT_OFF_TEMP
_attr_max_temp = EQ3BT_MAX_TEMP
_attr_min_temp = EQ3_OFF_TEMP
_attr_max_temp = EQ3_MAX_TEMP
_attr_precision = PRECISION_HALVES
_attr_hvac_modes = list(HA_TO_EQ_HVAC.keys())
_attr_preset_modes = list(Preset)
@@ -70,38 +79,21 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
_target_temperature: float | None = None

@callback
def _async_on_updated(self) -> None:
"""Handle updated data from the thermostat."""

if self._thermostat.status is not None:
self._async_on_status_updated()

if self._thermostat.device_data is not None:
self._async_on_device_updated()

super()._async_on_updated()

@callback
def _async_on_status_updated(self) -> None:
def _async_on_status_updated(self, data: Any) -> None:
"""Handle updated status from the thermostat."""

if self._thermostat.status is None:
return

self._target_temperature = self._thermostat.status.target_temperature.value
self._target_temperature = self._thermostat.status.target_temperature
self._attr_hvac_mode = EQ_TO_HA_HVAC[self._thermostat.status.operation_mode]
self._attr_current_temperature = self._get_current_temperature()
self._attr_target_temperature = self._get_target_temperature()
self._attr_preset_mode = self._get_current_preset_mode()
self._attr_hvac_action = self._get_current_hvac_action()
super()._async_on_status_updated(data)

@callback
def _async_on_device_updated(self) -> None:
def _async_on_device_updated(self, data: Any) -> None:
"""Handle updated device data from the thermostat."""

if self._thermostat.device_data is None:
return

device_registry = dr.async_get(self.hass)
if device := device_registry.async_get_device(
connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)},
@@ -109,8 +101,9 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
device_registry.async_update_device(
device.id,
sw_version=str(self._thermostat.device_data.firmware_version),
serial_number=self._thermostat.device_data.device_serial.value,
serial_number=self._thermostat.device_data.device_serial,
)
super()._async_on_device_updated(data)

def _get_current_temperature(self) -> float | None:
"""Return the current temperature."""
@@ -119,17 +112,11 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
case CurrentTemperatureSelector.NOTHING:
return None
case CurrentTemperatureSelector.VALVE:
if self._thermostat.status is None:
return None

return float(self._thermostat.status.valve_temperature)
case CurrentTemperatureSelector.UI:
return self._target_temperature
case CurrentTemperatureSelector.DEVICE:
if self._thermostat.status is None:
return None

return float(self._thermostat.status.target_temperature.value)
return float(self._thermostat.status.target_temperature)
case CurrentTemperatureSelector.ENTITY:
state = self.hass.states.get(self._eq3_config.external_temp_sensor)
if state is not None:
@@ -147,16 +134,12 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
case TargetTemperatureSelector.TARGET:
return self._target_temperature
case TargetTemperatureSelector.LAST_REPORTED:
if self._thermostat.status is None:
return None

return float(self._thermostat.status.target_temperature.value)
return float(self._thermostat.status.target_temperature)

def _get_current_preset_mode(self) -> str:
"""Return the current preset mode."""

if (status := self._thermostat.status) is None:
return PRESET_NONE
status = self._thermostat.status
if status.is_window_open:
return Preset.WINDOW_OPEN
if status.is_boost:
@@ -165,7 +148,7 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
return Preset.LOW_BATTERY
if status.is_away:
return Preset.AWAY
if status.operation_mode is OperationMode.ON:
if status.operation_mode is Eq3OperationMode.ON:
return Preset.OPEN
if status.presets is None:
return PRESET_NONE
@@ -179,10 +162,7 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
def _get_current_hvac_action(self) -> HVACAction:
"""Return the current hvac action."""

if (
self._thermostat.status is None
or self._thermostat.status.operation_mode is OperationMode.OFF
):
if self._thermostat.status.operation_mode is Eq3OperationMode.OFF:
return HVACAction.OFF
if self._thermostat.status.valve == 0:
return HVACAction.IDLE
@@ -227,7 +207,7 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
"""Set new target hvac mode."""

if hvac_mode is HVACMode.OFF:
await self.async_set_temperature(temperature=EQ3BT_OFF_TEMP)
await self.async_set_temperature(temperature=EQ3_OFF_TEMP)

try:
await self._thermostat.async_set_mode(HA_TO_EQ_HVAC[hvac_mode])
@@ -241,10 +221,11 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
case Preset.BOOST:
await self._thermostat.async_set_boost(True)
case Preset.AWAY:
await self._thermostat.async_set_away(True)
away_until = dt_util.now() + timedelta(hours=DEFAULT_AWAY_HOURS)
await self._thermostat.async_set_away(away_until, EQ3_DEFAULT_AWAY_TEMP)
case Preset.ECO:
await self._thermostat.async_set_preset(Eq3Preset.ECO)
case Preset.COMFORT:
await self._thermostat.async_set_preset(Eq3Preset.COMFORT)
case Preset.OPEN:
await self._thermostat.async_set_mode(OperationMode.ON)
await self._thermostat.async_set_mode(Eq3OperationMode.ON)

@@ -2,7 +2,7 @@

from enum import Enum

from eq3btsmart.const import OperationMode
from eq3btsmart.const import Eq3OperationMode

from homeassistant.components.climate import (
PRESET_AWAY,
@@ -34,17 +34,17 @@ ENTITY_KEY_AWAY_UNTIL = "away_until"

GET_DEVICE_TIMEOUT = 5 # seconds

EQ_TO_HA_HVAC: dict[OperationMode, HVACMode] = {
OperationMode.OFF: HVACMode.OFF,
OperationMode.ON: HVACMode.HEAT,
OperationMode.AUTO: HVACMode.AUTO,
OperationMode.MANUAL: HVACMode.HEAT,
EQ_TO_HA_HVAC: dict[Eq3OperationMode, HVACMode] = {
Eq3OperationMode.OFF: HVACMode.OFF,
Eq3OperationMode.ON: HVACMode.HEAT,
Eq3OperationMode.AUTO: HVACMode.AUTO,
Eq3OperationMode.MANUAL: HVACMode.HEAT,
}

HA_TO_EQ_HVAC = {
HVACMode.OFF: OperationMode.OFF,
HVACMode.AUTO: OperationMode.AUTO,
HVACMode.HEAT: OperationMode.MANUAL,
HVACMode.OFF: Eq3OperationMode.OFF,
HVACMode.AUTO: Eq3OperationMode.AUTO,
HVACMode.HEAT: Eq3OperationMode.MANUAL,
}

@@ -81,6 +81,7 @@ class TargetTemperatureSelector(str, Enum):
DEFAULT_CURRENT_TEMP_SELECTOR = CurrentTemperatureSelector.DEVICE
DEFAULT_TARGET_TEMP_SELECTOR = TargetTemperatureSelector.TARGET
DEFAULT_SCAN_INTERVAL = 10 # seconds
DEFAULT_AWAY_HOURS = 30 * 24

SIGNAL_THERMOSTAT_DISCONNECTED = f"{DOMAIN}.thermostat_disconnected"
SIGNAL_THERMOSTAT_CONNECTED = f"{DOMAIN}.thermostat_connected"

@@ -1,5 +1,10 @@
"""Base class for all eQ-3 entities."""

from typing import Any

from eq3btsmart import Eq3Exception
from eq3btsmart.const import Eq3Event

from homeassistant.core import callback
from homeassistant.helpers.device_registry import (
CONNECTION_BLUETOOTH,
@@ -45,7 +50,15 @@ class Eq3Entity(Entity):
async def async_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""

self._thermostat.register_update_callback(self._async_on_updated)
self._thermostat.register_callback(
Eq3Event.DEVICE_DATA_RECEIVED, self._async_on_device_updated
)
self._thermostat.register_callback(
Eq3Event.STATUS_RECEIVED, self._async_on_status_updated
)
self._thermostat.register_callback(
Eq3Event.SCHEDULE_RECEIVED, self._async_on_status_updated
)

self.async_on_remove(
async_dispatcher_connect(
@@ -65,10 +78,25 @@ class Eq3Entity(Entity):
async def async_will_remove_from_hass(self) -> None:
"""Run when entity will be removed from hass."""

self._thermostat.unregister_update_callback(self._async_on_updated)
self._thermostat.unregister_callback(
Eq3Event.DEVICE_DATA_RECEIVED, self._async_on_device_updated
)
self._thermostat.unregister_callback(
Eq3Event.STATUS_RECEIVED, self._async_on_status_updated
)
self._thermostat.unregister_callback(
Eq3Event.SCHEDULE_RECEIVED, self._async_on_status_updated
)

def _async_on_updated(self) -> None:
"""Handle updated data from the thermostat."""
@callback
def _async_on_status_updated(self, data: Any) -> None:
"""Handle updated status from the thermostat."""

self.async_write_ha_state()

@callback
def _async_on_device_updated(self, data: Any) -> None:
"""Handle updated device data from the thermostat."""

self.async_write_ha_state()

@@ -90,4 +118,9 @@ class Eq3Entity(Entity):
def available(self) -> bool:
"""Whether the entity is available."""

return self._thermostat.status is not None and self._attr_available
try:
_ = self._thermostat.status
except Eq3Exception:
return False

return self._attr_available

@@ -22,5 +22,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["eq3btsmart"],
"requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.16.0"]
"requirements": ["eq3btsmart==2.1.0", "bleak-esphome==2.16.0"]
}

@@ -1,17 +1,12 @@
"""Platform for eq3 number entities."""

from collections.abc import Awaitable, Callable
from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from typing import TYPE_CHECKING

from eq3btsmart import Thermostat
from eq3btsmart.const import (
EQ3BT_MAX_OFFSET,
EQ3BT_MAX_TEMP,
EQ3BT_MIN_OFFSET,
EQ3BT_MIN_TEMP,
)
from eq3btsmart.models import Presets
from eq3btsmart.const import EQ3_MAX_OFFSET, EQ3_MAX_TEMP, EQ3_MIN_OFFSET, EQ3_MIN_TEMP
from eq3btsmart.models import Presets, Status

from homeassistant.components.number import (
NumberDeviceClass,
@@ -42,7 +37,7 @@ class Eq3NumberEntityDescription(NumberEntityDescription):
value_func: Callable[[Presets], float]
value_set_func: Callable[
[Thermostat],
Callable[[float], Awaitable[None]],
Callable[[float], Coroutine[None, None, Status]],
]
mode: NumberMode = NumberMode.BOX
entity_category: EntityCategory | None = EntityCategory.CONFIG
@@ -51,44 +46,44 @@

NUMBER_ENTITY_DESCRIPTIONS = [
Eq3NumberEntityDescription(
key=ENTITY_KEY_COMFORT,
value_func=lambda presets: presets.comfort_temperature.value,
value_func=lambda presets: presets.comfort_temperature,
value_set_func=lambda thermostat: thermostat.async_configure_comfort_temperature,
translation_key=ENTITY_KEY_COMFORT,
native_min_value=EQ3BT_MIN_TEMP,
native_max_value=EQ3BT_MAX_TEMP,
native_min_value=EQ3_MIN_TEMP,
native_max_value=EQ3_MAX_TEMP,
native_step=EQ3BT_STEP,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=NumberDeviceClass.TEMPERATURE,
),
Eq3NumberEntityDescription(
key=ENTITY_KEY_ECO,
value_func=lambda presets: presets.eco_temperature.value,
value_func=lambda presets: presets.eco_temperature,
value_set_func=lambda thermostat: thermostat.async_configure_eco_temperature,
translation_key=ENTITY_KEY_ECO,
native_min_value=EQ3BT_MIN_TEMP,
native_max_value=EQ3BT_MAX_TEMP,
native_min_value=EQ3_MIN_TEMP,
native_max_value=EQ3_MAX_TEMP,
native_step=EQ3BT_STEP,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=NumberDeviceClass.TEMPERATURE,
),
Eq3NumberEntityDescription(
key=ENTITY_KEY_WINDOW_OPEN_TEMPERATURE,
value_func=lambda presets: presets.window_open_temperature.value,
value_func=lambda presets: presets.window_open_temperature,
value_set_func=lambda thermostat: thermostat.async_configure_window_open_temperature,
translation_key=ENTITY_KEY_WINDOW_OPEN_TEMPERATURE,
native_min_value=EQ3BT_MIN_TEMP,
native_max_value=EQ3BT_MAX_TEMP,
native_min_value=EQ3_MIN_TEMP,
native_max_value=EQ3_MAX_TEMP,
native_step=EQ3BT_STEP,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=NumberDeviceClass.TEMPERATURE,
),
Eq3NumberEntityDescription(
key=ENTITY_KEY_OFFSET,
value_func=lambda presets: presets.offset_temperature.value,
value_func=lambda presets: presets.offset_temperature,
value_set_func=lambda thermostat: thermostat.async_configure_temperature_offset,
translation_key=ENTITY_KEY_OFFSET,
native_min_value=EQ3BT_MIN_OFFSET,
native_max_value=EQ3BT_MAX_OFFSET,
native_min_value=EQ3_MIN_OFFSET,
native_max_value=EQ3_MAX_OFFSET,
native_step=EQ3BT_STEP,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=NumberDeviceClass.TEMPERATURE,
@@ -96,7 +91,7 @@ NUMBER_ENTITY_DESCRIPTIONS = [
Eq3NumberEntityDescription(
key=ENTITY_KEY_WINDOW_OPEN_TIMEOUT,
value_set_func=lambda thermostat: thermostat.async_configure_window_open_duration,
value_func=lambda presets: presets.window_open_time.value.total_seconds() / 60,
value_func=lambda presets: presets.window_open_time.total_seconds() / 60,
translation_key=ENTITY_KEY_WINDOW_OPEN_TIMEOUT,
native_min_value=0,
native_max_value=60,
@@ -137,7 +132,6 @@ class Eq3NumberEntity(Eq3Entity, NumberEntity):
"""Return the state of the entity."""

if TYPE_CHECKING:
assert self._thermostat.status is not None
assert self._thermostat.status.presets is not None

return self.entity_description.value_func(self._thermostat.status.presets)
@@ -152,7 +146,7 @@ class Eq3NumberEntity(Eq3Entity, NumberEntity):
"""Return whether the entity is available."""

return (
self._thermostat.status is not None
super().available
and self._thermostat.status.presets is not None
and self._attr_available
)
@@ -1,12 +1,12 @@
|
||||
"""Voluptuous schemas for eq3btsmart."""
|
||||
|
||||
from eq3btsmart.const import EQ3BT_MAX_TEMP, EQ3BT_MIN_TEMP
|
||||
from eq3btsmart.const import EQ3_MAX_TEMP, EQ3_MIN_TEMP
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_MAC
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
|
||||
SCHEMA_TEMPERATURE = vol.Range(min=EQ3BT_MIN_TEMP, max=EQ3BT_MAX_TEMP)
|
||||
SCHEMA_TEMPERATURE = vol.Range(min=EQ3_MIN_TEMP, max=EQ3_MAX_TEMP)
|
||||
SCHEMA_DEVICE = vol.Schema({vol.Required(CONF_MAC): cv.string})
|
||||
SCHEMA_MAC = vol.Schema(
|
||||
{
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from eq3btsmart.models import Status
|
||||
|
||||
@@ -40,9 +39,7 @@ SENSOR_ENTITY_DESCRIPTIONS = [
|
||||
Eq3SensorEntityDescription(
|
||||
key=ENTITY_KEY_AWAY_UNTIL,
|
||||
translation_key=ENTITY_KEY_AWAY_UNTIL,
|
||||
value_func=lambda status: (
|
||||
status.away_until.value if status.away_until else None
|
||||
),
|
||||
value_func=lambda status: (status.away_until if status.away_until else None),
|
||||
device_class=SensorDeviceClass.DATE,
|
||||
),
|
||||
]
|
||||
@@ -78,7 +75,4 @@ class Eq3SensorEntity(Eq3Entity, SensorEntity):
|
||||
def native_value(self) -> int | datetime | None:
|
||||
"""Return the value reported by the sensor."""
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert self._thermostat.status is not None
|
||||
|
||||
return self.entity_description.value_func(self._thermostat.status)
|
||||
|
||||
@@ -1,26 +1,45 @@
|
||||
"""Platform for eq3 switch entities."""
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from collections.abc import Callable, Coroutine
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING, Any
|
||||
from datetime import timedelta
|
||||
from functools import partial
|
||||
from typing import Any
|
||||
|
||||
from eq3btsmart import Thermostat
|
||||
from eq3btsmart.const import EQ3_DEFAULT_AWAY_TEMP, Eq3OperationMode
|
||||
from eq3btsmart.models import Status
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
import homeassistant.util.dt as dt_util
|
||||
|
||||
from . import Eq3ConfigEntry
|
||||
from .const import ENTITY_KEY_AWAY, ENTITY_KEY_BOOST, ENTITY_KEY_LOCK
|
||||
from .const import (
|
||||
DEFAULT_AWAY_HOURS,
|
||||
ENTITY_KEY_AWAY,
|
||||
ENTITY_KEY_BOOST,
|
||||
ENTITY_KEY_LOCK,
|
||||
)
|
||||
from .entity import Eq3Entity
|
||||
|
||||
|
||||
async def async_set_away(thermostat: Thermostat, enable: bool) -> Status:
|
||||
"""Backport old async_set_away behavior."""
|
||||
|
||||
if not enable:
|
||||
return await thermostat.async_set_mode(Eq3OperationMode.AUTO)
|
||||
|
||||
away_until = dt_util.now() + timedelta(hours=DEFAULT_AWAY_HOURS)
|
||||
return await thermostat.async_set_away(away_until, EQ3_DEFAULT_AWAY_TEMP)
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class Eq3SwitchEntityDescription(SwitchEntityDescription):
|
||||
"""Entity description for eq3 switch entities."""
|
||||
|
||||
toggle_func: Callable[[Thermostat], Callable[[bool], Awaitable[None]]]
|
||||
toggle_func: Callable[[Thermostat], Callable[[bool], Coroutine[None, None, Status]]]
|
||||
value_func: Callable[[Status], bool]
|
||||
|
||||
|
||||
@@ -40,7 +59,7 @@ SWITCH_ENTITY_DESCRIPTIONS = [
|
||||
Eq3SwitchEntityDescription(
|
||||
key=ENTITY_KEY_AWAY,
|
||||
translation_key=ENTITY_KEY_AWAY,
|
||||
toggle_func=lambda thermostat: thermostat.async_set_away,
|
||||
toggle_func=lambda thermostat: partial(async_set_away, thermostat),
|
||||
value_func=lambda status: status.is_away,
|
||||
),
|
||||
]
|
||||
@@ -88,7 +107,4 @@ class Eq3SwitchEntity(Eq3Entity, SwitchEntity):
|
||||
def is_on(self) -> bool:
|
||||
"""Return the state of the switch."""
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert self._thermostat.status is not None
|
||||
|
||||
return self.entity_description.value_func(self._thermostat.status)
|
||||
|
||||
@@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import ATTR_ENTITY_ID
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
|
||||
@@ -35,6 +35,7 @@ async def _async_service_handle(service: ServiceCall) -> None:
|
||||
async_dispatcher_send(service.hass, SIGNAL_FFMPEG_RESTART, entity_ids)
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Register FFmpeg services."""
|
||||
|
||||
|
||||
@@ -28,45 +28,36 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up the Fibaro covers."""
|
||||
controller = entry.runtime_data
|
||||
async_add_entities(
|
||||
[FibaroCover(device) for device in controller.fibaro_devices[Platform.COVER]],
|
||||
True,
|
||||
)
|
||||
|
||||
entities: list[FibaroEntity] = []
|
||||
for device in controller.fibaro_devices[Platform.COVER]:
|
||||
# Positionable covers report the position over value
|
||||
if device.value.has_value:
|
||||
entities.append(PositionableFibaroCover(device))
|
||||
else:
|
||||
entities.append(FibaroCover(device))
|
||||
async_add_entities(entities, True)
|
||||
|
||||
|
||||
class FibaroCover(FibaroEntity, CoverEntity):
|
||||
"""Representation a Fibaro Cover."""
|
||||
class PositionableFibaroCover(FibaroEntity, CoverEntity):
|
||||
"""Representation of a fibaro cover which supports positioning."""
|
||||
|
||||
def __init__(self, fibaro_device: DeviceModel) -> None:
|
||||
"""Initialize the Vera device."""
|
||||
"""Initialize the device."""
|
||||
super().__init__(fibaro_device)
|
||||
self.entity_id = ENTITY_ID_FORMAT.format(self.ha_id)
|
||||
|
||||
if self._is_open_close_only():
|
||||
self._attr_supported_features = (
|
||||
CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE
|
||||
)
|
||||
if "stop" in self.fibaro_device.actions:
|
||||
self._attr_supported_features |= CoverEntityFeature.STOP
|
||||
|
||||
@staticmethod
|
||||
def bound(position):
|
||||
def bound(position: int | None) -> int | None:
|
||||
"""Normalize the position."""
|
||||
if position is None:
|
||||
return None
|
||||
position = int(position)
|
||||
if position <= 5:
|
||||
return 0
|
||||
if position >= 95:
|
||||
return 100
|
||||
return position
|
||||
|
||||
def _is_open_close_only(self) -> bool:
|
||||
"""Return if only open / close is supported."""
|
||||
# Normally positionable devices report the position over value,
|
||||
# so if it is missing we have a device which supports open / close only
|
||||
return not self.fibaro_device.value.has_value
|
||||
|
||||
def update(self) -> None:
|
||||
"""Update the state."""
|
||||
super().update()
|
||||
@@ -74,20 +65,15 @@ class FibaroCover(FibaroEntity, CoverEntity):
|
||||
self._attr_current_cover_position = self.bound(self.level)
|
||||
self._attr_current_cover_tilt_position = self.bound(self.level2)
|
||||
|
||||
device_state = self.fibaro_device.state
|
||||
|
||||
# Be aware that opening and closing is only available for some modern
|
||||
# devices.
|
||||
# For example the Fibaro Roller Shutter 4 reports this correctly.
|
||||
if device_state.has_value:
|
||||
self._attr_is_opening = device_state.str_value().lower() == "opening"
|
||||
self._attr_is_closing = device_state.str_value().lower() == "closing"
|
||||
device_state = self.fibaro_device.state.str_value(default="").lower()
|
||||
self._attr_is_opening = device_state == "opening"
|
||||
self._attr_is_closing = device_state == "closing"
|
||||
|
||||
closed: bool | None = None
|
||||
if self._is_open_close_only():
|
||||
if device_state.has_value and device_state.str_value().lower() != "unknown":
|
||||
closed = device_state.str_value().lower() == "closed"
|
||||
elif self.current_cover_position is not None:
|
||||
if self.current_cover_position is not None:
|
||||
closed = self.current_cover_position == 0
|
||||
self._attr_is_closed = closed
|
||||
|
||||
@@ -96,7 +82,7 @@ class FibaroCover(FibaroEntity, CoverEntity):
|
||||
self.set_level(cast(int, kwargs.get(ATTR_POSITION)))
|
||||
|
||||
def set_cover_tilt_position(self, **kwargs: Any) -> None:
|
||||
"""Move the cover to a specific position."""
|
||||
"""Move the slats to a specific position."""
|
||||
self.set_level2(cast(int, kwargs.get(ATTR_TILT_POSITION)))
|
||||
|
||||
def open_cover(self, **kwargs: Any) -> None:
|
||||
@@ -118,3 +104,62 @@ class FibaroCover(FibaroEntity, CoverEntity):
|
||||
def stop_cover(self, **kwargs: Any) -> None:
|
||||
"""Stop the cover."""
|
||||
self.action("stop")
|
||||
|
||||
|
||||
class FibaroCover(FibaroEntity, CoverEntity):
|
||||
"""Representation of a fibaro cover which supports only open / close commands."""
|
||||
|
||||
def __init__(self, fibaro_device: DeviceModel) -> None:
|
||||
"""Initialize the device."""
|
||||
super().__init__(fibaro_device)
|
||||
self.entity_id = ENTITY_ID_FORMAT.format(self.ha_id)
|
||||
|
||||
self._attr_supported_features = (
|
||||
CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE
|
||||
)
|
||||
if "stop" in self.fibaro_device.actions:
|
||||
self._attr_supported_features |= CoverEntityFeature.STOP
|
||||
if "rotateSlatsUp" in self.fibaro_device.actions:
|
||||
self._attr_supported_features |= CoverEntityFeature.OPEN_TILT
|
||||
if "rotateSlatsDown" in self.fibaro_device.actions:
|
||||
self._attr_supported_features |= CoverEntityFeature.CLOSE_TILT
|
||||
if "stopSlats" in self.fibaro_device.actions:
|
||||
self._attr_supported_features |= CoverEntityFeature.STOP_TILT
|
||||
|
||||
def update(self) -> None:
|
||||
"""Update the state."""
|
||||
super().update()
|
||||
|
||||
device_state = self.fibaro_device.state.str_value(default="").lower()
|
||||
|
||||
self._attr_is_opening = device_state == "opening"
|
||||
self._attr_is_closing = device_state == "closing"
|
||||
|
||||
closed: bool | None = None
|
||||
if device_state not in {"", "unknown"}:
|
||||
closed = device_state == "closed"
|
||||
self._attr_is_closed = closed
|
||||
|
||||
def open_cover(self, **kwargs: Any) -> None:
|
||||
"""Open the cover."""
|
||||
self.action("open")
|
||||
|
||||
def close_cover(self, **kwargs: Any) -> None:
|
||||
"""Close the cover."""
|
||||
self.action("close")
|
||||
|
||||
def stop_cover(self, **kwargs: Any) -> None:
|
||||
"""Stop the cover."""
|
||||
self.action("stop")
|
||||
|
||||
def open_cover_tilt(self, **kwargs: Any) -> None:
|
||||
"""Open the cover slats."""
|
||||
self.action("rotateSlatsUp")
|
||||
|
||||
def close_cover_tilt(self, **kwargs: Any) -> None:
|
||||
"""Close the cover slats."""
|
||||
self.action("rotateSlatsDown")
|
||||
|
||||
def stop_cover_tilt(self, **kwargs: Any) -> None:
|
||||
"""Stop the cover slats turning."""
|
||||
self.action("stopSlats")
|
||||
|
||||
@@ -83,8 +83,8 @@ class FibaroLight(FibaroEntity, LightEntity):
|
||||
)
|
||||
supports_dimming = (
|
||||
fibaro_device.has_interface("levelChange")
|
||||
and "setValue" in fibaro_device.actions
|
||||
)
|
||||
or fibaro_device.type == "com.fibaro.multilevelSwitch"
|
||||
) and "setValue" in fibaro_device.actions
|
||||
|
||||
if supports_color and supports_white_v:
|
||||
self._attr_supported_color_modes = {ColorMode.RGBW}
|
||||
|
||||
@@ -2,7 +2,13 @@
|
||||
"config": {
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
"implementation": "[%key:common::config_flow::data::implementation%]"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "[%key:common::config_flow::description::implementation%]"
|
||||
}
|
||||
},
|
||||
"auth": {
|
||||
"title": "Link Fitbit"
|
||||
|
||||
@@ -125,8 +125,6 @@ class Device(CoordinatorEntity[FreedomproDataUpdateCoordinator], ClimateEntity):
|
||||
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Async function to set mode to climate."""
|
||||
if hvac_mode not in SUPPORTED_HVAC_MODES:
|
||||
raise ValueError(f"Got unsupported hvac_mode {hvac_mode}")
|
||||
|
||||
payload = {"heatingCoolingState": HVAC_INVERT_MAP[hvac_mode]}
|
||||
await put_state(
|
||||
|
||||
@@ -33,7 +33,7 @@ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up fritzboxtools integration."""
|
||||
await async_setup_services(hass)
|
||||
async_setup_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ from fritzconnection.core.exceptions import (
|
||||
from fritzconnection.lib.fritzwlan import DEFAULT_PASSWORD_LENGTH
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers.service import async_extract_config_entry_ids
|
||||
|
||||
@@ -64,7 +64,8 @@ async def _async_set_guest_wifi_password(service_call: ServiceCall) -> None:
|
||||
) from ex
|
||||
|
||||
|
||||
async def async_setup_services(hass: HomeAssistant) -> None:
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up services for Fritz integration."""
|
||||
|
||||
hass.services.async_register(
|
||||
|
||||
@@ -20,5 +20,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/frontend",
|
||||
"integration_type": "system",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20250531.0"]
|
||||
"requirements": ["home-assistant-frontend==20250531.3"]
|
||||
}
|
||||
|
||||
@@ -27,7 +27,7 @@ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up Fully Kiosk Browser."""
|
||||
|
||||
await async_setup_services(hass)
|
||||
async_setup_services(hass)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ import voluptuous as vol

from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.const import ATTR_DEVICE_ID
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, device_registry as dr

@@ -23,71 +23,73 @@ from .const import (
from .coordinator import FullyKioskDataUpdateCoordinator

async def async_setup_services(hass: HomeAssistant) -> None:
|
||||
async def _collect_coordinators(
|
||||
call: ServiceCall,
|
||||
) -> list[FullyKioskDataUpdateCoordinator]:
|
||||
device_ids: list[str] = call.data[ATTR_DEVICE_ID]
|
||||
config_entries = list[ConfigEntry]()
|
||||
registry = dr.async_get(call.hass)
|
||||
for target in device_ids:
|
||||
device = registry.async_get(target)
|
||||
if device:
|
||||
device_entries = list[ConfigEntry]()
|
||||
for entry_id in device.config_entries:
|
||||
entry = call.hass.config_entries.async_get_entry(entry_id)
|
||||
if entry and entry.domain == DOMAIN:
|
||||
device_entries.append(entry)
|
||||
if not device_entries:
|
||||
raise HomeAssistantError(f"Device '{target}' is not a {DOMAIN} device")
|
||||
config_entries.extend(device_entries)
|
||||
else:
|
||||
raise HomeAssistantError(f"Device '{target}' not found in device registry")
|
||||
coordinators = list[FullyKioskDataUpdateCoordinator]()
|
||||
for config_entry in config_entries:
|
||||
if config_entry.state != ConfigEntryState.LOADED:
|
||||
raise HomeAssistantError(f"{config_entry.title} is not loaded")
|
||||
coordinators.append(config_entry.runtime_data)
|
||||
return coordinators
|
||||
|
||||
|
||||
async def _async_load_url(call: ServiceCall) -> None:
|
||||
"""Load a URL on the Fully Kiosk Browser."""
|
||||
for coordinator in await _collect_coordinators(call):
|
||||
await coordinator.fully.loadUrl(call.data[ATTR_URL])
|
||||
|
||||
|
||||
async def _async_start_app(call: ServiceCall) -> None:
|
||||
"""Start an app on the device."""
|
||||
for coordinator in await _collect_coordinators(call):
|
||||
await coordinator.fully.startApplication(call.data[ATTR_APPLICATION])
|
||||
|
||||
|
||||
async def _async_set_config(call: ServiceCall) -> None:
|
||||
"""Set a Fully Kiosk Browser config value on the device."""
|
||||
for coordinator in await _collect_coordinators(call):
|
||||
key = call.data[ATTR_KEY]
|
||||
value = call.data[ATTR_VALUE]
|
||||
|
||||
# Fully API has different methods for setting string and bool values.
|
||||
# check if call.data[ATTR_VALUE] is a bool
|
||||
if isinstance(value, bool) or (
|
||||
isinstance(value, str) and value.lower() in ("true", "false")
|
||||
):
|
||||
await coordinator.fully.setConfigurationBool(key, value)
|
||||
else:
|
||||
# Convert any int values to string
|
||||
if isinstance(value, int):
|
||||
value = str(value)
|
||||
|
||||
await coordinator.fully.setConfigurationString(key, value)
|
||||
|
||||
|
||||
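The helper above chooses between the Fully Kiosk API's boolean and string setters based on the value type. A standalone sketch of that decision, with a generic client object standing in for coordinator.fully (illustrative only, not part of this diff):

from typing import Any


async def set_config_value(fully: Any, key: str, value: Any) -> None:
    """Dispatch to the bool or string setter, mirroring the logic above."""
    if isinstance(value, bool) or (
        isinstance(value, str) and value.lower() in ("true", "false")
    ):
        # Real booleans and the strings "true"/"false" use the bool endpoint.
        await fully.setConfigurationBool(key, value)
    else:
        # Everything else is sent as a string; ints are converted first.
        if isinstance(value, int):
            value = str(value)
        await fully.setConfigurationString(key, value)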
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up the services for the Fully Kiosk Browser integration."""
|
||||
|
||||
async def collect_coordinators(
|
||||
device_ids: list[str],
|
||||
) -> list[FullyKioskDataUpdateCoordinator]:
|
||||
config_entries = list[ConfigEntry]()
|
||||
registry = dr.async_get(hass)
|
||||
for target in device_ids:
|
||||
device = registry.async_get(target)
|
||||
if device:
|
||||
device_entries = list[ConfigEntry]()
|
||||
for entry_id in device.config_entries:
|
||||
entry = hass.config_entries.async_get_entry(entry_id)
|
||||
if entry and entry.domain == DOMAIN:
|
||||
device_entries.append(entry)
|
||||
if not device_entries:
|
||||
raise HomeAssistantError(
|
||||
f"Device '{target}' is not a {DOMAIN} device"
|
||||
)
|
||||
config_entries.extend(device_entries)
|
||||
else:
|
||||
raise HomeAssistantError(
|
||||
f"Device '{target}' not found in device registry"
|
||||
)
|
||||
coordinators = list[FullyKioskDataUpdateCoordinator]()
|
||||
for config_entry in config_entries:
|
||||
if config_entry.state != ConfigEntryState.LOADED:
|
||||
raise HomeAssistantError(f"{config_entry.title} is not loaded")
|
||||
coordinators.append(config_entry.runtime_data)
|
||||
return coordinators
|
||||
|
||||
async def async_load_url(call: ServiceCall) -> None:
|
||||
"""Load a URL on the Fully Kiosk Browser."""
|
||||
for coordinator in await collect_coordinators(call.data[ATTR_DEVICE_ID]):
|
||||
await coordinator.fully.loadUrl(call.data[ATTR_URL])
|
||||
|
||||
async def async_start_app(call: ServiceCall) -> None:
|
||||
"""Start an app on the device."""
|
||||
for coordinator in await collect_coordinators(call.data[ATTR_DEVICE_ID]):
|
||||
await coordinator.fully.startApplication(call.data[ATTR_APPLICATION])
|
||||
|
||||
async def async_set_config(call: ServiceCall) -> None:
|
||||
"""Set a Fully Kiosk Browser config value on the device."""
|
||||
for coordinator in await collect_coordinators(call.data[ATTR_DEVICE_ID]):
|
||||
key = call.data[ATTR_KEY]
|
||||
value = call.data[ATTR_VALUE]
|
||||
|
||||
# Fully API has different methods for setting string and bool values.
|
||||
# check if call.data[ATTR_VALUE] is a bool
|
||||
if isinstance(value, bool) or (
|
||||
isinstance(value, str) and value.lower() in ("true", "false")
|
||||
):
|
||||
await coordinator.fully.setConfigurationBool(key, value)
|
||||
else:
|
||||
# Convert any int values to string
|
||||
if isinstance(value, int):
|
||||
value = str(value)
|
||||
|
||||
await coordinator.fully.setConfigurationString(key, value)
|
||||
|
||||
# Register all the above services
|
||||
service_mapping = [
|
||||
(async_load_url, SERVICE_LOAD_URL, ATTR_URL),
|
||||
(async_start_app, SERVICE_START_APPLICATION, ATTR_APPLICATION),
|
||||
(_async_load_url, SERVICE_LOAD_URL, ATTR_URL),
|
||||
(_async_start_app, SERVICE_START_APPLICATION, ATTR_APPLICATION),
|
||||
]
|
||||
for service_handler, service_name, attrib in service_mapping:
|
||||
hass.services.async_register(
|
||||
@@ -107,7 +109,7 @@ async def async_setup_services(hass: HomeAssistant) -> None:
hass.services.async_register(
DOMAIN,
SERVICE_SET_CONFIG,
async_set_config,
_async_set_config,
schema=vol.Schema(
vol.All(
{

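The hunk above swaps the nested handler names for the new module-level private ones in the registration table. A sketch of how such a mapping is typically expanded into individual registrations, assuming the surrounding module's names (the exact schema shape is an assumption, not taken from this diff):

import voluptuous as vol

from homeassistant.const import ATTR_DEVICE_ID
from homeassistant.helpers import config_validation as cv

service_mapping = [
    (_async_load_url, SERVICE_LOAD_URL, ATTR_URL),
    (_async_start_app, SERVICE_START_APPLICATION, ATTR_APPLICATION),
]
for service_handler, service_name, attrib in service_mapping:
    hass.services.async_register(
        DOMAIN,
        service_name,
        service_handler,
        # Each service targets one or more devices and carries one payload key.
        schema=vol.Schema(
            {
                vol.Required(ATTR_DEVICE_ID): cv.ensure_list,
                vol.Required(attrib): cv.string,
            }
        ),
    )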
@@ -2,7 +2,13 @@
"config": {
"step": {
"pick_implementation": {
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
"data": {
"implementation": "[%key:common::config_flow::data::implementation%]"
},
"data_description": {
"implementation": "[%key:common::config_flow::description::implementation%]"
}
},
"reauth_confirm": {
"title": "[%key:common::config_flow::title::reauth%]",

@@ -2,7 +2,13 @@
"config": {
"step": {
"pick_implementation": {
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
"data": {
"implementation": "[%key:common::config_flow::data::implementation%]"
},
"data_description": {
"implementation": "[%key:common::config_flow::description::implementation%]"
}
},
"reauth_confirm": {
"title": "[%key:common::config_flow::title::reauth%]",

@@ -11,6 +11,7 @@ from homeassistant.core import (
ServiceCall,
ServiceResponse,
SupportsResponse,
callback,
)
from homeassistant.helpers import config_validation as cv

@@ -49,6 +50,7 @@ async def _send_text_command(call: ServiceCall) -> ServiceResponse:
return None

@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Add the services for Google Assistant SDK."""

@@ -2,7 +2,13 @@
"config": {
"step": {
"pick_implementation": {
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
"data": {
"implementation": "[%key:common::config_flow::data::implementation%]"
},
"data_description": {
"implementation": "[%key:common::config_flow::description::implementation%]"
}
},
"reauth_confirm": {
"title": "[%key:common::config_flow::title::reauth%]",

@@ -2,7 +2,13 @@
"config": {
"step": {
"pick_implementation": {
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
"data": {
"implementation": "[%key:common::config_flow::data::implementation%]"
},
"data_description": {
"implementation": "[%key:common::config_flow::description::implementation%]"
}
},
"reauth_confirm": {
"title": "[%key:common::config_flow::title::reauth%]",

@@ -45,7 +45,10 @@ CONF_IMAGE_FILENAME = "image_filename"
CONF_FILENAMES = "filenames"

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
PLATFORMS = (Platform.CONVERSATION,)
PLATFORMS = (
Platform.CONVERSATION,
Platform.TTS,
)

type GoogleGenerativeAIConfigEntry = ConfigEntry[Client]

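Adding Platform.TTS to PLATFORMS means the config entry now forwards setup to two entity platforms. A minimal sketch of how a config entry setup typically forwards to that tuple, assuming the standard helper rather than this integration's exact entry setup:

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

PLATFORMS = (
    Platform.CONVERSATION,
    Platform.TTS,
)


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Forward the entry to every platform listed in PLATFORMS."""
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True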
@@ -6,9 +6,11 @@ DOMAIN = "google_generative_ai_conversation"
LOGGER = logging.getLogger(__package__)
CONF_PROMPT = "prompt"

ATTR_MODEL = "model"
CONF_RECOMMENDED = "recommended"
CONF_CHAT_MODEL = "chat_model"
RECOMMENDED_CHAT_MODEL = "models/gemini-2.0-flash"
RECOMMENDED_TTS_MODEL = "gemini-2.5-flash-preview-tts"
CONF_TEMPERATURE = "temperature"
RECOMMENDED_TEMPERATURE = 1.0
CONF_TOP_P = "top_p"

@@ -2,63 +2,18 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import codecs
|
||||
from collections.abc import AsyncGenerator, Callable
|
||||
from dataclasses import replace
|
||||
from typing import Any, Literal, cast
|
||||
|
||||
from google.genai.errors import APIError, ClientError
|
||||
from google.genai.types import (
|
||||
AutomaticFunctionCallingConfig,
|
||||
Content,
|
||||
FunctionDeclaration,
|
||||
GenerateContentConfig,
|
||||
GenerateContentResponse,
|
||||
GoogleSearch,
|
||||
HarmCategory,
|
||||
Part,
|
||||
SafetySetting,
|
||||
Schema,
|
||||
Tool,
|
||||
)
|
||||
from voluptuous_openapi import convert
|
||||
from typing import Literal
|
||||
|
||||
from homeassistant.components import assist_pipeline, conversation
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr, intent, llm
|
||||
from homeassistant.helpers import intent
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import (
|
||||
CONF_CHAT_MODEL,
|
||||
CONF_DANGEROUS_BLOCK_THRESHOLD,
|
||||
CONF_HARASSMENT_BLOCK_THRESHOLD,
|
||||
CONF_HATE_BLOCK_THRESHOLD,
|
||||
CONF_MAX_TOKENS,
|
||||
CONF_PROMPT,
|
||||
CONF_SEXUAL_BLOCK_THRESHOLD,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_TOP_K,
|
||||
CONF_TOP_P,
|
||||
CONF_USE_GOOGLE_SEARCH_TOOL,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_HARM_BLOCK_THRESHOLD,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_TOP_K,
|
||||
RECOMMENDED_TOP_P,
|
||||
)
|
||||
|
||||
# Max number of back and forth with the LLM to generate a response
|
||||
MAX_TOOL_ITERATIONS = 10
|
||||
|
||||
ERROR_GETTING_RESPONSE = (
|
||||
"Sorry, I had a problem getting a response from Google Generative AI."
|
||||
)
|
||||
from .const import CONF_PROMPT, DOMAIN, LOGGER
|
||||
from .entity import ERROR_GETTING_RESPONSE, GoogleGenerativeAILLMBaseEntity
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -71,265 +26,18 @@ async def async_setup_entry(
|
||||
async_add_entities([agent])
|
||||
|
||||
|
||||
SUPPORTED_SCHEMA_KEYS = {
|
||||
# Gemini API does not support all of the OpenAPI schema
|
||||
# SoT: https://ai.google.dev/api/caching#Schema
|
||||
"type",
|
||||
"format",
|
||||
"description",
|
||||
"nullable",
|
||||
"enum",
|
||||
"max_items",
|
||||
"min_items",
|
||||
"properties",
|
||||
"required",
|
||||
"items",
|
||||
}
|
||||
|
||||
|
||||
def _camel_to_snake(name: str) -> str:
|
||||
"""Convert camel case to snake case."""
|
||||
return "".join(["_" + c.lower() if c.isupper() else c for c in name]).lstrip("_")
|
||||
|
||||
|
||||
def _format_schema(schema: dict[str, Any]) -> Schema:
|
||||
"""Format the schema to be compatible with Gemini API."""
|
||||
if subschemas := schema.get("allOf"):
|
||||
for subschema in subschemas: # Gemini API does not support allOf keys
|
||||
if "type" in subschema: # Fallback to first subschema with 'type' field
|
||||
return _format_schema(subschema)
|
||||
return _format_schema(
|
||||
subschemas[0]
|
||||
) # Or, if not found, to any of the subschemas
|
||||
|
||||
result = {}
|
||||
for key, val in schema.items():
|
||||
key = _camel_to_snake(key)
|
||||
if key not in SUPPORTED_SCHEMA_KEYS:
|
||||
continue
|
||||
if key == "type":
|
||||
val = val.upper()
|
||||
elif key == "format":
|
||||
# Gemini API does not support all formats, see: https://ai.google.dev/api/caching#Schema
|
||||
# formats that are not supported are ignored
|
||||
if schema.get("type") == "string" and val not in ("enum", "date-time"):
|
||||
continue
|
||||
if schema.get("type") == "number" and val not in ("float", "double"):
|
||||
continue
|
||||
if schema.get("type") == "integer" and val not in ("int32", "int64"):
|
||||
continue
|
||||
if schema.get("type") not in ("string", "number", "integer"):
|
||||
continue
|
||||
elif key == "items":
|
||||
val = _format_schema(val)
|
||||
elif key == "properties":
|
||||
val = {k: _format_schema(v) for k, v in val.items()}
|
||||
result[key] = val
|
||||
|
||||
if result.get("enum") and result.get("type") != "STRING":
|
||||
# enum is only allowed for STRING type. This is safe as long as the schema
|
||||
# contains vol.Coerce for the respective type, for example:
|
||||
# vol.All(vol.Coerce(int), vol.In([1, 2, 3]))
|
||||
result["type"] = "STRING"
|
||||
result["enum"] = [str(item) for item in result["enum"]]
|
||||
|
||||
if result.get("type") == "OBJECT" and not result.get("properties"):
|
||||
# An object with undefined properties is not supported by Gemini API.
|
||||
# Fallback to JSON string. This will probably fail for most tools that want it,
|
||||
# but we don't have a better fallback strategy so far.
|
||||
result["properties"] = {"json": {"type": "STRING"}}
|
||||
result["required"] = []
|
||||
return cast(Schema, result)
|
||||
|
||||
|
||||
def _format_tool(
|
||||
tool: llm.Tool, custom_serializer: Callable[[Any], Any] | None
|
||||
) -> Tool:
|
||||
"""Format tool specification."""
|
||||
|
||||
if tool.parameters.schema:
|
||||
parameters = _format_schema(
|
||||
convert(tool.parameters, custom_serializer=custom_serializer)
|
||||
)
|
||||
else:
|
||||
parameters = None
|
||||
|
||||
return Tool(
|
||||
function_declarations=[
|
||||
FunctionDeclaration(
|
||||
name=tool.name,
|
||||
description=tool.description,
|
||||
parameters=parameters,
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def _escape_decode(value: Any) -> Any:
|
||||
"""Recursively call codecs.escape_decode on all values."""
|
||||
if isinstance(value, str):
|
||||
return codecs.escape_decode(bytes(value, "utf-8"))[0].decode("utf-8") # type: ignore[attr-defined]
|
||||
if isinstance(value, list):
|
||||
return [_escape_decode(item) for item in value]
|
||||
if isinstance(value, dict):
|
||||
return {k: _escape_decode(v) for k, v in value.items()}
|
||||
return value
|
||||
|
||||
|
||||
def _create_google_tool_response_parts(
|
||||
parts: list[conversation.ToolResultContent],
|
||||
) -> list[Part]:
|
||||
"""Create Google tool response parts."""
|
||||
return [
|
||||
Part.from_function_response(
|
||||
name=tool_result.tool_name, response=tool_result.tool_result
|
||||
)
|
||||
for tool_result in parts
|
||||
]
|
||||
|
||||
|
||||
def _create_google_tool_response_content(
|
||||
content: list[conversation.ToolResultContent],
|
||||
) -> Content:
|
||||
"""Create a Google tool response content."""
|
||||
return Content(
|
||||
role="user",
|
||||
parts=_create_google_tool_response_parts(content),
|
||||
)
|
||||
|
||||
|
||||
def _convert_content(
|
||||
content: conversation.UserContent
|
||||
| conversation.AssistantContent
|
||||
| conversation.SystemContent,
|
||||
) -> Content:
|
||||
"""Convert HA content to Google content."""
|
||||
if content.role != "assistant" or not content.tool_calls:
|
||||
role = "model" if content.role == "assistant" else content.role
|
||||
return Content(
|
||||
role=role,
|
||||
parts=[
|
||||
Part.from_text(text=content.content if content.content else ""),
|
||||
],
|
||||
)
|
||||
|
||||
# Handle the Assistant content with tool calls.
|
||||
assert type(content) is conversation.AssistantContent
|
||||
parts: list[Part] = []
|
||||
|
||||
if content.content:
|
||||
parts.append(Part.from_text(text=content.content))
|
||||
|
||||
if content.tool_calls:
|
||||
parts.extend(
|
||||
[
|
||||
Part.from_function_call(
|
||||
name=tool_call.tool_name,
|
||||
args=_escape_decode(tool_call.tool_args),
|
||||
)
|
||||
for tool_call in content.tool_calls
|
||||
]
|
||||
)
|
||||
|
||||
return Content(role="model", parts=parts)
|
||||
|
||||
|
||||
async def _transform_stream(
|
||||
result: AsyncGenerator[GenerateContentResponse],
|
||||
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
|
||||
new_message = True
|
||||
try:
|
||||
async for response in result:
|
||||
LOGGER.debug("Received response chunk: %s", response)
|
||||
chunk: conversation.AssistantContentDeltaDict = {}
|
||||
|
||||
if new_message:
|
||||
chunk["role"] = "assistant"
|
||||
new_message = False
|
||||
|
||||
# According to the API docs, this would mean no candidate is returned, so we can safely throw an error here.
|
||||
if response.prompt_feedback or not response.candidates:
|
||||
reason = (
|
||||
response.prompt_feedback.block_reason_message
|
||||
if response.prompt_feedback
|
||||
else "unknown"
|
||||
)
|
||||
raise HomeAssistantError(
|
||||
f"The message got blocked due to content violations, reason: {reason}"
|
||||
)
|
||||
|
||||
candidate = response.candidates[0]
|
||||
|
||||
if (
|
||||
candidate.finish_reason is not None
|
||||
and candidate.finish_reason != "STOP"
|
||||
):
|
||||
# The message ended due to a content error as explained in: https://ai.google.dev/api/generate-content#FinishReason
|
||||
LOGGER.error(
|
||||
"Error in Google Generative AI response: %s, see: https://ai.google.dev/api/generate-content#FinishReason",
|
||||
candidate.finish_reason,
|
||||
)
|
||||
raise HomeAssistantError(
|
||||
f"{ERROR_GETTING_RESPONSE} Reason: {candidate.finish_reason}"
|
||||
)
|
||||
|
||||
response_parts = (
|
||||
candidate.content.parts
|
||||
if candidate.content is not None and candidate.content.parts is not None
|
||||
else []
|
||||
)
|
||||
|
||||
content = "".join([part.text for part in response_parts if part.text])
|
||||
tool_calls = []
|
||||
for part in response_parts:
|
||||
if not part.function_call:
|
||||
continue
|
||||
tool_call = part.function_call
|
||||
tool_name = tool_call.name if tool_call.name else ""
|
||||
tool_args = _escape_decode(tool_call.args)
|
||||
tool_calls.append(
|
||||
llm.ToolInput(tool_name=tool_name, tool_args=tool_args)
|
||||
)
|
||||
|
||||
if tool_calls:
|
||||
chunk["tool_calls"] = tool_calls
|
||||
|
||||
chunk["content"] = content
|
||||
yield chunk
|
||||
except (
|
||||
APIError,
|
||||
ValueError,
|
||||
) as err:
|
||||
LOGGER.error("Error sending message: %s %s", type(err), err)
|
||||
if isinstance(err, APIError):
|
||||
message = err.message
|
||||
else:
|
||||
message = type(err).__name__
|
||||
error = f"{ERROR_GETTING_RESPONSE}: {message}"
|
||||
raise HomeAssistantError(error) from err
|
||||
|
||||
|
||||
class GoogleGenerativeAIConversationEntity(
|
||||
conversation.ConversationEntity, conversation.AbstractConversationAgent
|
||||
conversation.ConversationEntity,
|
||||
conversation.AbstractConversationAgent,
|
||||
GoogleGenerativeAILLMBaseEntity,
|
||||
):
|
||||
"""Google Generative AI conversation agent."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
_attr_supports_streaming = True
|
||||
|
||||
def __init__(self, entry: ConfigEntry) -> None:
|
||||
"""Initialize the agent."""
|
||||
self.entry = entry
|
||||
self._genai_client = entry.runtime_data
|
||||
self._attr_unique_id = entry.entry_id
|
||||
self._attr_device_info = dr.DeviceInfo(
|
||||
identifiers={(DOMAIN, entry.entry_id)},
|
||||
name=entry.title,
|
||||
manufacturer="Google",
|
||||
model="Generative AI",
|
||||
entry_type=dr.DeviceEntryType.SERVICE,
|
||||
)
|
||||
super().__init__(entry)
|
||||
if self.entry.options.get(CONF_LLM_HASS_API):
|
||||
self._attr_supported_features = (
|
||||
conversation.ConversationEntityFeature.CONTROL
|
||||
@@ -356,13 +64,6 @@ class GoogleGenerativeAIConversationEntity(
|
||||
conversation.async_unset_agent(self.hass, self.entry)
|
||||
await super().async_will_remove_from_hass()
|
||||
|
||||
def _fix_tool_name(self, tool_name: str) -> str:
|
||||
"""Fix tool name if needed."""
|
||||
# The Gemini 2.0+ tokenizer seemingly has an issue with the HassListAddItem tool
|
||||
# name. This makes sure when it incorrectly changes the name, that we change it
|
||||
# back for HA to call.
|
||||
return tool_name if tool_name != "HasListAddItem" else "HassListAddItem"
|
||||
|
||||
async def _async_handle_message(
|
||||
self,
|
||||
user_input: conversation.ConversationInput,
|
||||
@@ -372,162 +73,16 @@ class GoogleGenerativeAIConversationEntity(
|
||||
options = self.entry.options
|
||||
|
||||
try:
|
||||
await chat_log.async_update_llm_data(
|
||||
DOMAIN,
|
||||
user_input,
|
||||
await chat_log.async_provide_llm_data(
|
||||
user_input.as_llm_context(DOMAIN),
|
||||
options.get(CONF_LLM_HASS_API),
|
||||
options.get(CONF_PROMPT),
|
||||
user_input.extra_system_prompt,
|
||||
)
|
||||
except conversation.ConverseError as err:
|
||||
return err.as_conversation_result()
|
||||
|
||||
tools: list[Tool | Callable[..., Any]] | None = None
|
||||
if chat_log.llm_api:
|
||||
tools = [
|
||||
_format_tool(tool, chat_log.llm_api.custom_serializer)
|
||||
for tool in chat_log.llm_api.tools
|
||||
]
|
||||
|
||||
# Using search grounding allows the model to retrieve information from the web,
|
||||
# however, it may interfere with how the model decides to use some tools, or entities
|
||||
# for example weather entity may be disregarded if the model chooses to Google it.
|
||||
if options.get(CONF_USE_GOOGLE_SEARCH_TOOL) is True:
|
||||
tools = tools or []
|
||||
tools.append(Tool(google_search=GoogleSearch()))
|
||||
|
||||
model_name = self.entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
# Avoid INVALID_ARGUMENT Developer instruction is not enabled for <model>
|
||||
supports_system_instruction = (
|
||||
"gemma" not in model_name
|
||||
and "gemini-2.0-flash-preview-image-generation" not in model_name
|
||||
)
|
||||
|
||||
prompt_content = cast(
|
||||
conversation.SystemContent,
|
||||
chat_log.content[0],
|
||||
)
|
||||
|
||||
if prompt_content.content:
|
||||
prompt = prompt_content.content
|
||||
else:
|
||||
raise HomeAssistantError("Invalid prompt content")
|
||||
|
||||
messages: list[Content] = []
|
||||
|
||||
# Google groups tool results, we do not. Group them before sending.
|
||||
tool_results: list[conversation.ToolResultContent] = []
|
||||
|
||||
for chat_content in chat_log.content[1:-1]:
|
||||
if chat_content.role == "tool_result":
|
||||
tool_results.append(chat_content)
|
||||
continue
|
||||
|
||||
if (
|
||||
not isinstance(chat_content, conversation.ToolResultContent)
|
||||
and chat_content.content == ""
|
||||
):
|
||||
# Skipping is not possible since the number of function calls needs to match the number of function responses
|
||||
# and skipping one would mean removing the other and hence this would prevent a proper chat log
|
||||
chat_content = replace(chat_content, content=" ")
|
||||
|
||||
if tool_results:
|
||||
messages.append(_create_google_tool_response_content(tool_results))
|
||||
tool_results.clear()
|
||||
|
||||
messages.append(_convert_content(chat_content))
|
||||
|
||||
# The SDK requires the first message to be a user message
|
||||
# This is not the case if user used `start_conversation`
|
||||
# Workaround from https://github.com/googleapis/python-genai/issues/529#issuecomment-2740964537
|
||||
if messages and messages[0].role != "user":
|
||||
messages.insert(
|
||||
0,
|
||||
Content(role="user", parts=[Part.from_text(text=" ")]),
|
||||
)
|
||||
|
||||
if tool_results:
|
||||
messages.append(_create_google_tool_response_content(tool_results))
|
||||
generateContentConfig = GenerateContentConfig(
|
||||
temperature=self.entry.options.get(
|
||||
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
|
||||
),
|
||||
top_k=self.entry.options.get(CONF_TOP_K, RECOMMENDED_TOP_K),
|
||||
top_p=self.entry.options.get(CONF_TOP_P, RECOMMENDED_TOP_P),
|
||||
max_output_tokens=self.entry.options.get(
|
||||
CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS
|
||||
),
|
||||
safety_settings=[
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_HATE_SPEECH,
|
||||
threshold=self.entry.options.get(
|
||||
CONF_HATE_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
|
||||
),
|
||||
),
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_HARASSMENT,
|
||||
threshold=self.entry.options.get(
|
||||
CONF_HARASSMENT_BLOCK_THRESHOLD,
|
||||
RECOMMENDED_HARM_BLOCK_THRESHOLD,
|
||||
),
|
||||
),
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT,
|
||||
threshold=self.entry.options.get(
|
||||
CONF_DANGEROUS_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
|
||||
),
|
||||
),
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT,
|
||||
threshold=self.entry.options.get(
|
||||
CONF_SEXUAL_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
|
||||
),
|
||||
),
|
||||
],
|
||||
tools=tools or None,
|
||||
system_instruction=prompt if supports_system_instruction else None,
|
||||
automatic_function_calling=AutomaticFunctionCallingConfig(
|
||||
disable=True, maximum_remote_calls=None
|
||||
),
|
||||
)
|
||||
|
||||
if not supports_system_instruction:
|
||||
messages = [
|
||||
Content(role="user", parts=[Part.from_text(text=prompt)]),
|
||||
Content(role="model", parts=[Part.from_text(text="Ok")]),
|
||||
*messages,
|
||||
]
|
||||
chat = self._genai_client.aio.chats.create(
|
||||
model=model_name, history=messages, config=generateContentConfig
|
||||
)
|
||||
chat_request: str | list[Part] = user_input.text
|
||||
# To prevent infinite loops, we limit the number of iterations
|
||||
for _iteration in range(MAX_TOOL_ITERATIONS):
|
||||
try:
|
||||
chat_response_generator = await chat.send_message_stream(
|
||||
message=chat_request
|
||||
)
|
||||
except (
|
||||
APIError,
|
||||
ClientError,
|
||||
ValueError,
|
||||
) as err:
|
||||
LOGGER.error("Error sending message: %s %s", type(err), err)
|
||||
error = ERROR_GETTING_RESPONSE
|
||||
raise HomeAssistantError(error) from err
|
||||
|
||||
chat_request = _create_google_tool_response_parts(
|
||||
[
|
||||
content
|
||||
async for content in chat_log.async_add_delta_content_stream(
|
||||
user_input.agent_id,
|
||||
_transform_stream(chat_response_generator),
|
||||
)
|
||||
if isinstance(content, conversation.ToolResultContent)
|
||||
]
|
||||
)
|
||||
|
||||
if not chat_log.unresponded_tool_results:
|
||||
break
|
||||
await self._async_handle_chat_log(chat_log)
|
||||
|
||||
response = intent.IntentResponse(language=user_input.language)
|
||||
if not isinstance(chat_log.content[-1], conversation.AssistantContent):
|
||||
@@ -535,7 +90,7 @@ class GoogleGenerativeAIConversationEntity(
|
||||
"Last content in chat log is not an AssistantContent: %s. This could be due to the model not returning a valid response",
|
||||
chat_log.content[-1],
|
||||
)
|
||||
raise HomeAssistantError(f"{ERROR_GETTING_RESPONSE}")
|
||||
raise HomeAssistantError(ERROR_GETTING_RESPONSE)
|
||||
response.async_set_speech(chat_log.content[-1].content or "")
|
||||
return conversation.ConversationResult(
|
||||
response=response,
|
||||
|
||||
@@ -0,0 +1,475 @@
|
||||
"""Conversation support for the Google Generative AI Conversation integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import codecs
|
||||
from collections.abc import AsyncGenerator, Callable
|
||||
from dataclasses import replace
|
||||
from typing import Any, cast
|
||||
|
||||
from google.genai.errors import APIError, ClientError
|
||||
from google.genai.types import (
|
||||
AutomaticFunctionCallingConfig,
|
||||
Content,
|
||||
FunctionDeclaration,
|
||||
GenerateContentConfig,
|
||||
GenerateContentResponse,
|
||||
GoogleSearch,
|
||||
HarmCategory,
|
||||
Part,
|
||||
SafetySetting,
|
||||
Schema,
|
||||
Tool,
|
||||
)
|
||||
from voluptuous_openapi import convert
|
||||
|
||||
from homeassistant.components import conversation
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr, llm
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
from .const import (
|
||||
CONF_CHAT_MODEL,
|
||||
CONF_DANGEROUS_BLOCK_THRESHOLD,
|
||||
CONF_HARASSMENT_BLOCK_THRESHOLD,
|
||||
CONF_HATE_BLOCK_THRESHOLD,
|
||||
CONF_MAX_TOKENS,
|
||||
CONF_SEXUAL_BLOCK_THRESHOLD,
|
||||
CONF_TEMPERATURE,
|
||||
CONF_TOP_K,
|
||||
CONF_TOP_P,
|
||||
CONF_USE_GOOGLE_SEARCH_TOOL,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_HARM_BLOCK_THRESHOLD,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_TOP_K,
|
||||
RECOMMENDED_TOP_P,
|
||||
)
|
||||
|
||||
# Max number of back and forth with the LLM to generate a response
|
||||
MAX_TOOL_ITERATIONS = 10
|
||||
|
||||
ERROR_GETTING_RESPONSE = (
|
||||
"Sorry, I had a problem getting a response from Google Generative AI."
|
||||
)
|
||||
|
||||
|
||||
SUPPORTED_SCHEMA_KEYS = {
|
||||
# Gemini API does not support all of the OpenAPI schema
|
||||
# SoT: https://ai.google.dev/api/caching#Schema
|
||||
"type",
|
||||
"format",
|
||||
"description",
|
||||
"nullable",
|
||||
"enum",
|
||||
"max_items",
|
||||
"min_items",
|
||||
"properties",
|
||||
"required",
|
||||
"items",
|
||||
}
|
||||
|
||||
|
||||
def _camel_to_snake(name: str) -> str:
|
||||
"""Convert camel case to snake case."""
|
||||
return "".join(["_" + c.lower() if c.isupper() else c for c in name]).lstrip("_")
|
||||
|
||||
|
||||
def _format_schema(schema: dict[str, Any]) -> Schema:
|
||||
"""Format the schema to be compatible with Gemini API."""
|
||||
if subschemas := schema.get("allOf"):
|
||||
for subschema in subschemas: # Gemini API does not support allOf keys
|
||||
if "type" in subschema: # Fallback to first subschema with 'type' field
|
||||
return _format_schema(subschema)
|
||||
return _format_schema(
|
||||
subschemas[0]
|
||||
) # Or, if not found, to any of the subschemas
|
||||
|
||||
result = {}
|
||||
for key, val in schema.items():
|
||||
key = _camel_to_snake(key)
|
||||
if key not in SUPPORTED_SCHEMA_KEYS:
|
||||
continue
|
||||
if key == "type":
|
||||
val = val.upper()
|
||||
elif key == "format":
|
||||
# Gemini API does not support all formats, see: https://ai.google.dev/api/caching#Schema
|
||||
# formats that are not supported are ignored
|
||||
if schema.get("type") == "string" and val not in ("enum", "date-time"):
|
||||
continue
|
||||
if schema.get("type") == "number" and val not in ("float", "double"):
|
||||
continue
|
||||
if schema.get("type") == "integer" and val not in ("int32", "int64"):
|
||||
continue
|
||||
if schema.get("type") not in ("string", "number", "integer"):
|
||||
continue
|
||||
elif key == "items":
|
||||
val = _format_schema(val)
|
||||
elif key == "properties":
|
||||
val = {k: _format_schema(v) for k, v in val.items()}
|
||||
result[key] = val
|
||||
|
||||
if result.get("enum") and result.get("type") != "STRING":
|
||||
# enum is only allowed for STRING type. This is safe as long as the schema
|
||||
# contains vol.Coerce for the respective type, for example:
|
||||
# vol.All(vol.Coerce(int), vol.In([1, 2, 3]))
|
||||
result["type"] = "STRING"
|
||||
result["enum"] = [str(item) for item in result["enum"]]
|
||||
|
||||
if result.get("type") == "OBJECT" and not result.get("properties"):
|
||||
# An object with undefined properties is not supported by Gemini API.
|
||||
# Fallback to JSON string. This will probably fail for most tools that want it,
|
||||
# but we don't have a better fallback strategy so far.
|
||||
result["properties"] = {"json": {"type": "STRING"}}
|
||||
result["required"] = []
|
||||
return cast(Schema, result)
|
||||
|
||||
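To make the transformation concrete, here is a small worked example of what _format_schema produces for a schema as emitted by voluptuous_openapi.convert; the input values are invented for illustration, but the behaviour follows the code above:

example_input = {
    "type": "array",
    "maxItems": 3,
    "items": {"type": "string", "enum": ["on", "off"]},
}

# _format_schema(example_input) would return, per the rules above:
# - camelCase keys converted to snake_case ("maxItems" -> "max_items")
# - "type" values upper-cased for the Gemini API
# - nested "items" formatted recursively
expected_output = {
    "type": "ARRAY",
    "max_items": 3,
    "items": {"type": "STRING", "enum": ["on", "off"]},
}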
|
||||
def _format_tool(
|
||||
tool: llm.Tool, custom_serializer: Callable[[Any], Any] | None
|
||||
) -> Tool:
|
||||
"""Format tool specification."""
|
||||
|
||||
if tool.parameters.schema:
|
||||
parameters = _format_schema(
|
||||
convert(tool.parameters, custom_serializer=custom_serializer)
|
||||
)
|
||||
else:
|
||||
parameters = None
|
||||
|
||||
return Tool(
|
||||
function_declarations=[
|
||||
FunctionDeclaration(
|
||||
name=tool.name,
|
||||
description=tool.description,
|
||||
parameters=parameters,
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def _escape_decode(value: Any) -> Any:
|
||||
"""Recursively call codecs.escape_decode on all values."""
|
||||
if isinstance(value, str):
|
||||
return codecs.escape_decode(bytes(value, "utf-8"))[0].decode("utf-8") # type: ignore[attr-defined]
|
||||
if isinstance(value, list):
|
||||
return [_escape_decode(item) for item in value]
|
||||
if isinstance(value, dict):
|
||||
return {k: _escape_decode(v) for k, v in value.items()}
|
||||
return value
|
||||
|
||||
|
||||
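A quick illustration of _escape_decode: the model sometimes returns tool arguments containing literal backslash escape sequences, and the helper walks nested structures and re-decodes them. The values below are invented for the example:

raw_args = {"item": "milk\\nbread", "lists": ["todo\\tlist"]}

# _escape_decode(raw_args) decodes the escape sequences in every string,
# recursing through dicts and lists:
decoded = {"item": "milk\nbread", "lists": ["todo\tlist"]}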
def _create_google_tool_response_parts(
|
||||
parts: list[conversation.ToolResultContent],
|
||||
) -> list[Part]:
|
||||
"""Create Google tool response parts."""
|
||||
return [
|
||||
Part.from_function_response(
|
||||
name=tool_result.tool_name, response=tool_result.tool_result
|
||||
)
|
||||
for tool_result in parts
|
||||
]
|
||||
|
||||
|
||||
def _create_google_tool_response_content(
|
||||
content: list[conversation.ToolResultContent],
|
||||
) -> Content:
|
||||
"""Create a Google tool response content."""
|
||||
return Content(
|
||||
role="user",
|
||||
parts=_create_google_tool_response_parts(content),
|
||||
)
|
||||
|
||||
|
||||
def _convert_content(
|
||||
content: (
|
||||
conversation.UserContent
|
||||
| conversation.AssistantContent
|
||||
| conversation.SystemContent
|
||||
),
|
||||
) -> Content:
|
||||
"""Convert HA content to Google content."""
|
||||
if content.role != "assistant" or not content.tool_calls:
|
||||
role = "model" if content.role == "assistant" else content.role
|
||||
return Content(
|
||||
role=role,
|
||||
parts=[
|
||||
Part.from_text(text=content.content if content.content else ""),
|
||||
],
|
||||
)
|
||||
|
||||
# Handle the Assistant content with tool calls.
|
||||
assert type(content) is conversation.AssistantContent
|
||||
parts: list[Part] = []
|
||||
|
||||
if content.content:
|
||||
parts.append(Part.from_text(text=content.content))
|
||||
|
||||
if content.tool_calls:
|
||||
parts.extend(
|
||||
[
|
||||
Part.from_function_call(
|
||||
name=tool_call.tool_name,
|
||||
args=_escape_decode(tool_call.tool_args),
|
||||
)
|
||||
for tool_call in content.tool_calls
|
||||
]
|
||||
)
|
||||
|
||||
return Content(role="model", parts=parts)
|
||||
|
||||
|
||||
async def _transform_stream(
|
||||
result: AsyncGenerator[GenerateContentResponse],
|
||||
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
|
||||
new_message = True
|
||||
try:
|
||||
async for response in result:
|
||||
LOGGER.debug("Received response chunk: %s", response)
|
||||
chunk: conversation.AssistantContentDeltaDict = {}
|
||||
|
||||
if new_message:
|
||||
chunk["role"] = "assistant"
|
||||
new_message = False
|
||||
|
||||
# According to the API docs, this would mean no candidate is returned, so we can safely throw an error here.
|
||||
if response.prompt_feedback or not response.candidates:
|
||||
reason = (
|
||||
response.prompt_feedback.block_reason_message
|
||||
if response.prompt_feedback
|
||||
else "unknown"
|
||||
)
|
||||
raise HomeAssistantError(
|
||||
f"The message got blocked due to content violations, reason: {reason}"
|
||||
)
|
||||
|
||||
candidate = response.candidates[0]
|
||||
|
||||
if (
|
||||
candidate.finish_reason is not None
|
||||
and candidate.finish_reason != "STOP"
|
||||
):
|
||||
# The message ended due to a content error as explained in: https://ai.google.dev/api/generate-content#FinishReason
|
||||
LOGGER.error(
|
||||
"Error in Google Generative AI response: %s, see: https://ai.google.dev/api/generate-content#FinishReason",
|
||||
candidate.finish_reason,
|
||||
)
|
||||
raise HomeAssistantError(
|
||||
f"{ERROR_GETTING_RESPONSE} Reason: {candidate.finish_reason}"
|
||||
)
|
||||
|
||||
response_parts = (
|
||||
candidate.content.parts
|
||||
if candidate.content is not None and candidate.content.parts is not None
|
||||
else []
|
||||
)
|
||||
|
||||
content = "".join([part.text for part in response_parts if part.text])
|
||||
tool_calls = []
|
||||
for part in response_parts:
|
||||
if not part.function_call:
|
||||
continue
|
||||
tool_call = part.function_call
|
||||
tool_name = tool_call.name if tool_call.name else ""
|
||||
tool_args = _escape_decode(tool_call.args)
|
||||
tool_calls.append(
|
||||
llm.ToolInput(tool_name=tool_name, tool_args=tool_args)
|
||||
)
|
||||
|
||||
if tool_calls:
|
||||
chunk["tool_calls"] = tool_calls
|
||||
|
||||
chunk["content"] = content
|
||||
yield chunk
|
||||
except (
|
||||
APIError,
|
||||
ValueError,
|
||||
) as err:
|
||||
LOGGER.error("Error sending message: %s %s", type(err), err)
|
||||
if isinstance(err, APIError):
|
||||
message = err.message
|
||||
else:
|
||||
message = type(err).__name__
|
||||
error = f"{ERROR_GETTING_RESPONSE}: {message}"
|
||||
raise HomeAssistantError(error) from err
|
||||
|
||||
|
||||
class GoogleGenerativeAILLMBaseEntity(Entity):
|
||||
"""Google Generative AI base entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
|
||||
def __init__(self, entry: ConfigEntry) -> None:
|
||||
"""Initialize the agent."""
|
||||
self.entry = entry
|
||||
self._genai_client = entry.runtime_data
|
||||
self._attr_unique_id = entry.entry_id
|
||||
self._attr_device_info = dr.DeviceInfo(
|
||||
identifiers={(DOMAIN, entry.entry_id)},
|
||||
name=entry.title,
|
||||
manufacturer="Google",
|
||||
model="Generative AI",
|
||||
entry_type=dr.DeviceEntryType.SERVICE,
|
||||
)
|
||||
|
||||
async def _async_handle_chat_log(
|
||||
self,
|
||||
chat_log: conversation.ChatLog,
|
||||
) -> None:
|
||||
"""Generate an answer for the chat log."""
|
||||
options = self.entry.options
|
||||
|
||||
tools: list[Tool | Callable[..., Any]] | None = None
|
||||
if chat_log.llm_api:
|
||||
tools = [
|
||||
_format_tool(tool, chat_log.llm_api.custom_serializer)
|
||||
for tool in chat_log.llm_api.tools
|
||||
]
|
||||
|
||||
# Using search grounding allows the model to retrieve information from the web,
|
||||
# however, it may interfere with how the model decides to use some tools, or entities
|
||||
# for example weather entity may be disregarded if the model chooses to Google it.
|
||||
if options.get(CONF_USE_GOOGLE_SEARCH_TOOL) is True:
|
||||
tools = tools or []
|
||||
tools.append(Tool(google_search=GoogleSearch()))
|
||||
|
||||
model_name = self.entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
# Avoid INVALID_ARGUMENT Developer instruction is not enabled for <model>
|
||||
supports_system_instruction = (
|
||||
"gemma" not in model_name
|
||||
and "gemini-2.0-flash-preview-image-generation" not in model_name
|
||||
)
|
||||
|
||||
prompt_content = cast(
|
||||
conversation.SystemContent,
|
||||
chat_log.content[0],
|
||||
)
|
||||
|
||||
if prompt_content.content:
|
||||
prompt = prompt_content.content
|
||||
else:
|
||||
raise HomeAssistantError("Invalid prompt content")
|
||||
|
||||
messages: list[Content] = []
|
||||
|
||||
# Google groups tool results, we do not. Group them before sending.
|
||||
tool_results: list[conversation.ToolResultContent] = []
|
||||
|
||||
for chat_content in chat_log.content[1:-1]:
|
||||
if chat_content.role == "tool_result":
|
||||
tool_results.append(chat_content)
|
||||
continue
|
||||
|
||||
if (
|
||||
not isinstance(chat_content, conversation.ToolResultContent)
|
||||
and chat_content.content == ""
|
||||
):
|
||||
# Skipping is not possible since the number of function calls needs to match the number of function responses
|
||||
# and skipping one would mean removing the other and hence this would prevent a proper chat log
|
||||
chat_content = replace(chat_content, content=" ")
|
||||
|
||||
if tool_results:
|
||||
messages.append(_create_google_tool_response_content(tool_results))
|
||||
tool_results.clear()
|
||||
|
||||
messages.append(_convert_content(chat_content))
|
||||
|
||||
# The SDK requires the first message to be a user message
|
||||
# This is not the case if user used `start_conversation`
|
||||
# Workaround from https://github.com/googleapis/python-genai/issues/529#issuecomment-2740964537
|
||||
if messages and messages[0].role != "user":
|
||||
messages.insert(
|
||||
0,
|
||||
Content(role="user", parts=[Part.from_text(text=" ")]),
|
||||
)
|
||||
|
||||
if tool_results:
|
||||
messages.append(_create_google_tool_response_content(tool_results))
|
||||
generateContentConfig = GenerateContentConfig(
|
||||
temperature=self.entry.options.get(
|
||||
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
|
||||
),
|
||||
top_k=self.entry.options.get(CONF_TOP_K, RECOMMENDED_TOP_K),
|
||||
top_p=self.entry.options.get(CONF_TOP_P, RECOMMENDED_TOP_P),
|
||||
max_output_tokens=self.entry.options.get(
|
||||
CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS
|
||||
),
|
||||
safety_settings=[
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_HATE_SPEECH,
|
||||
threshold=self.entry.options.get(
|
||||
CONF_HATE_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
|
||||
),
|
||||
),
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_HARASSMENT,
|
||||
threshold=self.entry.options.get(
|
||||
CONF_HARASSMENT_BLOCK_THRESHOLD,
|
||||
RECOMMENDED_HARM_BLOCK_THRESHOLD,
|
||||
),
|
||||
),
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT,
|
||||
threshold=self.entry.options.get(
|
||||
CONF_DANGEROUS_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
|
||||
),
|
||||
),
|
||||
SafetySetting(
|
||||
category=HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT,
|
||||
threshold=self.entry.options.get(
|
||||
CONF_SEXUAL_BLOCK_THRESHOLD, RECOMMENDED_HARM_BLOCK_THRESHOLD
|
||||
),
|
||||
),
|
||||
],
|
||||
tools=tools or None,
|
||||
system_instruction=prompt if supports_system_instruction else None,
|
||||
automatic_function_calling=AutomaticFunctionCallingConfig(
|
||||
disable=True, maximum_remote_calls=None
|
||||
),
|
||||
)
|
||||
|
||||
if not supports_system_instruction:
|
||||
messages = [
|
||||
Content(role="user", parts=[Part.from_text(text=prompt)]),
|
||||
Content(role="model", parts=[Part.from_text(text="Ok")]),
|
||||
*messages,
|
||||
]
|
||||
chat = self._genai_client.aio.chats.create(
|
||||
model=model_name, history=messages, config=generateContentConfig
|
||||
)
|
||||
user_message = chat_log.content[-1]
|
||||
assert isinstance(user_message, conversation.UserContent)
|
||||
chat_request: str | list[Part] = user_message.content
|
||||
# To prevent infinite loops, we limit the number of iterations
|
||||
for _iteration in range(MAX_TOOL_ITERATIONS):
|
||||
try:
|
||||
chat_response_generator = await chat.send_message_stream(
|
||||
message=chat_request
|
||||
)
|
||||
except (
|
||||
APIError,
|
||||
ClientError,
|
||||
ValueError,
|
||||
) as err:
|
||||
LOGGER.error("Error sending message: %s %s", type(err), err)
|
||||
error = ERROR_GETTING_RESPONSE
|
||||
raise HomeAssistantError(error) from err
|
||||
|
||||
chat_request = _create_google_tool_response_parts(
|
||||
[
|
||||
content
|
||||
async for content in chat_log.async_add_delta_content_stream(
|
||||
self.entity_id,
|
||||
_transform_stream(chat_response_generator),
|
||||
)
|
||||
if isinstance(content, conversation.ToolResultContent)
|
||||
]
|
||||
)
|
||||
|
||||
if not chat_log.unresponded_tool_results:
|
||||
break
|
||||
@@ -0,0 +1,216 @@
|
||||
"""Text to speech support for Google Generative AI."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import suppress
|
||||
import io
|
||||
import logging
|
||||
from typing import Any
|
||||
import wave
|
||||
|
||||
from google.genai import types
|
||||
|
||||
from homeassistant.components.tts import (
|
||||
ATTR_VOICE,
|
||||
TextToSpeechEntity,
|
||||
TtsAudioType,
|
||||
Voice,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import ATTR_MODEL, DOMAIN, RECOMMENDED_TTS_MODEL
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up TTS entity."""
|
||||
tts_entity = GoogleGenerativeAITextToSpeechEntity(config_entry)
|
||||
async_add_entities([tts_entity])
|
||||
|
||||
|
||||
class GoogleGenerativeAITextToSpeechEntity(TextToSpeechEntity):
|
||||
"""Google Generative AI text-to-speech entity."""
|
||||
|
||||
_attr_supported_options = [ATTR_VOICE, ATTR_MODEL]
|
||||
# See https://ai.google.dev/gemini-api/docs/speech-generation#languages
|
||||
_attr_supported_languages = [
|
||||
"ar-EG",
|
||||
"bn-BD",
|
||||
"de-DE",
|
||||
"en-IN",
|
||||
"en-US",
|
||||
"es-US",
|
||||
"fr-FR",
|
||||
"hi-IN",
|
||||
"id-ID",
|
||||
"it-IT",
|
||||
"ja-JP",
|
||||
"ko-KR",
|
||||
"mr-IN",
|
||||
"nl-NL",
|
||||
"pl-PL",
|
||||
"pt-BR",
|
||||
"ro-RO",
|
||||
"ru-RU",
|
||||
"ta-IN",
|
||||
"te-IN",
|
||||
"th-TH",
|
||||
"tr-TR",
|
||||
"uk-UA",
|
||||
"vi-VN",
|
||||
]
|
||||
_attr_default_language = "en-US"
|
||||
# See https://ai.google.dev/gemini-api/docs/speech-generation#voices
|
||||
_supported_voices = [
|
||||
Voice(voice.split(" ", 1)[0].lower(), voice)
|
||||
for voice in (
|
||||
"Zephyr (Bright)",
|
||||
"Puck (Upbeat)",
|
||||
"Charon (Informative)",
|
||||
"Kore (Firm)",
|
||||
"Fenrir (Excitable)",
|
||||
"Leda (Youthful)",
|
||||
"Orus (Firm)",
|
||||
"Aoede (Breezy)",
|
||||
"Callirrhoe (Easy-going)",
|
||||
"Autonoe (Bright)",
|
||||
"Enceladus (Breathy)",
|
||||
"Iapetus (Clear)",
|
||||
"Umbriel (Easy-going)",
|
||||
"Algieba (Smooth)",
|
||||
"Despina (Smooth)",
|
||||
"Erinome (Clear)",
|
||||
"Algenib (Gravelly)",
|
||||
"Rasalgethi (Informative)",
|
||||
"Laomedeia (Upbeat)",
|
||||
"Achernar (Soft)",
|
||||
"Alnilam (Firm)",
|
||||
"Schedar (Even)",
|
||||
"Gacrux (Mature)",
|
||||
"Pulcherrima (Forward)",
|
||||
"Achird (Friendly)",
|
||||
"Zubenelgenubi (Casual)",
|
||||
"Vindemiatrix (Gentle)",
|
||||
"Sadachbia (Lively)",
|
||||
"Sadaltager (Knowledgeable)",
|
||||
"Sulafat (Warm)",
|
||||
)
|
||||
]
|
||||
|
||||
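Each prebuilt Gemini voice is exposed to Home Assistant with a lowercase identifier derived from the display name, so "Zephyr (Bright)" gets the voice_id "zephyr". A one-line illustration of the comprehension above:

# "Zephyr (Bright)".split(" ", 1)[0].lower() == "zephyr"
voice = Voice("zephyr", "Zephyr (Bright)")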
def __init__(self, entry: ConfigEntry) -> None:
|
||||
"""Initialize Google Generative AI Conversation speech entity."""
|
||||
self.entry = entry
|
||||
self._attr_name = "Google Generative AI TTS"
|
||||
self._attr_unique_id = f"{entry.entry_id}_tts"
|
||||
self._attr_device_info = dr.DeviceInfo(
|
||||
identifiers={(DOMAIN, entry.entry_id)},
|
||||
name=entry.title,
|
||||
manufacturer="Google",
|
||||
model="Generative AI",
|
||||
entry_type=dr.DeviceEntryType.SERVICE,
|
||||
)
|
||||
self._genai_client = entry.runtime_data
|
||||
self._default_voice_id = self._supported_voices[0].voice_id
|
||||
|
||||
@callback
|
||||
def async_get_supported_voices(self, language: str) -> list[Voice] | None:
|
||||
"""Return a list of supported voices for a language."""
|
||||
return self._supported_voices
|
||||
|
||||
async def async_get_tts_audio(
|
||||
self, message: str, language: str, options: dict[str, Any]
|
||||
) -> TtsAudioType:
|
||||
"""Load tts audio file from the engine."""
|
||||
try:
|
||||
response = self._genai_client.models.generate_content(
|
||||
model=options.get(ATTR_MODEL, RECOMMENDED_TTS_MODEL),
|
||||
contents=message,
|
||||
config=types.GenerateContentConfig(
|
||||
response_modalities=["AUDIO"],
|
||||
speech_config=types.SpeechConfig(
|
||||
voice_config=types.VoiceConfig(
|
||||
prebuilt_voice_config=types.PrebuiltVoiceConfig(
|
||||
voice_name=options.get(
|
||||
ATTR_VOICE, self._default_voice_id
|
||||
)
|
||||
)
|
||||
)
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
data = response.candidates[0].content.parts[0].inline_data.data
|
||||
mime_type = response.candidates[0].content.parts[0].inline_data.mime_type
|
||||
except Exception as exc:
|
||||
_LOGGER.warning(
|
||||
"Error during processing of TTS request %s", exc, exc_info=True
|
||||
)
|
||||
raise HomeAssistantError(exc) from exc
|
||||
return "wav", self._convert_to_wav(data, mime_type)
|
||||
|
||||
def _convert_to_wav(self, audio_data: bytes, mime_type: str) -> bytes:
|
||||
"""Generate a WAV file header for the given audio data and parameters.
|
||||
|
||||
Args:
|
||||
audio_data: The raw audio data as a bytes object.
|
||||
mime_type: Mime type of the audio data.
|
||||
|
||||
Returns:
|
||||
A bytes object representing the WAV file header.
|
||||
|
||||
"""
|
||||
parameters = self._parse_audio_mime_type(mime_type)
|
||||
|
||||
wav_buffer = io.BytesIO()
|
||||
with wave.open(wav_buffer, "wb") as wf:
|
||||
wf.setnchannels(1)
|
||||
wf.setsampwidth(parameters["bits_per_sample"] // 8)
|
||||
wf.setframerate(parameters["rate"])
|
||||
wf.writeframes(audio_data)
|
||||
|
||||
return wav_buffer.getvalue()
|
||||
|
||||
def _parse_audio_mime_type(self, mime_type: str) -> dict[str, int]:
|
||||
"""Parse bits per sample and rate from an audio MIME type string.
|
||||
|
||||
Assumes bits per sample is encoded like "L16" and rate as "rate=xxxxx".
|
||||
|
||||
Args:
|
||||
mime_type: The audio MIME type string (e.g., "audio/L16;rate=24000").
|
||||
|
||||
Returns:
|
||||
A dictionary with "bits_per_sample" and "rate" keys. Values will be
|
||||
integers if found, otherwise None.
|
||||
|
||||
"""
|
||||
if not mime_type.startswith("audio/L"):
|
||||
_LOGGER.warning("Received unexpected MIME type %s", mime_type)
|
||||
raise HomeAssistantError(f"Unsupported audio MIME type: {mime_type}")
|
||||
|
||||
bits_per_sample = 16
|
||||
rate = 24000
|
||||
|
||||
# Extract rate from parameters
|
||||
parts = mime_type.split(";")
|
||||
for param in parts: # Skip the main type part
|
||||
param = param.strip()
|
||||
if param.lower().startswith("rate="):
|
||||
# Handle cases like "rate=" with no value or non-integer value and keep rate as default
|
||||
with suppress(ValueError, IndexError):
|
||||
rate_str = param.split("=", 1)[1]
|
||||
rate = int(rate_str)
|
||||
elif param.startswith("audio/L"):
|
||||
# Keep bits_per_sample as default if conversion fails
|
||||
with suppress(ValueError, IndexError):
|
||||
bits_per_sample = int(param.split("L", 1)[1])
|
||||
|
||||
return {"bits_per_sample": bits_per_sample, "rate": rate}
|
||||
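For reference, the docstring's own example traced through the helpers above: Gemini returns raw PCM tagged with a MIME type such as "audio/L16;rate=24000", and the entity wraps it in a WAV container before handing it to TTS. A short illustrative trace (pcm_bytes is a placeholder for the API's inline data):

mime_type = "audio/L16;rate=24000"

# _parse_audio_mime_type(mime_type) yields:
#   {"bits_per_sample": 16, "rate": 24000}
# _convert_to_wav(pcm_bytes, mime_type) then writes a mono WAV with a
# sample width of 16 // 8 = 2 bytes and a 24 kHz frame rate around pcm_bytes.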
@@ -27,7 +27,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Google Mail integration."""
hass.data.setdefault(DOMAIN, {})[DATA_HASS_CONFIG] = config

await async_setup_services(hass)
async_setup_services(hass)

return True

@@ -8,7 +8,7 @@ from typing import TYPE_CHECKING
from googleapiclient.http import HttpRequest
import voluptuous as vol

from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.service import async_extract_config_entry_ids

@@ -46,56 +46,57 @@ SERVICE_VACATION_SCHEMA = vol.All(
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_services(hass: HomeAssistant) -> None:
async def _extract_gmail_config_entries(
    call: ServiceCall,
) -> list[GoogleMailConfigEntry]:
    return [
        entry
        for entry_id in await async_extract_config_entry_ids(call.hass, call)
        if (entry := call.hass.config_entries.async_get_entry(entry_id))
        and entry.domain == DOMAIN
    ]


async def _gmail_service(call: ServiceCall) -> None:
    """Call Google Mail service."""
    for entry in await _extract_gmail_config_entries(call):
        try:
            auth = entry.runtime_data
        except AttributeError as ex:
            raise ValueError(f"Config entry not loaded: {entry.entry_id}") from ex
        service = await auth.get_resource()

        _settings = {
            "enableAutoReply": call.data[ATTR_ENABLED],
            "responseSubject": call.data.get(ATTR_TITLE),
        }
        if contacts := call.data.get(ATTR_RESTRICT_CONTACTS):
            _settings["restrictToContacts"] = contacts
        if domain := call.data.get(ATTR_RESTRICT_DOMAIN):
            _settings["restrictToDomain"] = domain
        if _date := call.data.get(ATTR_START):
            _dt = datetime.combine(_date, datetime.min.time())
            _settings["startTime"] = _dt.timestamp() * 1000
        if _date := call.data.get(ATTR_END):
            _dt = datetime.combine(_date, datetime.min.time())
            _settings["endTime"] = (_dt + timedelta(days=1)).timestamp() * 1000
        if call.data[ATTR_PLAIN_TEXT]:
            _settings["responseBodyPlainText"] = call.data[ATTR_MESSAGE]
        else:
            _settings["responseBodyHtml"] = call.data[ATTR_MESSAGE]
        settings: HttpRequest = (
            service.users().settings().updateVacation(userId=ATTR_ME, body=_settings)
        )
        await call.hass.async_add_executor_job(settings.execute)


@callback
def async_setup_services(hass: HomeAssistant) -> None:
    """Set up services for Google Mail integration."""

    async def extract_gmail_config_entries(
        call: ServiceCall,
    ) -> list[GoogleMailConfigEntry]:
        return [
            entry
            for entry_id in await async_extract_config_entry_ids(hass, call)
            if (entry := hass.config_entries.async_get_entry(entry_id))
            and entry.domain == DOMAIN
        ]

    async def gmail_service(call: ServiceCall) -> None:
        """Call Google Mail service."""
        for entry in await extract_gmail_config_entries(call):
            try:
                auth = entry.runtime_data
            except AttributeError as ex:
                raise ValueError(f"Config entry not loaded: {entry.entry_id}") from ex
            service = await auth.get_resource()

            _settings = {
                "enableAutoReply": call.data[ATTR_ENABLED],
                "responseSubject": call.data.get(ATTR_TITLE),
            }
            if contacts := call.data.get(ATTR_RESTRICT_CONTACTS):
                _settings["restrictToContacts"] = contacts
            if domain := call.data.get(ATTR_RESTRICT_DOMAIN):
                _settings["restrictToDomain"] = domain
            if _date := call.data.get(ATTR_START):
                _dt = datetime.combine(_date, datetime.min.time())
                _settings["startTime"] = _dt.timestamp() * 1000
            if _date := call.data.get(ATTR_END):
                _dt = datetime.combine(_date, datetime.min.time())
                _settings["endTime"] = (_dt + timedelta(days=1)).timestamp() * 1000
            if call.data[ATTR_PLAIN_TEXT]:
                _settings["responseBodyPlainText"] = call.data[ATTR_MESSAGE]
            else:
                _settings["responseBodyHtml"] = call.data[ATTR_MESSAGE]
            settings: HttpRequest = (
                service.users()
                .settings()
                .updateVacation(userId=ATTR_ME, body=_settings)
            )
            await hass.async_add_executor_job(settings.execute)

    hass.services.async_register(
        domain=DOMAIN,
        service=SERVICE_SET_VACATION,
        schema=SERVICE_VACATION_SCHEMA,
        service_func=gmail_service,
        service_func=_gmail_service,
    )

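A note on the start/end handling in the service above: the Gmail vacation responder expects epoch timestamps in milliseconds, so the handler combines the submitted date with midnight and scales the POSIX timestamp by 1000, and the end date gets one extra day so it is treated inclusively. A minimal standalone sketch of that conversion (the dates are hypothetical, standard library only):

from datetime import date, datetime, timedelta

start = datetime.combine(date(2025, 7, 1), datetime.min.time())
end = datetime.combine(date(2025, 7, 7), datetime.min.time()) + timedelta(days=1)

start_ms = start.timestamp() * 1000  # value sent as "startTime"
end_ms = end.timestamp() * 1000      # "endTime" is exclusive, hence the extra day
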
@@ -2,7 +2,13 @@
  "config": {
    "step": {
      "pick_implementation": {
        "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
        "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
        "data": {
          "implementation": "[%key:common::config_flow::data::implementation%]"
        },
        "data_description": {
          "implementation": "[%key:common::config_flow::description::implementation%]"
        }
      },
      "reauth_confirm": {
        "title": "[%key:common::config_flow::title::reauth%]",

@@ -16,6 +16,7 @@ from homeassistant.core import (
    ServiceCall,
    ServiceResponse,
    SupportsResponse,
    callback,
)
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv
@@ -77,85 +78,85 @@ def _read_file_contents(
    return results


async def _async_handle_upload(call: ServiceCall) -> ServiceResponse:
    """Generate content from text and optionally images."""
    config_entry: GooglePhotosConfigEntry | None = (
        call.hass.config_entries.async_get_entry(call.data[CONF_CONFIG_ENTRY_ID])
    )
    if not config_entry:
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="integration_not_found",
            translation_placeholders={"target": DOMAIN},
        )
    scopes = config_entry.data["token"]["scope"].split(" ")
    if UPLOAD_SCOPE not in scopes:
        raise HomeAssistantError(
            translation_domain=DOMAIN,
            translation_key="missing_upload_permission",
            translation_placeholders={"target": DOMAIN},
        )
    coordinator = config_entry.runtime_data
    client_api = coordinator.client
    upload_tasks = []
    file_results = await call.hass.async_add_executor_job(
        _read_file_contents, call.hass, call.data[CONF_FILENAME]
    )

    album = call.data[CONF_ALBUM]
    try:
        album_id = await coordinator.get_or_create_album(album)
    except GooglePhotosApiError as err:
        raise HomeAssistantError(
            translation_domain=DOMAIN,
            translation_key="create_album_error",
            translation_placeholders={"message": str(err)},
        ) from err

    for mime_type, content in file_results:
        upload_tasks.append(client_api.upload_content(content, mime_type))
    try:
        upload_results = await asyncio.gather(*upload_tasks)
    except GooglePhotosApiError as err:
        raise HomeAssistantError(
            translation_domain=DOMAIN,
            translation_key="upload_error",
            translation_placeholders={"message": str(err)},
        ) from err
    try:
        upload_result = await client_api.create_media_items(
            [
                NewMediaItem(SimpleMediaItem(upload_token=upload_result.upload_token))
                for upload_result in upload_results
            ],
            album_id=album_id,
        )
    except GooglePhotosApiError as err:
        raise HomeAssistantError(
            translation_domain=DOMAIN,
            translation_key="api_error",
            translation_placeholders={"message": str(err)},
        ) from err
    if call.return_response:
        return {
            "media_items": [
                {"media_item_id": item_result.media_item.id}
                for item_result in upload_result.new_media_item_results
                if item_result.media_item and item_result.media_item.id
            ],
            "album_id": album_id,
        }
    return None


@callback
def async_setup_services(hass: HomeAssistant) -> None:
    """Register Google Photos services."""

    async def async_handle_upload(call: ServiceCall) -> ServiceResponse:
        """Generate content from text and optionally images."""
        config_entry: GooglePhotosConfigEntry | None = (
            hass.config_entries.async_get_entry(call.data[CONF_CONFIG_ENTRY_ID])
        )
        if not config_entry:
            raise ServiceValidationError(
                translation_domain=DOMAIN,
                translation_key="integration_not_found",
                translation_placeholders={"target": DOMAIN},
            )
        scopes = config_entry.data["token"]["scope"].split(" ")
        if UPLOAD_SCOPE not in scopes:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="missing_upload_permission",
                translation_placeholders={"target": DOMAIN},
            )
        coordinator = config_entry.runtime_data
        client_api = coordinator.client
        upload_tasks = []
        file_results = await hass.async_add_executor_job(
            _read_file_contents, hass, call.data[CONF_FILENAME]
        )

        album = call.data[CONF_ALBUM]
        try:
            album_id = await coordinator.get_or_create_album(album)
        except GooglePhotosApiError as err:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="create_album_error",
                translation_placeholders={"message": str(err)},
            ) from err

        for mime_type, content in file_results:
            upload_tasks.append(client_api.upload_content(content, mime_type))
        try:
            upload_results = await asyncio.gather(*upload_tasks)
        except GooglePhotosApiError as err:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="upload_error",
                translation_placeholders={"message": str(err)},
            ) from err
        try:
            upload_result = await client_api.create_media_items(
                [
                    NewMediaItem(
                        SimpleMediaItem(upload_token=upload_result.upload_token)
                    )
                    for upload_result in upload_results
                ],
                album_id=album_id,
            )
        except GooglePhotosApiError as err:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="api_error",
                translation_placeholders={"message": str(err)},
            ) from err
        if call.return_response:
            return {
                "media_items": [
                    {"media_item_id": item_result.media_item.id}
                    for item_result in upload_result.new_media_item_results
                    if item_result.media_item and item_result.media_item.id
                ],
                "album_id": album_id,
            }
        return None

    hass.services.async_register(
        DOMAIN,
        UPLOAD_SERVICE,
        async_handle_upload,
        _async_handle_upload,
        schema=UPLOAD_SERVICE_SCHEMA,
        supports_response=SupportsResponse.OPTIONAL,
    )

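Because the upload handler is registered with SupportsResponse.OPTIONAL, a caller can ask for the response dictionary built above (media_items plus album_id). A rough sketch of such a call from inside Home Assistant; the service name, field names, and entry id below are assumptions for illustration, not taken from the diff:

async def upload_and_collect_ids(hass):
    # Hedged sketch: assumes the service is exposed as google_photos.upload with
    # config_entry_id / filename / album fields; adjust to the real schema.
    response = await hass.services.async_call(
        "google_photos",
        "upload",
        {
            "config_entry_id": "abc123",          # hypothetical entry id
            "filename": "/config/www/photo.jpg",  # hypothetical local path
            "album": "Home Assistant",
        },
        blocking=True,
        return_response=True,
    )
    return [item["media_item_id"] for item in response["media_items"]]
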
@@ -5,7 +5,13 @@
  "config": {
    "step": {
      "pick_implementation": {
        "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
        "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
        "data": {
          "implementation": "[%key:common::config_flow::data::implementation%]"
        },
        "data_description": {
          "implementation": "[%key:common::config_flow::description::implementation%]"
        }
      },
      "reauth_confirm": {
        "title": "[%key:common::config_flow::title::reauth%]",

@@ -13,7 +13,7 @@ from gspread.utils import ValueInputOption
import voluptuous as vol

from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.selector import ConfigEntrySelector
@@ -76,6 +76,7 @@ async def _async_append_to_sheet(call: ServiceCall) -> None:
    await call.hass.async_add_executor_job(_append_to_sheet, call, entry)


@callback
def async_setup_services(hass: HomeAssistant) -> None:
    """Add the services for Google Sheets."""

@@ -2,7 +2,13 @@
  "config": {
    "step": {
      "pick_implementation": {
        "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
        "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
        "data": {
          "implementation": "[%key:common::config_flow::data::implementation%]"
        },
        "data_description": {
          "implementation": "[%key:common::config_flow::description::implementation%]"
        }
      },
      "reauth_confirm": {
        "title": "[%key:common::config_flow::title::reauth%]",

@@ -5,7 +5,13 @@
  "config": {
    "step": {
      "pick_implementation": {
        "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
        "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
        "data": {
          "implementation": "[%key:common::config_flow::data::implementation%]"
        },
        "data_description": {
          "implementation": "[%key:common::config_flow::description::implementation%]"
        }
      },
      "reauth_confirm": {
        "title": "[%key:common::config_flow::title::reauth%]",
@@ -35,6 +35,7 @@ from homeassistant.core import (
    ServiceCall,
    ServiceResponse,
    SupportsResponse,
    callback,
)
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv

@@ -249,6 +250,7 @@ def get_config_entry(hass: HomeAssistant, entry_id: str) -> HabiticaConfigEntry:
    return entry


@callback
def async_setup_services(hass: HomeAssistant) -> None:  # noqa: C901
    """Set up services for Habitica integration."""
@@ -9,8 +9,10 @@ from functools import partial
import logging
import os
import re
import struct
from typing import Any, NamedTuple

import aiofiles
from aiohasupervisor import SupervisorError
import voluptuous as vol

@@ -56,7 +58,6 @@ from homeassistant.helpers.issue_registry import IssueSeverity
from homeassistant.helpers.service_info.hassio import (
    HassioServiceInfo as _HassioServiceInfo,
)
from homeassistant.helpers.system_info import async_get_system_info
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import bind_hass
from homeassistant.util.async_ import create_eager_task
@@ -233,6 +234,17 @@ SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend(
)


def _is_32_bit() -> bool:
    size = struct.calcsize("P")
    return size * 8 == 32


async def _get_arch() -> str:
    async with aiofiles.open("/etc/apk/arch") as arch_file:
        raw_arch = await arch_file.read()
    return {"x86": "i386"}.get(raw_arch, raw_arch)


class APIEndpointSettings(NamedTuple):
    """Settings for API endpoint."""

@@ -554,7 +566,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    await coordinator.async_config_entry_first_refresh()
    hass.data[ADDONS_COORDINATOR] = coordinator

    system_info = await async_get_system_info(hass)
    arch = await _get_arch()

    def deprecated_setup_issue() -> None:
        os_info = get_os_info(hass)
@@ -562,20 +574,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
        if os_info is None or info is None:
            return
        is_haos = info.get("hassos") is not None
        arch = system_info["arch"]
        board = os_info.get("board")
        supported_board = board in {"rpi3", "rpi4", "tinker", "odroid-xu4", "rpi2"}
        if is_haos and arch == "armv7" and supported_board:
        unsupported_board = board in {"tinker", "odroid-xu4", "rpi2"}
        unsupported_os_on_board = board in {"rpi3", "rpi4"}
        if is_haos and (unsupported_board or unsupported_os_on_board):
            issue_id = "deprecated_os_"
            if board in {"rpi3", "rpi4"}:
            if unsupported_os_on_board:
                issue_id += "aarch64"
            elif board in {"tinker", "odroid-xu4", "rpi2"}:
            elif unsupported_board:
                issue_id += "armv7"
            ir.async_create_issue(
                hass,
                "homeassistant",
                issue_id,
                breaks_in_ha_version="2025.12.0",
                learn_more_url=DEPRECATION_URL,
                is_fixable=False,
                severity=IssueSeverity.WARNING,
@@ -584,9 +595,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
                    "installation_guide": "https://www.home-assistant.io/installation/",
                },
            )
        deprecated_architecture = False
        if arch in {"i386", "armhf"} or (arch == "armv7" and not supported_board):
            deprecated_architecture = True
        bit32 = _is_32_bit()
        deprecated_architecture = bit32 and not (
            unsupported_board or unsupported_os_on_board
        )
        if not is_haos or deprecated_architecture:
            issue_id = "deprecated"
            if not is_haos:
@@ -597,7 +609,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
                hass,
                "homeassistant",
                issue_id,
                breaks_in_ha_version="2025.12.0",
                learn_more_url=DEPRECATION_URL,
                is_fixable=False,
                severity=IssueSeverity.WARNING,
@@ -5,26 +5,13 @@ from __future__ import annotations
from homeassistant.const import CONF_API_KEY, CONF_MODE, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.start import async_at_started
from homeassistant.util import dt as dt_util

from .const import (
    CONF_ARRIVAL_TIME,
    CONF_DEPARTURE_TIME,
    CONF_DESTINATION_ENTITY_ID,
    CONF_DESTINATION_LATITUDE,
    CONF_DESTINATION_LONGITUDE,
    CONF_ORIGIN_ENTITY_ID,
    CONF_ORIGIN_LATITUDE,
    CONF_ORIGIN_LONGITUDE,
    CONF_ROUTE_MODE,
    TRAVEL_MODE_PUBLIC,
)
from .const import TRAVEL_MODE_PUBLIC
from .coordinator import (
    HereConfigEntry,
    HERERoutingDataUpdateCoordinator,
    HERETransitDataUpdateCoordinator,
)
from .model import HERETravelTimeConfig

PLATFORMS = [Platform.SENSOR]

@@ -33,29 +20,13 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: HereConfigEntry)
    """Set up HERE Travel Time from a config entry."""
    api_key = config_entry.data[CONF_API_KEY]

    arrival = dt_util.parse_time(config_entry.options.get(CONF_ARRIVAL_TIME, ""))
    departure = dt_util.parse_time(config_entry.options.get(CONF_DEPARTURE_TIME, ""))

    here_travel_time_config = HERETravelTimeConfig(
        destination_latitude=config_entry.data.get(CONF_DESTINATION_LATITUDE),
        destination_longitude=config_entry.data.get(CONF_DESTINATION_LONGITUDE),
        destination_entity_id=config_entry.data.get(CONF_DESTINATION_ENTITY_ID),
        origin_latitude=config_entry.data.get(CONF_ORIGIN_LATITUDE),
        origin_longitude=config_entry.data.get(CONF_ORIGIN_LONGITUDE),
        origin_entity_id=config_entry.data.get(CONF_ORIGIN_ENTITY_ID),
        travel_mode=config_entry.data[CONF_MODE],
        route_mode=config_entry.options[CONF_ROUTE_MODE],
        arrival=arrival,
        departure=departure,
    )

    cls: type[HERETransitDataUpdateCoordinator | HERERoutingDataUpdateCoordinator]
    if config_entry.data[CONF_MODE] in {TRAVEL_MODE_PUBLIC, "publicTransportTimeTable"}:
        cls = HERETransitDataUpdateCoordinator
    else:
        cls = HERERoutingDataUpdateCoordinator

    data_coordinator = cls(hass, config_entry, api_key, here_travel_time_config)
    data_coordinator = cls(hass, config_entry, api_key)
    config_entry.runtime_data = data_coordinator

    async def _async_update_at_start(_: HomeAssistant) -> None:

@@ -26,7 +26,7 @@ from here_transit import (
import voluptuous as vol

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import UnitOfLength
from homeassistant.const import CONF_MODE, UnitOfLength
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.location import find_coordinates
@@ -34,8 +34,21 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
from homeassistant.util import dt as dt_util
from homeassistant.util.unit_conversion import DistanceConverter

from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, ROUTE_MODE_FASTEST
from .model import HERETravelTimeConfig, HERETravelTimeData
from .const import (
    CONF_ARRIVAL_TIME,
    CONF_DEPARTURE_TIME,
    CONF_DESTINATION_ENTITY_ID,
    CONF_DESTINATION_LATITUDE,
    CONF_DESTINATION_LONGITUDE,
    CONF_ORIGIN_ENTITY_ID,
    CONF_ORIGIN_LATITUDE,
    CONF_ORIGIN_LONGITUDE,
    CONF_ROUTE_MODE,
    DEFAULT_SCAN_INTERVAL,
    DOMAIN,
    ROUTE_MODE_FASTEST,
)
from .model import HERETravelTimeAPIParams, HERETravelTimeData

BACKOFF_MULTIPLIER = 1.1

@@ -47,7 +60,7 @@ type HereConfigEntry = ConfigEntry[


class HERERoutingDataUpdateCoordinator(DataUpdateCoordinator[HERETravelTimeData]):
    """here_routing DataUpdateCoordinator."""
    """HERETravelTime DataUpdateCoordinator for the routing API."""

    config_entry: HereConfigEntry

@@ -56,7 +69,6 @@ class HERERoutingDataUpdateCoordinator(DataUpdateCoordinator[HERETravelTimeData]
        hass: HomeAssistant,
        config_entry: HereConfigEntry,
        api_key: str,
        config: HERETravelTimeConfig,
    ) -> None:
        """Initialize."""
        super().__init__(
@@ -67,41 +79,34 @@ class HERERoutingDataUpdateCoordinator(DataUpdateCoordinator[HERETravelTimeData]
            update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL),
        )
        self._api = HERERoutingApi(api_key)
        self.config = config

    async def _async_update_data(self) -> HERETravelTimeData:
        """Get the latest data from the HERE Routing API."""
        origin, destination, arrival, departure = prepare_parameters(
            self.hass, self.config
        )

        route_mode = (
            RoutingMode.FAST
            if self.config.route_mode == ROUTE_MODE_FASTEST
            else RoutingMode.SHORT
        )
        params = prepare_parameters(self.hass, self.config_entry)

        _LOGGER.debug(
            (
                "Requesting route for origin: %s, destination: %s, route_mode: %s,"
                " mode: %s, arrival: %s, departure: %s"
            ),
            origin,
            destination,
            route_mode,
            TransportMode(self.config.travel_mode),
            arrival,
            departure,
            params.origin,
            params.destination,
            params.route_mode,
            TransportMode(params.travel_mode),
            params.arrival,
            params.departure,
        )

        try:
            response = await self._api.route(
                transport_mode=TransportMode(self.config.travel_mode),
                origin=here_routing.Place(origin[0], origin[1]),
                destination=here_routing.Place(destination[0], destination[1]),
                routing_mode=route_mode,
                arrival_time=arrival,
                departure_time=departure,
                transport_mode=TransportMode(params.travel_mode),
                origin=here_routing.Place(params.origin[0], params.origin[1]),
                destination=here_routing.Place(
                    params.destination[0], params.destination[1]
                ),
                routing_mode=params.route_mode,
                arrival_time=params.arrival,
                departure_time=params.departure,
                return_values=[Return.POLYLINE, Return.SUMMARY],
                spans=[Spans.NAMES],
            )
@@ -175,7 +180,7 @@ class HERERoutingDataUpdateCoordinator(DataUpdateCoordinator[HERETravelTimeData]
class HERETransitDataUpdateCoordinator(
    DataUpdateCoordinator[HERETravelTimeData | None]
):
    """HERETravelTime DataUpdateCoordinator."""
    """HERETravelTime DataUpdateCoordinator for the transit API."""

    config_entry: HereConfigEntry

@@ -184,7 +189,6 @@ class HERETransitDataUpdateCoordinator(
        hass: HomeAssistant,
        config_entry: HereConfigEntry,
        api_key: str,
        config: HERETravelTimeConfig,
    ) -> None:
        """Initialize."""
        super().__init__(
@@ -195,32 +199,31 @@ class HERETransitDataUpdateCoordinator(
            update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL),
        )
        self._api = HERETransitApi(api_key)
        self.config = config

    async def _async_update_data(self) -> HERETravelTimeData | None:
        """Get the latest data from the HERE Routing API."""
        origin, destination, arrival, departure = prepare_parameters(
            self.hass, self.config
        )
        params = prepare_parameters(self.hass, self.config_entry)

        _LOGGER.debug(
            (
                "Requesting transit route for origin: %s, destination: %s, arrival: %s,"
                " departure: %s"
            ),
            origin,
            destination,
            arrival,
            departure,
            params.origin,
            params.destination,
            params.arrival,
            params.departure,
        )
        try:
            response = await self._api.route(
                origin=here_transit.Place(latitude=origin[0], longitude=origin[1]),
                destination=here_transit.Place(
                    latitude=destination[0], longitude=destination[1]
                origin=here_transit.Place(
                    latitude=params.origin[0], longitude=params.origin[1]
                ),
                arrival_time=arrival,
                departure_time=departure,
                destination=here_transit.Place(
                    latitude=params.destination[0], longitude=params.destination[1]
                ),
                arrival_time=params.arrival,
                departure_time=params.departure,
                return_values=[
                    here_transit.Return.POLYLINE,
                    here_transit.Return.TRAVEL_SUMMARY,
@@ -285,8 +288,8 @@ class HERETransitDataUpdateCoordinator(

def prepare_parameters(
    hass: HomeAssistant,
    config: HERETravelTimeConfig,
) -> tuple[list[str], list[str], str | None, str | None]:
    config_entry: HereConfigEntry,
) -> HERETravelTimeAPIParams:
    """Prepare parameters for the HERE api."""

    def _from_entity_id(entity_id: str) -> list[str]:
@@ -305,32 +308,55 @@ def prepare_parameters(
        return formatted_coordinates

    # Destination
    if config.destination_entity_id is not None:
        destination = _from_entity_id(config.destination_entity_id)
    if (
        destination_entity_id := config_entry.data.get(CONF_DESTINATION_ENTITY_ID)
    ) is not None:
        destination = _from_entity_id(str(destination_entity_id))
    else:
        destination = [
            str(config.destination_latitude),
            str(config.destination_longitude),
            str(config_entry.data[CONF_DESTINATION_LATITUDE]),
            str(config_entry.data[CONF_DESTINATION_LONGITUDE]),
        ]

    # Origin
    if config.origin_entity_id is not None:
        origin = _from_entity_id(config.origin_entity_id)
    if (origin_entity_id := config_entry.data.get(CONF_ORIGIN_ENTITY_ID)) is not None:
        origin = _from_entity_id(str(origin_entity_id))
    else:
        origin = [
            str(config.origin_latitude),
            str(config.origin_longitude),
            str(config_entry.data[CONF_ORIGIN_LATITUDE]),
            str(config_entry.data[CONF_ORIGIN_LONGITUDE]),
        ]

    # Arrival/Departure
    arrival: str | None = None
    departure: str | None = None
    if config.arrival is not None:
        arrival = next_datetime(config.arrival).isoformat()
    if config.departure is not None:
        departure = next_datetime(config.departure).isoformat()
    arrival: datetime | None = None
    if (
        conf_arrival := dt_util.parse_time(
            config_entry.options.get(CONF_ARRIVAL_TIME, "")
        )
    ) is not None:
        arrival = next_datetime(conf_arrival)
    departure: datetime | None = None
    if (
        conf_departure := dt_util.parse_time(
            config_entry.options.get(CONF_DEPARTURE_TIME, "")
        )
    ) is not None:
        departure = next_datetime(conf_departure)

    return (origin, destination, arrival, departure)
    route_mode = (
        RoutingMode.FAST
        if config_entry.options[CONF_ROUTE_MODE] == ROUTE_MODE_FASTEST
        else RoutingMode.SHORT
    )

    return HERETravelTimeAPIParams(
        destination=destination,
        origin=origin,
        travel_mode=config_entry.data[CONF_MODE],
        route_mode=route_mode,
        arrival=arrival,
        departure=departure,
    )


def build_hass_attribution(sections: list[dict[str, Any]]) -> str | None:

@@ -3,7 +3,7 @@
from __future__ import annotations

from dataclasses import dataclass
from datetime import time
from datetime import datetime
from typing import TypedDict


@@ -21,16 +21,12 @@ class HERETravelTimeData(TypedDict):


@dataclass
class HERETravelTimeConfig:
    """Configuration for HereTravelTimeDataUpdateCoordinator."""
class HERETravelTimeAPIParams:
    """Configuration for polling the HERE API."""

    destination_latitude: float | None
    destination_longitude: float | None
    destination_entity_id: str | None
    origin_latitude: float | None
    origin_longitude: float | None
    origin_entity_id: str | None
    destination: list[str]
    origin: list[str]
    travel_mode: str
    route_mode: str
    arrival: time | None
    departure: time | None
    arrival: datetime | None
    departure: datetime | None

@@ -9,7 +9,13 @@
  "config": {
    "step": {
      "pick_implementation": {
        "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
        "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
        "data": {
          "implementation": "[%key:common::config_flow::data::implementation%]"
        },
        "data_description": {
          "implementation": "[%key:common::config_flow::description::implementation%]"
        }
      },
      "reauth_confirm": {
        "title": "[%key:common::config_flow::title::reauth%]",
@@ -4,8 +4,10 @@ import asyncio
from collections.abc import Callable, Coroutine
import itertools as it
import logging
import struct
from typing import Any

import aiofiles
import voluptuous as vol

from homeassistant import config as conf_util, core_config
@@ -94,6 +96,17 @@ DEPRECATION_URL = (
)


def _is_32_bit() -> bool:
    size = struct.calcsize("P")
    return size * 8 == 32


async def _get_arch() -> str:
    async with aiofiles.open("/etc/apk/arch") as arch_file:
        raw_arch = (await arch_file.read()).strip()
    return {"x86": "i386", "x86_64": "amd64"}.get(raw_arch, raw_arch)

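The _is_32_bit helper above leans on the size of a C pointer as reported by the standard-library struct module. A quick standalone sketch of the same check:

import struct

# calcsize("P") is the size of a pointer in bytes: 4 on a 32-bit build of
# Python, 8 on a 64-bit build, so bits comes out as 32 or 64.
bits = struct.calcsize("P") * 8
print(f"running a {bits}-bit interpreter")
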
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:  # noqa: C901
    """Set up general services related to Home Assistant."""

@@ -403,23 +416,21 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:  # noqa:
    installation_type = info["installation_type"][15:]
    if installation_type in {"Core", "Container"}:
        deprecated_method = installation_type == "Core"
        bit32 = _is_32_bit()
        arch = info["arch"]
        if arch == "armv7" and installation_type == "Container":
        if bit32 and installation_type == "Container":
            arch = await _get_arch()
            ir.async_create_issue(
                hass,
                DOMAIN,
                "deprecated_container_armv7",
                breaks_in_ha_version="2025.12.0",
                "deprecated_container",
                learn_more_url=DEPRECATION_URL,
                is_fixable=False,
                severity=IssueSeverity.WARNING,
                translation_key="deprecated_container_armv7",
                translation_key="deprecated_container",
                translation_placeholders={"arch": arch},
            )
        deprecated_architecture = False
        if arch in {"i386", "armhf"} or (
            arch == "armv7" and installation_type != "Container"
        ):
            deprecated_architecture = True
        deprecated_architecture = bit32 and installation_type != "Container"
        if deprecated_method or deprecated_architecture:
            issue_id = "deprecated"
            if deprecated_method:
@@ -430,7 +441,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:  # noqa:
                hass,
                DOMAIN,
                issue_id,
                breaks_in_ha_version="2025.12.0",
                learn_more_url=DEPRECATION_URL,
                is_fixable=False,
                severity=IssueSeverity.WARNING,
@@ -107,9 +107,9 @@
      "title": "Deprecation notice: 32-bit architecture",
      "description": "This system uses 32-bit hardware (`{arch}`), which has been deprecated and will no longer receive updates after the release of Home Assistant 2025.12. As your hardware is no longer capable of running newer versions of Home Assistant, you will need to migrate to new hardware."
    },
    "deprecated_container_armv7": {
    "deprecated_container": {
      "title": "[%key:component::homeassistant::issues::deprecated_architecture::title%]",
      "description": "This system is running on a 32-bit operating system (`armv7`), which has been deprecated and will no longer receive updates after the release of Home Assistant 2025.12. Check if your system is capable of running a 64-bit operating system. If not, you will need to migrate to new hardware."
      "description": "This system is running on a 32-bit operating system (`{arch}`), which has been deprecated and will no longer receive updates after the release of Home Assistant 2025.12. Check if your system is capable of running a 64-bit operating system. If not, you will need to migrate to new hardware."
    },
    "deprecated_os_aarch64": {
      "title": "[%key:component::homeassistant::issues::deprecated_architecture::title%]",
@@ -1,9 +1,13 @@
"""The homee event platform."""

from pyHomee.const import AttributeType
from pyHomee.const import AttributeType, NodeProfile
from pyHomee.model import HomeeAttribute

from homeassistant.components.event import EventDeviceClass, EventEntity
from homeassistant.components.event import (
    EventDeviceClass,
    EventEntity,
    EventEntityDescription,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

@@ -13,6 +17,38 @@ from .entity import HomeeEntity
PARALLEL_UPDATES = 0


REMOTE_PROFILES = [
    NodeProfile.REMOTE,
    NodeProfile.TWO_BUTTON_REMOTE,
    NodeProfile.THREE_BUTTON_REMOTE,
    NodeProfile.FOUR_BUTTON_REMOTE,
]

EVENT_DESCRIPTIONS: dict[AttributeType, EventEntityDescription] = {
    AttributeType.BUTTON_STATE: EventEntityDescription(
        key="button_state",
        device_class=EventDeviceClass.BUTTON,
        event_types=["upper", "lower", "released"],
    ),
    AttributeType.UP_DOWN_REMOTE: EventEntityDescription(
        key="up_down_remote",
        device_class=EventDeviceClass.BUTTON,
        event_types=[
            "released",
            "up",
            "down",
            "stop",
            "up_long",
            "down_long",
            "stop_long",
            "c_button",
            "b_button",
            "a_button",
        ],
    ),
}


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: HomeeConfigEntry,
@@ -21,30 +57,31 @@ async def async_setup_entry(
    """Add event entities for homee."""

    async_add_entities(
        HomeeEvent(attribute, config_entry)
        HomeeEvent(attribute, config_entry, EVENT_DESCRIPTIONS[attribute.type])
        for node in config_entry.runtime_data.nodes
        for attribute in node.attributes
        if attribute.type == AttributeType.UP_DOWN_REMOTE
        if attribute.type in EVENT_DESCRIPTIONS
        and node.profile in REMOTE_PROFILES
        and not attribute.editable
    )


class HomeeEvent(HomeeEntity, EventEntity):
    """Representation of a homee event."""

    _attr_translation_key = "up_down_remote"
    _attr_event_types = [
        "released",
        "up",
        "down",
        "stop",
        "up_long",
        "down_long",
        "stop_long",
        "c_button",
        "b_button",
        "a_button",
    ]
    _attr_device_class = EventDeviceClass.BUTTON
    def __init__(
        self,
        attribute: HomeeAttribute,
        entry: HomeeConfigEntry,
        description: EventEntityDescription,
    ) -> None:
        """Initialize the homee event entity."""
        super().__init__(attribute, entry)
        self.entity_description = description
        self._attr_translation_key = description.key
        if attribute.instance > 0:
            self._attr_translation_key = f"{self._attr_translation_key}_instance"
            self._attr_translation_placeholders = {"instance": str(attribute.instance)}

    async def async_added_to_hass(self) -> None:
        """Add the homee event entity to home assistant."""
@@ -56,6 +93,5 @@ class HomeeEvent(HomeeEntity, EventEntity):
    @callback
    def _event_triggered(self, event: HomeeAttribute) -> None:
        """Handle a homee event."""
        if event.type == AttributeType.UP_DOWN_REMOTE:
            self._trigger_event(self.event_types[int(event.current_value)])
            self.schedule_update_ha_state()
        self._trigger_event(self.event_types[int(event.current_value)])
        self.schedule_update_ha_state()
@@ -160,12 +160,36 @@
      }
    },
    "event": {
      "button_state": {
        "name": "Switch",
        "state_attributes": {
          "event_type": {
            "state": {
              "upper": "Upper button",
              "lower": "Lower button",
              "released": "Released"
            }
          }
        }
      },
      "button_state_instance": {
        "name": "Switch {instance}",
        "state_attributes": {
          "event_type": {
            "state": {
              "upper": "[%key:component::homee::entity::event::button_state::state_attributes::event_type::state::upper%]",
              "lower": "[%key:component::homee::entity::event::button_state::state_attributes::event_type::state::lower%]",
              "released": "[%key:component::homee::entity::event::button_state::state_attributes::event_type::state::released%]"
            }
          }
        }
      },
      "up_down_remote": {
        "name": "Up/down remote",
        "state_attributes": {
          "event_type": {
            "state": {
              "release": "Released",
              "release": "[%key:component::homee::entity::event::button_state::state_attributes::event_type::state::released%]",
              "up": "Up",
              "down": "Down",
              "stop": "Stop",

@@ -63,7 +63,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
        )
    )

    await async_setup_services(hass)
    async_setup_services(hass)

    return True


@@ -216,8 +216,6 @@ class HomematicipHeatingGroup(HomematicipGenericEntity, ClimateEntity):

    async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
        """Set new target hvac mode."""
        if hvac_mode not in self.hvac_modes:
            return

        if hvac_mode == HVACMode.AUTO:
            await self._device.set_control_mode_async(HMIP_AUTOMATIC_CM)

@@ -128,6 +128,7 @@ class HomematicipHAP:
                self.config_entry.data.get(HMIPC_AUTHTOKEN),
                self.config_entry.data.get(HMIPC_NAME),
            )

        except HmipcConnectionError as err:
            raise ConfigEntryNotReady from err
        except Exception as err:  # noqa: BLE001
@@ -210,41 +211,13 @@ class HomematicipHAP:
        for device in self.home.devices:
            device.fire_update_event()

    async def async_connect(self) -> None:
        """Start WebSocket connection."""
        tries = 0
        while True:
            retry_delay = 2 ** min(tries, 8)
    async def async_connect(self, home: AsyncHome) -> None:
        """Connect to HomematicIP Cloud Websocket."""
        await home.enable_events()

            try:
                await self.home.get_current_state_async()
                hmip_events = self.home.enable_events()
                self.home.set_on_connected_handler(self.ws_connected_handler)
                self.home.set_on_disconnected_handler(self.ws_disconnected_handler)
                tries = 0
                await hmip_events
            except HmipConnectionError:
                _LOGGER.error(
                    (
                        "Error connecting to HomematicIP with HAP %s. "
                        "Retrying in %d seconds"
                    ),
                    self.config_entry.unique_id,
                    retry_delay,
                )

            if self._ws_close_requested:
                break
            self._ws_close_requested = False
            tries += 1

            try:
                self._retry_task = self.hass.async_create_task(
                    asyncio.sleep(retry_delay)
                )
                await self._retry_task
            except asyncio.CancelledError:
                break
        home.set_on_connected_handler(self.ws_connected_handler)
        home.set_on_disconnected_handler(self.ws_disconnected_handler)
        home.set_on_reconnect_handler(self.ws_reconnected_handler)

    async def async_reset(self) -> bool:
        """Close the websocket connection."""
@@ -272,14 +245,22 @@ class HomematicipHAP:

    async def ws_connected_handler(self) -> None:
        """Handle websocket connected."""
        _LOGGER.debug("WebSocket connection to HomematicIP established")
        _LOGGER.info("Websocket connection to HomematicIP Cloud established")
        if self._ws_connection_closed.is_set():
            await self.get_state()
        self._ws_connection_closed.clear()

    async def ws_disconnected_handler(self) -> None:
        """Handle websocket disconnection."""
        _LOGGER.warning("WebSocket connection to HomematicIP closed")
        _LOGGER.warning("Websocket connection to HomematicIP Cloud closed")
        self._ws_connection_closed.set()

    async def ws_reconnected_handler(self, reason: str) -> None:
        """Handle websocket reconnection."""
        _LOGGER.info(
            "Websocket connection to HomematicIP Cloud re-established due to reason: %s",
            reason,
        )
        self._ws_connection_closed.set()

    async def get_hap(
@@ -306,6 +287,6 @@ class HomematicipHAP:
        home.on_update(self.async_update)
        home.on_create(self.async_create_entity)

        hass.loop.create_task(self.async_connect())
        await self.async_connect(home)

        return home

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/homematicip_cloud",
  "iot_class": "cloud_push",
  "loggers": ["homematicip"],
  "requirements": ["homematicip==2.0.4"]
  "requirements": ["homematicip==2.0.5"]
}

@@ -12,7 +12,7 @@ from homematicip.group import HeatingGroup
import voluptuous as vol

from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.config_validation import comp_entity_ids
@@ -120,7 +120,8 @@ SCHEMA_SET_HOME_COOLING_MODE = vol.Schema(
)


async def async_setup_services(hass: HomeAssistant) -> None:
@callback
def async_setup_services(hass: HomeAssistant) -> None:
    """Set up the HomematicIP Cloud services."""

    @verify_domain_control(hass, DOMAIN)
Some files were not shown because too many files have changed in this diff.