Mirror of https://github.com/esphome/esphome.git, synced 2025-07-30 15:16:37 +00:00

Merge branch 'dev' into integration
Commit 76a63e5d55
@@ -1 +1 @@
7920671c938a5ea6a11ac4594204b5ec8f38d579c962bf1f185e8d5e3ad879be
32b0db73b3ae01ba18c9cbb1dabbd8156bc14dded500471919bd0a3dc33916e0
.github/workflows/auto-label-pr.yml (vendored): 591 lines changed
@@ -14,6 +14,7 @@ env:
  SMALL_PR_THRESHOLD: 30
  MAX_LABELS: 15
  TOO_BIG_THRESHOLD: 1000
  COMPONENT_LABEL_THRESHOLD: 10

jobs:
  label:

@@ -23,24 +24,6 @@ jobs:
      - name: Checkout
        uses: actions/checkout@v4.2.2

      - name: Get changes
        id: changes
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          # Get PR number
          pr_number="${{ github.event.pull_request.number }}"

          # Get list of changed files using gh CLI
          files=$(gh pr diff $pr_number --name-only)
          echo "files<<EOF" >> $GITHUB_OUTPUT
          echo "$files" >> $GITHUB_OUTPUT
          echo "EOF" >> $GITHUB_OUTPUT

          # Get file stats (additions + deletions) using gh CLI
          stats=$(gh pr view $pr_number --json files --jq '.files | map(.additions + .deletions) | add')
          echo "total_changes=${stats:-0}" >> $GITHUB_OUTPUT

      - name: Generate a token
        id: generate-token
        uses: actions/create-github-app-token@v2
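The step removed above computed two outputs with the gh CLI; the rewritten script below derives the same data in-process via github.paginate. For reference, a minimal Python sketch (not part of this commit) of the same computation against the GitHub REST API; the PR number and token are placeholders:

import requests

def pr_files(owner: str, repo: str, pr_number: int, token: str) -> list[dict]:
    # Follow the paginated /pulls/{n}/files endpoint, 100 entries per page.
    files, page = [], 1
    while True:
        resp = requests.get(
            f"https://api.github.com/repos/{owner}/{repo}/pulls/{pr_number}/files",
            headers={"Authorization": f"Bearer {token}"},
            params={"per_page": 100, "page": page},
            timeout=30,
        )
        resp.raise_for_status()
        batch = resp.json()
        if not batch:
            break
        files.extend(batch)
        page += 1
    return files

files = pr_files("esphome", "esphome", 1, "<token>")  # placeholder PR number/token
changed_files = [f["filename"] for f in files]
total_changes = sum(f.get("additions", 0) + f.get("deletions", 0) for f in files)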
@@ -55,24 +38,16 @@ jobs:
script: |
const fs = require('fs');

const { owner, repo } = context.repo;
const pr_number = context.issue.number;

// Hidden marker to identify bot comments from this workflow
// Constants
const SMALL_PR_THRESHOLD = parseInt('${{ env.SMALL_PR_THRESHOLD }}');
const MAX_LABELS = parseInt('${{ env.MAX_LABELS }}');
const TOO_BIG_THRESHOLD = parseInt('${{ env.TOO_BIG_THRESHOLD }}');
const COMPONENT_LABEL_THRESHOLD = parseInt('${{ env.COMPONENT_LABEL_THRESHOLD }}');
const BOT_COMMENT_MARKER = '<!-- auto-label-pr-bot -->';
const CODEOWNERS_MARKER = '<!-- codeowners-request -->';
const TOO_BIG_MARKER = '<!-- too-big-request -->';

// Get current labels
const { data: currentLabelsData } = await github.rest.issues.listLabelsOnIssue({
owner,
repo,
issue_number: pr_number
});
const currentLabels = currentLabelsData.map(label => label.name);

// Define managed labels that this workflow controls
const managedLabels = currentLabels.filter(label =>
label.startsWith('component: ') ||
[
const MANAGED_LABELS = [
'new-component',
'new-platform',
'new-target-platform',

@@ -86,172 +61,157 @@ jobs:
'has-tests',
'needs-tests',
'needs-docs',
'needs-codeowners',
'too-big',
'labeller-recheck'
].includes(label)
];

const DOCS_PR_PATTERNS = [
/https:\/\/github\.com\/esphome\/esphome-docs\/pull\/\d+/,
/esphome\/esphome-docs#\d+/
];

// Global state
const { owner, repo } = context.repo;
const pr_number = context.issue.number;

// Get current labels and PR data
const { data: currentLabelsData } = await github.rest.issues.listLabelsOnIssue({
owner,
repo,
issue_number: pr_number
});
const currentLabels = currentLabelsData.map(label => label.name);
const managedLabels = currentLabels.filter(label =>
label.startsWith('component: ') || MANAGED_LABELS.includes(label)
);

// Check for mega-PR early - if present, skip most automatic labeling
const isMegaPR = currentLabels.includes('mega-pr');

// Get all PR files with automatic pagination
const prFiles = await github.paginate(
github.rest.pulls.listFiles,
{
owner,
repo,
pull_number: pr_number
}
);

// Calculate data from PR files
const changedFiles = prFiles.map(file => file.filename);
const totalChanges = prFiles.reduce((sum, file) => sum + (file.additions || 0) + (file.deletions || 0), 0);

console.log('Current labels:', currentLabels.join(', '));
console.log('Managed labels:', managedLabels.join(', '));

// Get changed files
const changedFiles = `${{ steps.changes.outputs.files }}`.split('\n').filter(f => f.length > 0);
const totalChanges = parseInt('${{ steps.changes.outputs.total_changes }}') || 0;

console.log('Changed files:', changedFiles.length);
console.log('Total changes:', totalChanges);
if (isMegaPR) {
console.log('Mega-PR detected - applying limited labeling logic');
}

const labels = new Set();

// Fetch TARGET_PLATFORMS and PLATFORM_COMPONENTS from API
let targetPlatforms = [];
let platformComponents = [];

// Fetch API data
async function fetchApiData() {
try {
const response = await fetch('https://data.esphome.io/components.json');
const componentsData = await response.json();

// Extract target platforms and platform components directly from API
targetPlatforms = componentsData.target_platforms || [];
platformComponents = componentsData.platform_components || [];

console.log('Target platforms from API:', targetPlatforms.length, targetPlatforms);
console.log('Platform components from API:', platformComponents.length, platformComponents);
return {
targetPlatforms: componentsData.target_platforms || [],
platformComponents: componentsData.platform_components || []
};
} catch (error) {
console.log('Failed to fetch components data from API:', error.message);
return { targetPlatforms: [], platformComponents: [] };
}
}
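fetchApiData degrades to empty lists when the components index is unreachable, so labeling still runs without platform data. A rough Python equivalent of that fallback behavior (illustrative only; the URL and JSON keys are taken from the script above):

import requests

def fetch_api_data() -> dict:
    # target_platforms / platform_components come from the published index;
    # any failure yields empty lists instead of aborting the labeler.
    try:
        data = requests.get("https://data.esphome.io/components.json", timeout=30).json()
        return {
            "targetPlatforms": data.get("target_platforms", []),
            "platformComponents": data.get("platform_components", []),
        }
    except (requests.RequestException, ValueError) as err:
        print(f"Failed to fetch components data from API: {err}")
        return {"targetPlatforms": [], "platformComponents": []}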
// Get environment variables
const smallPrThreshold = parseInt('${{ env.SMALL_PR_THRESHOLD }}');
const maxLabels = parseInt('${{ env.MAX_LABELS }}');
const tooBigThreshold = parseInt('${{ env.TOO_BIG_THRESHOLD }}');

// Strategy: Merge to release or beta branch
// Strategy: Merge branch detection
async function detectMergeBranch() {
const labels = new Set();
const baseRef = context.payload.pull_request.base.ref;
if (baseRef !== 'dev') {

if (baseRef === 'release') {
labels.add('merging-to-release');
} else if (baseRef === 'beta') {
labels.add('merging-to-beta');
}

// When targeting non-dev branches, only use merge warning labels
const finalLabels = Array.from(labels);
console.log('Computed labels (merge branch only):', finalLabels.join(', '));

// Add new labels
if (finalLabels.length > 0) {
console.log(`Adding labels: ${finalLabels.join(', ')}`);
await github.rest.issues.addLabels({
owner,
repo,
issue_number: pr_number,
labels: finalLabels
});
return labels;
}

// Remove old managed labels that are no longer needed
const labelsToRemove = managedLabels.filter(label =>
!finalLabels.includes(label)
);

for (const label of labelsToRemove) {
console.log(`Removing label: ${label}`);
try {
await github.rest.issues.removeLabel({
owner,
repo,
issue_number: pr_number,
name: label
});
} catch (error) {
console.log(`Failed to remove label ${label}:`, error.message);
}
}

return; // Exit early, don't process other strategies
}

// Strategy: Component and Platform labeling
// Strategy: Component and platform labeling
async function detectComponentPlatforms(apiData) {
const labels = new Set();
const componentRegex = /^esphome\/components\/([^\/]+)\//;
const targetPlatformRegex = new RegExp(`^esphome\/components\/(${targetPlatforms.join('|')})/`);
const targetPlatformRegex = new RegExp(`^esphome\/components\/(${apiData.targetPlatforms.join('|')})/`);

for (const file of changedFiles) {
// Check for component changes
const componentMatch = file.match(componentRegex);
if (componentMatch) {
const component = componentMatch[1];
labels.add(`component: ${component}`);
labels.add(`component: ${componentMatch[1]}`);
}

// Check for target platform changes
const platformMatch = file.match(targetPlatformRegex);
if (platformMatch) {
const targetPlatform = platformMatch[1];
labels.add(`platform: ${targetPlatform}`);
labels.add(`platform: ${platformMatch[1]}`);
}
}

// Get PR files for new component/platform detection
const { data: prFiles } = await github.rest.pulls.listFiles({
owner,
repo,
pull_number: pr_number
});
return labels;
}

// Strategy: New component detection
async function detectNewComponents() {
const labels = new Set();
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);

// Calculate changes excluding root tests directory for too-big calculation
const testChanges = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.additions || 0) + (file.deletions || 0), 0);

const nonTestChanges = totalChanges - testChanges;
console.log(`Test changes: ${testChanges}, Non-test changes: ${nonTestChanges}`);

// Strategy: New Component detection
for (const file of addedFiles) {
// Check for new component files: esphome/components/{component}/__init__.py
const componentMatch = file.match(/^esphome\/components\/([^\/]+)\/__init__\.py$/);
if (componentMatch) {
try {
// Read the content directly from the filesystem since we have it checked out
const content = fs.readFileSync(file, 'utf8');

// Strategy: New Target Platform detection
if (content.includes('IS_TARGET_PLATFORM = True')) {
labels.add('new-target-platform');
}
labels.add('new-component');
} catch (error) {
console.log(`Failed to read content of ${file}:`, error.message);
// Fallback: assume it's a new component if we can't read the content
labels.add('new-component');
}
labels.add('new-component');
}
}

// Strategy: New Platform detection
return labels;
}

// Strategy: New platform detection
async function detectNewPlatforms(apiData) {
const labels = new Set();
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);

for (const file of addedFiles) {
// Check for new platform files: esphome/components/{component}/{platform}.py
const platformFileMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\.py$/);
if (platformFileMatch) {
const [, component, platform] = platformFileMatch;
if (platformComponents.includes(platform)) {
if (apiData.platformComponents.includes(platform)) {
labels.add('new-platform');
}
}

// Check for new platform files: esphome/components/{component}/{platform}/__init__.py
const platformDirMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\/__init__\.py$/);
if (platformDirMatch) {
const [, component, platform] = platformDirMatch;
if (platformComponents.includes(platform)) {
if (apiData.platformComponents.includes(platform)) {
labels.add('new-platform');
}
}
}

return labels;
}
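Both detectors above key purely off file paths. The same matching rules condensed into Python (regexes mirror the script; re.escape is added defensively here, which the JS version does not do):

import re

def component_platform_labels(changed_files: list[str], target_platforms: list[str]) -> set[str]:
    labels: set[str] = set()
    component_re = re.compile(r"^esphome/components/([^/]+)/")
    platform_re = re.compile(
        "^esphome/components/(" + "|".join(map(re.escape, target_platforms)) + ")/"
    )
    for path in changed_files:
        if m := component_re.match(path):
            labels.add(f"component: {m.group(1)}")
        if m := platform_re.match(path):
            labels.add(f"platform: {m.group(1)}")
    return labels

# component_platform_labels(["esphome/components/esp32/gpio.py"], ["esp32"])
# -> {"component: esp32", "platform: esp32"}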
// Strategy: Core files detection
async function detectCoreChanges() {
const labels = new Set();
const coreFiles = changedFiles.filter(file =>
file.startsWith('esphome/core/') ||
(file.startsWith('esphome/') && file.split('/').length === 2)

@@ -261,12 +221,33 @@ jobs:
labels.add('core');
}

// Strategy: Small PR detection
if (totalChanges <= smallPrThreshold) {
return labels;
}

// Strategy: PR size detection
async function detectPRSize() {
const labels = new Set();
const testChanges = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.additions || 0) + (file.deletions || 0), 0);

const nonTestChanges = totalChanges - testChanges;

if (totalChanges <= SMALL_PR_THRESHOLD) {
labels.add('small-pr');
}

// Don't add too-big if mega-pr label is already present
if (nonTestChanges > TOO_BIG_THRESHOLD && !isMegaPR) {
labels.add('too-big');
}

return labels;
}
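detectPRSize reduces to two thresholds, with tests/ excluded from the too-big measure so large test suites are not penalized. In outline (thresholds from the env block at the top of the workflow; illustrative):

SMALL_PR_THRESHOLD = 30
TOO_BIG_THRESHOLD = 1000

def size_labels(files: list[dict], is_mega_pr: bool) -> set[str]:
    total = sum(f.get("additions", 0) + f.get("deletions", 0) for f in files)
    test_changes = sum(
        f.get("additions", 0) + f.get("deletions", 0)
        for f in files
        if f["filename"].startswith("tests/")
    )
    labels: set[str] = set()
    if total <= SMALL_PR_THRESHOLD:
        labels.add("small-pr")
    if total - test_changes > TOO_BIG_THRESHOLD and not is_mega_pr:
        labels.add("too-big")  # mega-pr PRs are exempt
    return labels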
// Strategy: Dashboard changes
async function detectDashboardChanges() {
const labels = new Set();
const dashboardFiles = changedFiles.filter(file =>
file.startsWith('esphome/dashboard/') ||
file.startsWith('esphome/components/dashboard_import/')

@@ -276,7 +257,12 @@ jobs:
labels.add('dashboard');
}

return labels;
}

// Strategy: GitHub Actions changes
async function detectGitHubActionsChanges() {
const labels = new Set();
const githubActionsFiles = changedFiles.filter(file =>
file.startsWith('.github/workflows/')
);

@@ -285,9 +271,14 @@ jobs:
labels.add('github-actions');
}

// Strategy: Code Owner detection
return labels;
}

// Strategy: Code owner detection
async function detectCodeOwner() {
const labels = new Set();

try {
// Fetch CODEOWNERS file from the repository (in case it was changed in this PR)
const { data: codeownersFile } = await github.rest.repos.getContent({
owner,
repo,

@@ -297,14 +288,10 @@ jobs:
const codeownersContent = Buffer.from(codeownersFile.content, 'base64').toString('utf8');
const prAuthor = context.payload.pull_request.user.login;

// Parse CODEOWNERS file
const codeownersLines = codeownersContent.split('\n')
.map(line => line.trim())
.filter(line => line && !line.startsWith('#'));

let isCodeOwner = false;

// Precompile CODEOWNERS patterns into regex objects
const codeownersRegexes = codeownersLines.map(line => {
const parts = line.split(/\s+/);
const pattern = parts[0];

@@ -312,17 +299,15 @@ jobs:

let regex;
if (pattern.endsWith('*')) {
// Directory pattern like "esphome/components/api/*"
const dir = pattern.slice(0, -1);
regex = new RegExp(`^${dir.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`);
} else if (pattern.includes('*')) {
// Glob pattern
// First escape all regex special chars except *, then replace * with .*
const regexPattern = pattern
.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
.replace(/\\*/g, '.*');
.replace(/[.+?^${}()|[\]\\]/g, '\\$&')
.replace(/\*/g, '.*');
regex = new RegExp(`^${regexPattern}$`);
} else {
// Exact match
regex = new RegExp(`^${pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}$`);
}
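The glob branch above also fixes an escaping bug: the old code escaped * together with the other metacharacters and then tried to rewrite the already-escaped sequence, while the new code leaves * out of the escape class and only then expands it to .*. The same three-case conversion in Python (illustrative; CODEOWNERS matching reduced to exactly the cases the script handles):

import re

def codeowners_pattern_to_regex(pattern: str) -> re.Pattern:
    if pattern.endswith("*"):
        # Directory pattern like "esphome/components/api/*"
        return re.compile("^" + re.escape(pattern[:-1]))
    if "*" in pattern:
        # Escape regex specials except '*', then turn '*' into '.*'
        escaped = re.sub(r"([.+?^${}()|\[\]\\])", r"\\\1", pattern)
        return re.compile("^" + escaped.replace("*", ".*") + "$")
    return re.compile("^" + re.escape(pattern) + "$")  # exact match

assert codeowners_pattern_to_regex("esphome/components/gps/*").match(
    "esphome/components/gps/gps.cpp"
)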
@@ -331,109 +316,147 @@ jobs:

for (const file of changedFiles) {
for (const { regex, owners } of codeownersRegexes) {
if (regex.test(file)) {
// Check if PR author is in the owners list
if (owners.some(owner => owner === `@${prAuthor}`)) {
isCodeOwner = true;
break;
}
}
}
if (isCodeOwner) break;
}

if (isCodeOwner) {
if (regex.test(file) && owners.some(owner => owner === `@${prAuthor}`)) {
labels.add('by-code-owner');
return labels;
}
}
}
} catch (error) {
console.log('Failed to read or parse CODEOWNERS file:', error.message);
}

return labels;
}

// Strategy: Test detection
const testFiles = changedFiles.filter(file =>
file.startsWith('tests/')
);
async function detectTests() {
const labels = new Set();
const testFiles = changedFiles.filter(file => file.startsWith('tests/'));

if (testFiles.length > 0) {
labels.add('has-tests');
} else {
// Only check for needs-tests if this is a new component or new platform
if (labels.has('new-component') || labels.has('new-platform')) {
}

return labels;
}

// Strategy: Requirements detection
async function detectRequirements(allLabels) {
const labels = new Set();

// Check for missing tests
if ((allLabels.has('new-component') || allLabels.has('new-platform')) && !allLabels.has('has-tests')) {
labels.add('needs-tests');
}
}

// Strategy: Documentation check for new components/platforms
if (labels.has('new-component') || labels.has('new-platform')) {
// Check for missing docs
if (allLabels.has('new-component') || allLabels.has('new-platform')) {
const prBody = context.payload.pull_request.body || '';

// Look for documentation PR links
// Patterns to match:
// - https://github.com/esphome/esphome-docs/pull/1234
// - esphome/esphome-docs#1234
const docsPrPatterns = [
/https:\/\/github\.com\/esphome\/esphome-docs\/pull\/\d+/,
/esphome\/esphome-docs#\d+/
];

const hasDocsLink = docsPrPatterns.some(pattern => pattern.test(prBody));
const hasDocsLink = DOCS_PR_PATTERNS.some(pattern => pattern.test(prBody));

if (!hasDocsLink) {
labels.add('needs-docs');
}
}

// Convert Set to Array
let finalLabels = Array.from(labels);
// Check for missing CODEOWNERS
if (allLabels.has('new-component')) {
const codeownersModified = prFiles.some(file =>
file.filename === 'CODEOWNERS' &&
(file.status === 'modified' || file.status === 'added') &&
(file.additions || 0) > 0
);

console.log('Computed labels:', finalLabels.join(', '));
if (!codeownersModified) {
labels.add('needs-codeowners');
}
}

// Check if PR has mega-pr label
const isMegaPR = currentLabels.includes('mega-pr');
return labels;
}

// Check if PR is too big (either too many labels or too many line changes)
const tooManyLabels = finalLabels.length > maxLabels;
const tooManyChanges = nonTestChanges > tooBigThreshold;
// Generate review messages
function generateReviewMessages(finalLabels) {
const messages = [];
const prAuthor = context.payload.pull_request.user.login;

if ((tooManyLabels || tooManyChanges) && !isMegaPR) {
const originalLength = finalLabels.length;
console.log(`PR is too big - Labels: ${originalLength}, Changes: ${totalChanges} (non-test: ${nonTestChanges})`);
// Too big message
if (finalLabels.includes('too-big')) {
const testChanges = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.additions || 0) + (file.deletions || 0), 0);
const nonTestChanges = totalChanges - testChanges;

const tooManyLabels = finalLabels.length > MAX_LABELS;
const tooManyChanges = nonTestChanges > TOO_BIG_THRESHOLD;

let message = `${TOO_BIG_MARKER}\n### 📦 Pull Request Size\n\n`;

if (tooManyLabels && tooManyChanges) {
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests) and affects ${finalLabels.length} different components/areas.`;
} else if (tooManyLabels) {
message += `This PR affects ${finalLabels.length} different components/areas.`;
} else {
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests).`;
}

message += ` Please consider breaking it down into smaller, focused PRs to make review easier and reduce the risk of conflicts.\n\n`;
message += `For guidance on breaking down large PRs, see: https://developers.esphome.io/contributing/submitting-your-work/#how-to-approach-large-submissions`;

messages.push(message);
}

// CODEOWNERS message
if (finalLabels.includes('needs-codeowners')) {
const message = `${CODEOWNERS_MARKER}\n### 👥 Code Ownership\n\n` +
`Hey there @${prAuthor},\n` +
`Thanks for submitting this pull request! Can you add yourself as a codeowner for this integration? ` +
`This way we can notify you if a bug report for this integration is reported.\n\n` +
`In \`__init__.py\` of the integration, please add:\n\n` +
`\`\`\`python\nCODEOWNERS = ["@${prAuthor}"]\n\`\`\`\n\n` +
`And run \`script/build_codeowners.py\``;

messages.push(message);
}

return messages;
}

// Handle reviews
async function handleReviews(finalLabels) {
const reviewMessages = generateReviewMessages(finalLabels);
const hasReviewableLabels = finalLabels.some(label =>
['too-big', 'needs-codeowners'].includes(label)
);

// Get all reviews on this PR to check for existing bot reviews
const { data: reviews } = await github.rest.pulls.listReviews({
owner,
repo,
pull_number: pr_number
});

// Check if there's already an active bot review requesting changes
const existingBotReview = reviews.find(review =>
const botReviews = reviews.filter(review =>
review.user.type === 'Bot' &&
review.state === 'CHANGES_REQUESTED' &&
review.body && review.body.includes(BOT_COMMENT_MARKER)
);

// If too big due to line changes only, keep original labels and add too-big
// If too big due to too many labels, replace with just too-big
if (tooManyChanges && !tooManyLabels) {
finalLabels.push('too-big');
} else {
finalLabels = ['too-big'];
}
if (hasReviewableLabels) {
const reviewBody = `${BOT_COMMENT_MARKER}\n\n${reviewMessages.join('\n\n---\n\n')}`;

// Only create a new review if there isn't already an active bot review
if (!existingBotReview) {
// Create appropriate review message
let reviewBody;
if (tooManyLabels && tooManyChanges) {
reviewBody = `${BOT_COMMENT_MARKER}\nThis PR is too large with ${nonTestChanges} line changes (excluding tests) and affects ${originalLength} different components/areas. Please consider breaking it down into smaller, focused PRs to make review easier and reduce the risk of conflicts.\n\nFor guidance on breaking down large PRs, see: https://developers.esphome.io/contributing/submitting-your-work/#but-howwww-looonnnggg`;
} else if (tooManyLabels) {
reviewBody = `${BOT_COMMENT_MARKER}\nThis PR affects ${originalLength} different components/areas. Please consider breaking it down into smaller, focused PRs to make review easier and reduce the risk of conflicts.\n\nFor guidance on breaking down large PRs, see: https://developers.esphome.io/contributing/submitting-your-work/#but-howwww-looonnnggg`;
if (botReviews.length > 0) {
// Update existing review
await github.rest.pulls.updateReview({
owner,
repo,
pull_number: pr_number,
review_id: botReviews[0].id,
body: reviewBody
});
console.log('Updated existing bot review');
} else {
reviewBody = `${BOT_COMMENT_MARKER}\nThis PR is too large with ${nonTestChanges} line changes (excluding tests). Please consider breaking it down into smaller, focused PRs to make review easier and reduce the risk of conflicts.\n\nFor guidance on breaking down large PRs, see: https://developers.esphome.io/contributing/submitting-your-work/#but-howwww-looonnnggg`;
}

// Request changes on the PR
// Create new review
await github.rest.pulls.createReview({
owner,
repo,

@@ -441,32 +464,10 @@ jobs:
body: reviewBody,
event: 'REQUEST_CHANGES'
});
console.log('Created new "too big" review requesting changes');
} else {
console.log('Skipping review creation - existing bot review already requesting changes');
console.log('Created new bot review');
}
} else {
// Check if PR was previously too big but is now acceptable
const wasPreviouslyTooBig = currentLabels.includes('too-big');

if (wasPreviouslyTooBig || isMegaPR) {
console.log('PR is no longer too big or has mega-pr label - dismissing bot reviews');

// Get all reviews on this PR to find reviews to dismiss
const { data: reviews } = await github.rest.pulls.listReviews({
owner,
repo,
pull_number: pr_number
});

// Find bot reviews that requested changes
const botReviews = reviews.filter(review =>
review.user.type === 'Bot' &&
review.state === 'CHANGES_REQUESTED' &&
review.body && review.body.includes(BOT_COMMENT_MARKER)
);

// Dismiss bot reviews
} else if (botReviews.length > 0) {
// Dismiss existing reviews
for (const review of botReviews) {
try {
await github.rest.pulls.dismissReview({

@@ -474,11 +475,9 @@ jobs:
repo,
pull_number: pr_number,
review_id: review.id,
message: isMegaPR ?
'Review dismissed: mega-pr label was added' :
'Review dismissed: PR size is now acceptable'
message: 'Review dismissed: All requirements have been met'
});
console.log(`Dismissed review ${review.id}`);
console.log(`Dismissed bot review ${review.id}`);
} catch (error) {
console.log(`Failed to dismiss review ${review.id}:`, error.message);
}

@@ -486,7 +485,114 @@ jobs:
}
}

// Add new labels
// Main execution
const apiData = await fetchApiData();
const baseRef = context.payload.pull_request.base.ref;

// Early exit for non-dev branches
if (baseRef !== 'dev') {
const branchLabels = await detectMergeBranch();
const finalLabels = Array.from(branchLabels);

console.log('Computed labels (merge branch only):', finalLabels.join(', '));

// Apply labels
if (finalLabels.length > 0) {
await github.rest.issues.addLabels({
owner,
repo,
issue_number: pr_number,
labels: finalLabels
});
}

// Remove old managed labels
const labelsToRemove = managedLabels.filter(label => !finalLabels.includes(label));
for (const label of labelsToRemove) {
try {
await github.rest.issues.removeLabel({
owner,
repo,
issue_number: pr_number,
name: label
});
} catch (error) {
console.log(`Failed to remove label ${label}:`, error.message);
}
}

return;
}

// Run all strategies
const [
branchLabels,
componentLabels,
newComponentLabels,
newPlatformLabels,
coreLabels,
sizeLabels,
dashboardLabels,
actionsLabels,
codeOwnerLabels,
testLabels
] = await Promise.all([
detectMergeBranch(),
detectComponentPlatforms(apiData),
detectNewComponents(),
detectNewPlatforms(apiData),
detectCoreChanges(),
detectPRSize(),
detectDashboardChanges(),
detectGitHubActionsChanges(),
detectCodeOwner(),
detectTests()
]);

// Combine all labels
const allLabels = new Set([
...branchLabels,
...componentLabels,
...newComponentLabels,
...newPlatformLabels,
...coreLabels,
...sizeLabels,
...dashboardLabels,
...actionsLabels,
...codeOwnerLabels,
...testLabels
]);
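The main-execution block treats each detector as an independent strategy and unions the results, which is what makes the Promise.all above safe: no strategy mutates another's state. The orchestration pattern, condensed to Python:

import asyncio
from collections.abc import Awaitable, Callable

async def compute_labels(detectors: list[Callable[[], Awaitable[set[str]]]]) -> set[str]:
    # Run every strategy concurrently; each returns its own label set.
    results = await asyncio.gather(*(d() for d in detectors))
    return set().union(*results)

# Usage sketch, assuming detector coroutines mirroring the script:
# labels = asyncio.run(compute_labels([detect_merge_branch, detect_pr_size]))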
// Detect requirements based on all other labels
const requirementLabels = await detectRequirements(allLabels);
for (const label of requirementLabels) {
allLabels.add(label);
}

let finalLabels = Array.from(allLabels);

// For mega-PRs, exclude component labels if there are too many
if (isMegaPR) {
const componentLabels = finalLabels.filter(label => label.startsWith('component: '));
if (componentLabels.length > COMPONENT_LABEL_THRESHOLD) {
finalLabels = finalLabels.filter(label => !label.startsWith('component: '));
console.log(`Mega-PR detected - excluding ${componentLabels.length} component labels (threshold: ${COMPONENT_LABEL_THRESHOLD})`);
}
}

// Handle too many labels (only for non-mega PRs)
const tooManyLabels = finalLabels.length > MAX_LABELS;

if (tooManyLabels && !isMegaPR && !finalLabels.includes('too-big')) {
finalLabels = ['too-big'];
}

console.log('Computed labels:', finalLabels.join(', '));

// Handle reviews
await handleReviews(finalLabels);

// Apply labels
if (finalLabels.length > 0) {
console.log(`Adding labels: ${finalLabels.join(', ')}`);
await github.rest.issues.addLabels({

@@ -497,11 +603,8 @@ jobs:
});
}

// Remove old managed labels that are no longer needed
const labelsToRemove = managedLabels.filter(label =>
!finalLabels.includes(label)
);

// Remove old managed labels
const labelsToRemove = managedLabels.filter(label => !finalLabels.includes(label));
for (const label of labelsToRemove) {
console.log(`Removing label: ${label}`);
try {
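The closing section reconciles the computed labels against what is already on the PR: everything computed is (re)applied, and only stale labels this workflow owns are removed, so human-applied labels survive. The rule, as a small Python function (illustrative):

def reconcile_labels(
    current_labels: set[str],
    final_labels: set[str],
    managed_names: set[str],
) -> tuple[set[str], set[str]]:
    # A label is "managed" if this workflow could have produced it.
    managed = {
        label
        for label in current_labels
        if label.startswith("component: ") or label in managed_names
    }
    to_add = final_labels - current_labels
    to_remove = managed - final_labels
    return to_add, to_remove

add, remove = reconcile_labels({"component: gps", "bugfix"}, {"small-pr"}, {"too-big"})
assert add == {"small-pr"} and remove == {"component: gps"}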

@@ -11,7 +11,7 @@ ci:
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    # Ruff version.
    rev: v0.12.4
    rev: v0.12.5
    hooks:
      # Run the linter.
      - id: ruff

@@ -89,9 +89,9 @@ def choose_prompt(options, purpose: str = None):
def choose_upload_log_host(
    default, check_default, show_ota, show_mqtt, show_api, purpose: str = None
):
    options = []
    for port in get_serial_ports():
        options.append((f"{port.path} ({port.description})", port.path))
    options = [
        (f"{port.path} ({port.description})", port.path) for port in get_serial_ports()
    ]
    if default == "SERIAL":
        return choose_prompt(options, purpose=purpose)
    if (show_ota and "ota" in CORE.config) or (show_api and "api" in CORE.config):

@@ -119,9 +119,7 @@ def mqtt_logging_enabled(mqtt_config):
        return False
    if CONF_TOPIC not in log_topic:
        return False
    if log_topic.get(CONF_LEVEL, None) == "NONE":
        return False
    return True
    return log_topic.get(CONF_LEVEL, None) != "NONE"


def get_port_type(port):

@@ -14,6 +2,8 @@ with warnings.catch_warnings():
    from aioesphomeapi import APIClient, parse_log_message
    from aioesphomeapi.log_runner import async_run

import contextlib

from esphome.const import CONF_KEY, CONF_PASSWORD, CONF_PORT, __version__
from esphome.core import CORE

@@ -66,7 +68,5 @@ async def async_run_logs(config: dict[str, Any], address: str) -> None:

def run_logs(config: dict[str, Any], address: str) -> None:
    """Run the logs command."""
    try:
    with contextlib.suppress(KeyboardInterrupt):
        asyncio.run(async_run_logs(config, address))
    except KeyboardInterrupt:
        pass
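The run_logs change swaps an explicit try/except for contextlib.suppress; for a single suppressed exception type the two spellings behave identically:

import contextlib

def risky() -> None:  # stand-in for asyncio.run(async_run_logs(config, address))
    raise KeyboardInterrupt

try:  # before
    risky()
except KeyboardInterrupt:
    pass

with contextlib.suppress(KeyboardInterrupt):  # after
    risky()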

@@ -266,8 +266,10 @@ async def delayed_off_filter_to_code(config, filter_id):
async def autorepeat_filter_to_code(config, filter_id):
    timings = []
    if len(config) > 0:
        for conf in config:
            timings.append((conf[CONF_DELAY], conf[CONF_TIME_OFF], conf[CONF_TIME_ON]))
        timings.extend(
            (conf[CONF_DELAY], conf[CONF_TIME_OFF], conf[CONF_TIME_ON])
            for conf in config
        )
    else:
        timings.append(
            (

@@ -573,16 +575,15 @@ async def setup_binary_sensor_core_(var, config):
        await automation.build_automation(trigger, [], conf)

    for conf in config.get(CONF_ON_MULTI_CLICK, []):
        timings = []
        for tim in conf[CONF_TIMING]:
            timings.append(
        timings = [
            cg.StructInitializer(
                MultiClickTriggerEvent,
                ("state", tim[CONF_STATE]),
                ("min_length", tim[CONF_MIN_LENGTH]),
                ("max_length", tim.get(CONF_MAX_LENGTH, 4294967294)),
            )
            )
            for tim in conf[CONF_TIMING]
        ]
        trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var, timings)
        if CONF_INVALID_COOLDOWN in conf:
            cg.add(trigger.set_invalid_cooldown(conf[CONF_INVALID_COOLDOWN]))
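The autorepeat and multi-click hunks apply the same mechanical refactor that recurs throughout this commit (ota, lcd, esp32_ble_tracker, light, canbus): a build-by-append loop collapses into a comprehension or a single extend. Schematically, with stand-in keys for the CONF_* constants:

configs = [{"delay": 1.0, "time_off": 0.1, "time_on": 0.9}]

# before: grow the list item by item
timings = []
for conf in configs:
    timings.append((conf["delay"], conf["time_off"], conf["time_on"]))

# after: one extend over a generator (or a list comprehension)
timings = []
timings.extend(
    (conf["delay"], conf["time_off"], conf["time_on"]) for conf in configs
)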

@@ -22,8 +22,7 @@ def validate_id(config):
    if CONF_CAN_ID in config:
        can_id = config[CONF_CAN_ID]
        id_ext = config[CONF_USE_EXTENDED_ID]
        if not id_ext:
            if can_id > 0x7FF:
        if not id_ext and can_id > 0x7FF:
            raise cv.Invalid("Standard IDs must be 11 Bit (0x000-0x7ff / 0-2047)")
    return config

@@ -74,8 +74,7 @@ def range_segment_list(input):
    if isinstance(input, list):
        for list_item in input:
            if isinstance(list_item, list):
                for item in list_item:
                    flat_list.append(item)
                flat_list.extend(list_item)
            else:
                flat_list.append(list_item)
    else:

@@ -973,8 +973,10 @@ def _write_idf_component_yml():

# Called by writer.py
def copy_files():
    if CORE.using_arduino:
        if "partitions.csv" not in CORE.data[KEY_ESP32][KEY_EXTRA_BUILD_FILES]:
    if (
        CORE.using_arduino
        and "partitions.csv" not in CORE.data[KEY_ESP32][KEY_EXTRA_BUILD_FILES]
    ):
        write_file_if_changed(
            CORE.relative_build_path("partitions.csv"),
            get_arduino_partition_csv(

@@ -1000,7 +1002,7 @@ def copy_files():
            __version__,
        )

    for _, file in CORE.data[KEY_ESP32][KEY_EXTRA_BUILD_FILES].items():
    for file in CORE.data[KEY_ESP32][KEY_EXTRA_BUILD_FILES].values():
        if file[KEY_PATH].startswith("http"):
            import requests

@@ -140,8 +140,11 @@ VALUE_TYPES = {


def validate_char_on_write(char_config):
    if CONF_ON_WRITE in char_config:
        if not char_config[CONF_WRITE] and not char_config[CONF_WRITE_NO_RESPONSE]:
    if (
        CONF_ON_WRITE in char_config
        and not char_config[CONF_WRITE]
        and not char_config[CONF_WRITE_NO_RESPONSE]
    ):
        raise cv.Invalid(
            f"{CONF_ON_WRITE} requires the {CONF_WRITE} or {CONF_WRITE_NO_RESPONSE} property to be set"
        )

@@ -149,8 +152,7 @@ def validate_char_on_write(char_config):


def validate_descriptor(desc_config):
    if CONF_ON_WRITE in desc_config:
        if not desc_config[CONF_WRITE]:
    if CONF_ON_WRITE in desc_config and not desc_config[CONF_WRITE]:
        raise cv.Invalid(
            f"{CONF_ON_WRITE} requires the {CONF_WRITE} property to be set"
        )

@@ -310,9 +310,7 @@ async def to_code(config):
    for conf in config.get(CONF_ON_BLE_ADVERTISE, []):
        trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
        if CONF_MAC_ADDRESS in conf:
            addr_list = []
            for it in conf[CONF_MAC_ADDRESS]:
                addr_list.append(it.as_hex)
            addr_list = [it.as_hex for it in conf[CONF_MAC_ADDRESS]]
            cg.add(trigger.set_addresses(addr_list))
        await automation.build_automation(trigger, [(ESPBTDeviceConstRef, "x")], conf)
    for conf in config.get(CONF_ON_BLE_SERVICE_DATA_ADVERTISE, []):

@@ -294,8 +294,7 @@ async def to_code(config):
            )
        )

    if get_esp32_variant() == VARIANT_ESP32:
        if CONF_IIR_FILTER in config:
    if get_esp32_variant() == VARIANT_ESP32 and CONF_IIR_FILTER in config:
        cg.add(touch.set_iir_filter(config[CONF_IIR_FILTER]))

    if get_esp32_variant() == VARIANT_ESP32S2 or get_esp32_variant() == VARIANT_ESP32S3:

@@ -245,7 +245,7 @@ async def to_code(config):
    if ver <= cv.Version(2, 3, 0):
        # No ld script support
        ld_script = None
    if ver <= cv.Version(2, 4, 2):
    elif ver <= cv.Version(2, 4, 2):
        # Old ld script path
        ld_script = ld_scripts[0]
    else:

@@ -73,8 +73,7 @@ def ota_esphome_final_validate(config):
        else:
            new_ota_conf.append(ota_conf)

    for port_conf in merged_ota_esphome_configs_by_port.values():
        new_ota_conf.append(port_conf)
    new_ota_conf.extend(merged_ota_esphome_configs_by_port.values())

    full_conf[CONF_OTA] = new_ota_conf
    fv.full_config.set(full_conf)

@@ -112,7 +112,7 @@ def _is_framework_spi_polling_mode_supported():
        return True
    if cv.Version(5, 3, 0) > framework_version >= cv.Version(5, 2, 1):
        return True
    if cv.Version(5, 2, 0) > framework_version >= cv.Version(5, 1, 4):
    if cv.Version(5, 2, 0) > framework_version >= cv.Version(5, 1, 4):  # noqa: SIM103
        return True
    return False
    if CORE.using_arduino:

@@ -1,5 +1,97 @@
from esphome.automation import Trigger, build_automation, validate_automation
import esphome.codegen as cg
from esphome.components.esp8266 import CONF_RESTORE_FROM_FLASH, KEY_ESP8266
import esphome.config_validation as cv
from esphome.const import (
    CONF_ID,
    CONF_TRIGGER_ID,
    PLATFORM_BK72XX,
    PLATFORM_ESP32,
    PLATFORM_ESP8266,
    PLATFORM_LN882X,
    PLATFORM_RTL87XX,
)
from esphome.core import CORE
from esphome.final_validate import full_config

CODEOWNERS = ["@anatoly-savchenkov"]

factory_reset_ns = cg.esphome_ns.namespace("factory_reset")
FactoryResetComponent = factory_reset_ns.class_("FactoryResetComponent", cg.Component)
FastBootTrigger = factory_reset_ns.class_("FastBootTrigger", Trigger, cg.Component)

CONF_MAX_DELAY = "max_delay"
CONF_RESETS_REQUIRED = "resets_required"
CONF_ON_INCREMENT = "on_increment"


def _validate(config):
    if CONF_RESETS_REQUIRED in config:
        return cv.only_on(
            [
                PLATFORM_BK72XX,
                PLATFORM_ESP32,
                PLATFORM_ESP8266,
                PLATFORM_LN882X,
                PLATFORM_RTL87XX,
            ]
        )(config)

    if CONF_ON_INCREMENT in config:
        raise cv.Invalid(
            f"'{CONF_ON_INCREMENT}' requires a value for '{CONF_RESETS_REQUIRED}'"
        )
    return config


CONFIG_SCHEMA = cv.All(
    cv.Schema(
        {
            cv.GenerateID(): cv.declare_id(FactoryResetComponent),
            cv.Optional(CONF_MAX_DELAY, default="10s"): cv.All(
                cv.positive_time_period_seconds,
                cv.Range(min=cv.TimePeriod(milliseconds=1000)),
            ),
            cv.Optional(CONF_RESETS_REQUIRED): cv.positive_not_null_int,
            cv.Optional(CONF_ON_INCREMENT): validate_automation(
                {
                    cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(FastBootTrigger),
                }
            ),
        }
    ).extend(cv.COMPONENT_SCHEMA),
    _validate,
)


def _final_validate(config):
    if CORE.is_esp8266 and CONF_RESETS_REQUIRED in config:
        fconfig = full_config.get()
        if not fconfig.get_config_for_path([KEY_ESP8266, CONF_RESTORE_FROM_FLASH]):
            raise cv.Invalid(
                "'resets_required' needs 'restore_from_flash' to be enabled in the 'esp8266' configuration"
            )
    return config


FINAL_VALIDATE_SCHEMA = _final_validate


async def to_code(config):
    if reset_count := config.get(CONF_RESETS_REQUIRED):
        var = cg.new_Pvariable(
            config[CONF_ID],
            reset_count,
            config[CONF_MAX_DELAY].total_milliseconds,
        )
        await cg.register_component(var, config)
        for conf in config.get(CONF_ON_INCREMENT, []):
            trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
            await build_automation(
                trigger,
                [
                    (cg.uint8, "x"),
                    (cg.uint8, "target"),
                ],
                conf,
            )
esphome/components/factory_reset/factory_reset.cpp (new file): 76 lines
@@ -0,0 +1,76 @@
#include "factory_reset.h"

#include "esphome/core/application.h"
#include "esphome/core/hal.h"
#include "esphome/core/log.h"

#include <cinttypes>

#if !defined(USE_RP2040) && !defined(USE_HOST)

namespace esphome {
namespace factory_reset {

static const char *const TAG = "factory_reset";
static const uint32_t POWER_CYCLES_KEY = 0xFA5C0DE;

static bool was_power_cycled() {
#ifdef USE_ESP32
  return esp_reset_reason() == ESP_RST_POWERON;
#endif
#ifdef USE_ESP8266
  auto reset_reason = EspClass::getResetReason();
  return strcasecmp(reset_reason.c_str(), "power On") == 0 || strcasecmp(reset_reason.c_str(), "external system") == 0;
#endif
#ifdef USE_LIBRETINY
  auto reason = lt_get_reboot_reason();
  return reason == REBOOT_REASON_POWER || reason == REBOOT_REASON_HARDWARE;
#endif
}

void FactoryResetComponent::dump_config() {
  uint8_t count = 0;
  this->flash_.load(&count);
  ESP_LOGCONFIG(TAG, "Factory Reset by Reset:");
  ESP_LOGCONFIG(TAG,
                "  Max interval between resets %" PRIu32 " seconds\n"
                "  Current count: %u\n"
                "  Factory reset after %u resets",
                this->max_interval_ / 1000, count, this->required_count_);
}

void FactoryResetComponent::save_(uint8_t count) {
  this->flash_.save(&count);
  global_preferences->sync();
  this->defer([count, this] { this->increment_callback_.call(count, this->required_count_); });
}

void FactoryResetComponent::setup() {
  this->flash_ = global_preferences->make_preference<uint8_t>(POWER_CYCLES_KEY, true);
  if (was_power_cycled()) {
    uint8_t count = 0;
    this->flash_.load(&count);
    // this is a power on reset or external system reset
    count++;
    if (count == this->required_count_) {
      ESP_LOGW(TAG, "Reset count reached, factory resetting");
      global_preferences->reset();
      // delay to allow log to be sent
      delay(100);  // NOLINT
      App.safe_reboot();  // should not return
    }
    this->save_(count);
    ESP_LOGD(TAG, "Power on reset detected, incremented count to %u", count);
    this->set_timeout(this->max_interval_, [this]() {
      ESP_LOGD(TAG, "No reset in the last %" PRIu32 " seconds, resetting count", this->max_interval_ / 1000);
      this->save_(0);  // reset count
    });
  } else {
    this->save_(0);  // reset count if not a power cycle
  }
}

}  // namespace factory_reset
}  // namespace esphome

#endif  // !defined(USE_RP2040) && !defined(USE_HOST)
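setup() above is a small persisted state machine: only power-on or external resets increment the counter, hitting required_count wipes preferences and reboots, and max_interval elapsing without another reset clears the count. A host-side Python simulation of those rules (illustrative; flash storage and the timeout reduced to plain calls):

class ResetCounter:
    def __init__(self, required_count: int, stored_count: int = 0) -> None:
        self.required_count = required_count
        self.count = stored_count  # persisted in flash on the device

    def on_boot(self, power_cycled: bool) -> str:
        if not power_cycled:  # software reset, brown-out, ...
            self.count = 0
            return "count cleared"
        self.count += 1
        if self.count == self.required_count:
            self.count = 0  # global_preferences->reset() + safe_reboot()
            return "factory reset"
        return f"count={self.count}, waiting"

    def on_timeout(self) -> str:  # fires max_interval after boot
        self.count = 0
        return "count cleared"

c = ResetCounter(required_count=3)
assert c.on_boot(True) == "count=1, waiting"
assert c.on_boot(True) == "count=2, waiting"
assert c.on_boot(True) == "factory reset"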
esphome/components/factory_reset/factory_reset.h (new file): 43 lines
@@ -0,0 +1,43 @@
#pragma once

#include "esphome/core/component.h"
#include "esphome/core/automation.h"
#include "esphome/core/preferences.h"
#if !defined(USE_RP2040) && !defined(USE_HOST)

#ifdef USE_ESP32
#include <esp_system.h>
#endif

namespace esphome {
namespace factory_reset {
class FactoryResetComponent : public Component {
 public:
  FactoryResetComponent(uint8_t required_count, uint32_t max_interval)
      : required_count_(required_count), max_interval_(max_interval) {}

  void dump_config() override;
  void setup() override;
  void add_increment_callback(std::function<void(uint8_t, uint8_t)> &&callback) {
    this->increment_callback_.add(std::move(callback));
  }

 protected:
  ~FactoryResetComponent() = default;
  void save_(uint8_t count);
  ESPPreferenceObject flash_{};  // saves the number of fast power cycles
  uint8_t required_count_;       // The number of boot attempts before fast boot is enabled
  uint32_t max_interval_;        // max interval between power cycles
  CallbackManager<void(uint8_t, uint8_t)> increment_callback_{};
};

class FastBootTrigger : public Trigger<uint8_t, uint8_t> {
 public:
  explicit FastBootTrigger(FactoryResetComponent *parent) {
    parent->add_increment_callback([this](uint8_t current, uint8_t target) { this->trigger(current, target); });
  }
};
}  // namespace factory_reset
}  // namespace esphome

#endif  // !defined(USE_RP2040) && !defined(USE_HOST)

@@ -55,9 +55,7 @@ CONFIG_SCHEMA = cv.All(
async def to_code(config):
    var = await fastled_base.new_fastled_light(config)

    rgb_order = cg.RawExpression(
        config[CONF_RGB_ORDER] if CONF_RGB_ORDER in config else "RGB"
    )
    rgb_order = cg.RawExpression(config.get(CONF_RGB_ORDER, "RGB"))
    data_rate = None

    if CONF_DATA_RATE in config:

@@ -84,7 +84,6 @@ CONFIG_SCHEMA = cv.All(
    )
    .extend(cv.polling_component_schema("20s"))
    .extend(uart.UART_DEVICE_SCHEMA),
    cv.only_with_arduino,
)
FINAL_VALIDATE_SCHEMA = uart.final_validate_device_schema("gps", require_rx=True)

@@ -123,4 +122,9 @@ async def to_code(config):
        cg.add(var.set_hdop_sensor(sens))

    # https://platformio.org/lib/show/1655/TinyGPSPlus
    cg.add_library("mikalhart/TinyGPSPlus", "1.1.0")
    # Using fork of TinyGPSPlus patched to build on ESP-IDF
    cg.add_library(
        "TinyGPSPlus",
        None,
        "https://github.com/esphome/TinyGPSPlus.git#v1.1.0",
    )

@@ -1,5 +1,3 @@
#ifdef USE_ARDUINO

#include "gps.h"
#include "esphome/core/log.h"

@@ -22,63 +20,69 @@ void GPS::dump_config() {
}

void GPS::update() {
  if (this->latitude_sensor_ != nullptr)
  if (this->latitude_sensor_ != nullptr) {
    this->latitude_sensor_->publish_state(this->latitude_);
  }

  if (this->longitude_sensor_ != nullptr)
  if (this->longitude_sensor_ != nullptr) {
    this->longitude_sensor_->publish_state(this->longitude_);
  }

  if (this->speed_sensor_ != nullptr)
  if (this->speed_sensor_ != nullptr) {
    this->speed_sensor_->publish_state(this->speed_);
  }

  if (this->course_sensor_ != nullptr)
  if (this->course_sensor_ != nullptr) {
    this->course_sensor_->publish_state(this->course_);
  }

  if (this->altitude_sensor_ != nullptr)
  if (this->altitude_sensor_ != nullptr) {
    this->altitude_sensor_->publish_state(this->altitude_);
  }

  if (this->satellites_sensor_ != nullptr)
  if (this->satellites_sensor_ != nullptr) {
    this->satellites_sensor_->publish_state(this->satellites_);
  }

  if (this->hdop_sensor_ != nullptr)
  if (this->hdop_sensor_ != nullptr) {
    this->hdop_sensor_->publish_state(this->hdop_);
  }
}

void GPS::loop() {
  while (this->available() > 0 && !this->has_time_) {
    if (this->tiny_gps_.encode(this->read())) {
    if (!this->tiny_gps_.encode(this->read())) {
      return;
    }
    if (this->tiny_gps_.location.isUpdated()) {
      this->latitude_ = this->tiny_gps_.location.lat();
      this->longitude_ = this->tiny_gps_.location.lng();

      ESP_LOGD(TAG, "Location:");
      ESP_LOGD(TAG, "  Lat: %.6f °", this->latitude_);
      ESP_LOGD(TAG, "  Lon: %.6f °", this->longitude_);
      ESP_LOGV(TAG, "Latitude, Longitude: %.6f°, %.6f°", this->latitude_, this->longitude_);
    }

    if (this->tiny_gps_.speed.isUpdated()) {
      this->speed_ = this->tiny_gps_.speed.kmph();
      ESP_LOGD(TAG, "Speed: %.3f km/h", this->speed_);
      ESP_LOGV(TAG, "Speed: %.3f km/h", this->speed_);
    }

    if (this->tiny_gps_.course.isUpdated()) {
      this->course_ = this->tiny_gps_.course.deg();
      ESP_LOGD(TAG, "Course: %.2f °", this->course_);
      ESP_LOGV(TAG, "Course: %.2f°", this->course_);
    }

    if (this->tiny_gps_.altitude.isUpdated()) {
      this->altitude_ = this->tiny_gps_.altitude.meters();
      ESP_LOGD(TAG, "Altitude: %.2f m", this->altitude_);
      ESP_LOGV(TAG, "Altitude: %.2f m", this->altitude_);
    }

    if (this->tiny_gps_.satellites.isUpdated()) {
      this->satellites_ = this->tiny_gps_.satellites.value();
      ESP_LOGD(TAG, "Satellites: %d", this->satellites_);
      ESP_LOGV(TAG, "Satellites: %d", this->satellites_);
    }

    if (this->tiny_gps_.hdop.isUpdated()) {
      this->hdop_ = this->tiny_gps_.hdop.hdop();
      ESP_LOGD(TAG, "HDOP: %.3f", this->hdop_);
      ESP_LOGV(TAG, "HDOP: %.3f", this->hdop_);
    }

    for (auto *listener : this->listeners_) {

@@ -86,9 +90,6 @@ void GPS::loop() {
      }
    }
  }
}

}  // namespace gps
}  // namespace esphome

#endif  // USE_ARDUINO

@@ -1,10 +1,8 @@
#pragma once

#ifdef USE_ARDUINO

#include "esphome/core/component.h"
#include "esphome/components/uart/uart.h"
#include "esphome/components/sensor/sensor.h"
#include "esphome/components/uart/uart.h"
#include "esphome/core/component.h"
#include <TinyGPSPlus.h>

#include <vector>

@@ -53,8 +51,9 @@ class GPS : public PollingComponent, public uart::UARTDevice {
  float speed_{NAN};
  float course_{NAN};
  float altitude_{NAN};
  uint16_t satellites_{0};
  float hdop_{NAN};
  uint16_t satellites_{0};
  bool has_time_{false};

  sensor::Sensor *latitude_sensor_{nullptr};
  sensor::Sensor *longitude_sensor_{nullptr};

@@ -64,12 +63,9 @@ class GPS : public PollingComponent, public uart::UARTDevice {
  sensor::Sensor *satellites_sensor_{nullptr};
  sensor::Sensor *hdop_sensor_{nullptr};

  bool has_time_{false};
  TinyGPSPlus tiny_gps_;
  std::vector<GPSListener *> listeners_{};
};

}  // namespace gps
}  // namespace esphome

#endif  // USE_ARDUINO

@@ -1,5 +1,3 @@
#ifdef USE_ARDUINO

#include "gps_time.h"
#include "esphome/core/log.h"

@@ -9,12 +7,10 @@ namespace gps {
static const char *const TAG = "gps.time";

void GPSTime::from_tiny_gps_(TinyGPSPlus &tiny_gps) {
  if (!tiny_gps.time.isValid() || !tiny_gps.date.isValid())
    return;
  if (!tiny_gps.time.isUpdated() || !tiny_gps.date.isUpdated())
    return;
  if (tiny_gps.date.year() < 2019)
  if (!tiny_gps.time.isValid() || !tiny_gps.date.isValid() || !tiny_gps.time.isUpdated() ||
      !tiny_gps.date.isUpdated() || tiny_gps.date.year() < 2025) {
    return;
  }

  ESPTime val{};
  val.year = tiny_gps.date.year();

@@ -34,5 +30,3 @@ void GPSTime::from_tiny_gps_(TinyGPSPlus &tiny_gps) {

}  // namespace gps
}  // namespace esphome

#endif  // USE_ARDUINO

@@ -1,10 +1,8 @@
#pragma once

#ifdef USE_ARDUINO

#include "esphome/core/component.h"
#include "esphome/components/time/real_time_clock.h"
#include "esphome/components/gps/gps.h"
#include "esphome/components/time/real_time_clock.h"
#include "esphome/core/component.h"

namespace esphome {
namespace gps {

@@ -13,9 +11,10 @@ class GPSTime : public time::RealTimeClock, public GPSListener {
 public:
  void update() override { this->from_tiny_gps_(this->get_tiny_gps()); };
  void on_update(TinyGPSPlus &tiny_gps) override {
    if (!this->has_time_)
    if (!this->has_time_) {
      this->from_tiny_gps_(tiny_gps);
    }
  }

 protected:
  void from_tiny_gps_(TinyGPSPlus &tiny_gps);

@@ -24,5 +23,3 @@ class GPSTime : public time::RealTimeClock, public GPSListener {

}  // namespace gps
}  // namespace esphome

#endif  // USE_ARDUINO

@@ -116,7 +116,7 @@ GRAPH_SCHEMA = cv.Schema(


def _relocate_fields_to_subfolder(config, subfolder, subschema):
    fields = [k.schema for k in subschema.schema.keys()]
    fields = [k.schema for k in subschema.schema]
    fields.remove(CONF_ID)
    if subfolder in config:
        # Ensure no ambiguous fields in base of config

@@ -70,8 +70,7 @@ def validate_url(value):
def validate_ssl_verification(config):
    error_message = ""

    if CORE.is_esp32:
        if not CORE.using_esp_idf and config[CONF_VERIFY_SSL]:
    if CORE.is_esp32 and not CORE.using_esp_idf and config[CONF_VERIFY_SSL]:
        error_message = "ESPHome supports certificate verification only via ESP-IDF"

    if CORE.is_rp2040 and config[CONF_VERIFY_SSL]:

@@ -66,8 +66,7 @@ PROTOCOL_NAMES = {

def _validate(config):
    for conf, models in SUPPORTED_OPTIONS.items():
        if conf in config:
            if config[CONF_MODEL] not in models:
        if conf in config and config[CONF_MODEL] not in models:
            raise cv.Invalid(
                f"{conf} is only available on {' and '.join(models)}, not {config[CONF_MODEL]}"
            )

@@ -243,10 +243,7 @@ def _final_validate(_):


def use_legacy():
    if CORE.using_esp_idf:
        if not _use_legacy_driver:
            return False
    return True
    return not (CORE.using_esp_idf and not _use_legacy_driver)


FINAL_VALIDATE_SCHEMA = _final_validate

@@ -44,8 +44,7 @@ PDM_VARIANTS = [esp32.const.VARIANT_ESP32, esp32.const.VARIANT_ESP32S3]

def _validate_esp32_variant(config):
    variant = esp32.get_esp32_variant()
    if config[CONF_ADC_TYPE] == "external":
        if config[CONF_PDM]:
            if variant not in PDM_VARIANTS:
        if config[CONF_PDM] and variant not in PDM_VARIANTS:
            raise cv.Invalid(f"{variant} does not support PDM")
        return config
    if config[CONF_ADC_TYPE] == "internal":

@@ -122,8 +121,7 @@ CONFIG_SCHEMA = cv.All(


def _final_validate(config):
    if not use_legacy():
        if config[CONF_ADC_TYPE] == "internal":
    if not use_legacy() and config[CONF_ADC_TYPE] == "internal":
        raise cv.Invalid("Internal ADC is only compatible with legacy i2s driver.")

@@ -138,8 +138,9 @@ def _validate(config):
    ]:
        raise cv.Invalid("Selected model can't run on ESP8266.")

    if model == "CUSTOM":
        if CONF_INIT_SEQUENCE not in config or CONF_DIMENSIONS not in config:
    if model == "CUSTOM" and (
        CONF_INIT_SEQUENCE not in config or CONF_DIMENSIONS not in config
    ):
        raise cv.Invalid("CUSTOM model requires init_sequence and dimensions")

    return config
@ -1,5 +1,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import contextlib
|
||||
import hashlib
|
||||
import io
|
||||
import logging
|
||||
@ -174,8 +175,7 @@ class ImageGrayscale(ImageEncoder):
|
||||
b = 1
|
||||
if self.invert_alpha:
|
||||
b ^= 0xFF
|
||||
if self.transparency == CONF_ALPHA_CHANNEL:
|
||||
if a != 0xFF:
|
||||
if self.transparency == CONF_ALPHA_CHANNEL and a != 0xFF:
|
||||
b = a
|
||||
self.data[self.index] = b
|
||||
self.index += 1
|
||||
@ -672,10 +672,8 @@ async def write_image(config, all_frames=False):
|
||||
invert_alpha = config[CONF_INVERT_ALPHA]
|
||||
frame_count = 1
|
||||
if all_frames:
|
||||
try:
|
||||
with contextlib.suppress(AttributeError):
|
||||
frame_count = image.n_frames
|
||||
except AttributeError:
|
||||
pass
|
||||
if frame_count <= 1:
|
||||
_LOGGER.warning("Image file %s has no animation frames", path)
|
||||
|
||||
|
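The image hunks swap a `try/except AttributeError/pass` for `contextlib.suppress` (ruff SIM105), supported by the new `import contextlib` at the top of the file. One behavioral detail worth knowing: `suppress` exits the `with` block at the first matching exception, so in the hunk above `frame_count` simply keeps its default when the image type has no `n_frames`. A runnable sketch with a stand-in image class (not PIL):

import contextlib

class StillImage:
    """Stand-in for a single-frame image object with no n_frames attribute."""

frame_count = 1
image = StillImage()

# Equivalent to try/except AttributeError/pass, with the intent stated up front.
with contextlib.suppress(AttributeError):
    frame_count = image.n_frames  # raises AttributeError, leaving the default

assert frame_count == 1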
@ -27,8 +27,7 @@ def validate_logger(config):
    logger_conf = fv.full_config.get()[CONF_LOGGER]
    if logger_conf[CONF_BAUD_RATE] == 0:
        raise cv.Invalid("improv_serial requires the logger baud_rate to be not 0")
    if CORE.using_esp_idf:
        if (
    if CORE.using_esp_idf and (
        logger_conf[CONF_HARDWARE_UART] == USB_CDC
        and get_esp32_variant() == VARIANT_ESP32S3
    ):

@ -78,11 +78,8 @@ def validate_model_config(config):
    model = config[CONF_MODEL]

    for key in config:
        if key in SENSOR_MODEL_OPTIONS:
            if model not in SENSOR_MODEL_OPTIONS[key]:
                raise cv.Invalid(
                    f"Device model '{model}' does not support '{key}' sensor"
                )
        if key in SENSOR_MODEL_OPTIONS and model not in SENSOR_MODEL_OPTIONS[key]:
            raise cv.Invalid(f"Device model '{model}' does not support '{key}' sensor")

    tempco = config[CONF_TEMPERATURE_COEFFICIENT]
    if tempco > 0 and model not in ["INA228", "INA229"]:

@ -41,9 +41,7 @@ CONFIG_SCHEMA = lcd_base.LCD_SCHEMA.extend(
async def to_code(config):
    var = cg.new_Pvariable(config[CONF_ID])
    await lcd_base.setup_lcd_display(var, config)
    pins_ = []
    for conf in config[CONF_DATA_PINS]:
        pins_.append(await cg.gpio_pin_expression(conf))
    pins_ = [await cg.gpio_pin_expression(conf) for conf in config[CONF_DATA_PINS]]
    cg.add(var.set_data_pins(*pins_))
    enable = await cg.gpio_pin_expression(config[CONF_ENABLE_PIN])
    cg.add(var.set_enable_pin(enable))
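The lcd_gpio hunk folds an append loop into a list comprehension (ruff PERF401), and it may keep the `await` inside the comprehension because the enclosing `to_code` is an async function — the awaits still execute sequentially, one per element, exactly as the loop did. A runnable sketch; `gpio_pin_expression` here is a stand-in coroutine, not the esphome.codegen helper:

import asyncio

async def gpio_pin_expression(conf: str) -> str:
    # Stand-in for the awaitable codegen helper.
    await asyncio.sleep(0)
    return f"pin<{conf}>"

async def main() -> None:
    data_pins = ["GPIO4", "GPIO5"]
    # PERF401: build the list in one comprehension instead of append() in a loop.
    pins = [await gpio_pin_expression(conf) for conf in data_pins]
    assert pins == ["pin<GPIO4>", "pin<GPIO5>"]

asyncio.run(main())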
@ -56,7 +56,8 @@ async def to_code(config):
        sens = await text_sensor.new_text_sensor(mac_address_config)
        cg.add(ld2450_component.set_mac_text_sensor(sens))
    for n in range(MAX_TARGETS):
        if direction_conf := config.get(f"target_{n + 1}"):
            if direction_config := direction_conf.get(CONF_DIRECTION):
        if (direction_conf := config.get(f"target_{n + 1}")) and (
            direction_config := direction_conf.get(CONF_DIRECTION)
        ):
            sens = await text_sensor.new_text_sensor(direction_config)
            cg.add(ld2450_component.set_direction_text_sensor(n, sens))
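The ld2450 hunk merges two walrus-guarded ifs into one condition. This is safe because `and` short-circuits: the second assignment expression only evaluates when the first produced a truthy value, exactly as the nested version did. A runnable sketch with an illustrative config dict (keys mirror the hunk but are not real ESPHome schema):

config = {"target_1": {"direction": {"name": "t1 direction"}}, "target_2": {}}

for n in range(2):
    # Both walrus assignments happen inside one condition; `and` short-circuits,
    # so the second .get() only runs when the first returned a truthy dict.
    if (target_conf := config.get(f"target_{n + 1}")) and (
        direction_config := target_conf.get("direction")
    ):
        print(n, direction_config["name"])  # only target_1 prints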
@ -291,9 +291,7 @@ async def random_effect_to_code(config, effect_id):
    )
async def strobe_effect_to_code(config, effect_id):
    var = cg.new_Pvariable(effect_id, config[CONF_NAME])
    colors = []
    for color in config.get(CONF_COLORS, []):
        colors.append(
    colors = [
        cg.StructInitializer(
            StrobeLightEffectColor,
            (

@ -315,7 +313,8 @@ async def strobe_effect_to_code(config, effect_id):
            ("duration", color[CONF_DURATION]),
            ("transition_length", color[CONF_TRANSITION_LENGTH]),
        )
        for color in config.get(CONF_COLORS, [])
    ]
    cg.add(var.set_colors(colors))
    return var

@ -404,9 +403,7 @@ async def addressable_color_wipe_effect_to_code(config, effect_id):
    var = cg.new_Pvariable(effect_id, config[CONF_NAME])
    cg.add(var.set_add_led_interval(config[CONF_ADD_LED_INTERVAL]))
    cg.add(var.set_reverse(config[CONF_REVERSE]))
    colors = []
    for color in config.get(CONF_COLORS, []):
        colors.append(
    colors = [
        cg.StructInitializer(
            AddressableColorWipeEffectColor,
            ("r", int(round(color[CONF_RED] * 255))),

@ -417,7 +414,8 @@ async def addressable_color_wipe_effect_to_code(config, effect_id):
            ("num_leds", color[CONF_NUM_LEDS]),
            ("gradient", color[CONF_GRADIENT]),
        )
        for color in config.get(CONF_COLORS, [])
    ]
    cg.add(var.set_colors(colors))
    return var
@ -526,7 +524,7 @@ def validate_effects(allowed_effects):
        errors = []
        names = set()
        for i, x in enumerate(value):
            key = next(it for it in x.keys())
            key = next(it for it in x)
            if key not in allowed_effects:
                errors.append(
                    cv.Invalid(
@ -346,8 +346,7 @@ async def to_code(config):
    if config.get(CONF_ESP8266_STORE_LOG_STRINGS_IN_FLASH):
        cg.add_build_flag("-DUSE_STORE_LOG_STR_IN_FLASH")

    if CORE.using_arduino:
        if config[CONF_HARDWARE_UART] == USB_CDC:
    if CORE.using_arduino and config[CONF_HARDWARE_UART] == USB_CDC:
        cg.add_build_flag("-DARDUINO_USB_CDC_ON_BOOT=1")
    if CORE.is_esp32 and get_esp32_variant() in (
        VARIANT_ESP32C3,
@ -201,8 +201,7 @@ def final_validation(configs):
    multi_conf_validate(configs)
    global_config = full_config.get()
    for config in configs:
        if pages := config.get(CONF_PAGES):
            if all(p[df.CONF_SKIP] for p in pages):
        if (pages := config.get(CONF_PAGES)) and all(p[df.CONF_SKIP] for p in pages):
            raise cv.Invalid("At least one page must not be skipped")
        for display_id in config[df.CONF_DISPLAYS]:
            path = global_config.get_path_for_id(display_id)[:-1]
@ -28,8 +28,9 @@ CONF_HAS_PULLDOWNS = "has_pulldowns"


def check_keys(obj):
    if CONF_KEYS in obj:
        if len(obj[CONF_KEYS]) != len(obj[CONF_ROWS]) * len(obj[CONF_COLUMNS]):
    if CONF_KEYS in obj and len(obj[CONF_KEYS]) != len(obj[CONF_ROWS]) * len(
        obj[CONF_COLUMNS]
    ):
        raise cv.Invalid("The number of key codes must equal the number of buttons")
    return obj
@ -124,8 +124,7 @@ async def to_code(config):

    if task_stack_in_psram := config.get(CONF_TASK_STACK_IN_PSRAM):
        cg.add(var.set_task_stack_in_psram(task_stack_in_psram))
        if task_stack_in_psram:
            if config[CONF_TASK_STACK_IN_PSRAM]:
        if task_stack_in_psram and config[CONF_TASK_STACK_IN_PSRAM]:
            esp32.add_idf_sdkconfig_option(
                "CONFIG_SPIRAM_ALLOW_STACK_EXTERNAL_MEMORY", True
            )
@ -63,8 +63,10 @@ def _validate(config):
            raise cv.Invalid(
                f"{axis}: {CONF_RESOLUTION} cannot be {res} with {CONF_TEMPERATURE_COMPENSATION} enabled"
            )
    if config[CONF_HALLCONF] == 0xC:
        if (config[CONF_OVERSAMPLING], config[CONF_FILTER]) in [(0, 0), (1, 0), (0, 1)]:
    if config[CONF_HALLCONF] == 0xC and (
        config[CONF_OVERSAMPLING],
        config[CONF_FILTER],
    ) in [(0, 0), (1, 0), (0, 1)]:
        raise cv.Invalid(
            f"{CONF_OVERSAMPLING}=={config[CONF_OVERSAMPLING]} and {CONF_FILTER}=={config[CONF_FILTER]} not allowed with {CONF_HALLCONF}=={config[CONF_HALLCONF]:#02x}"
        )
@ -56,8 +56,9 @@ def _final_validate(config):
    for binary_sensor in binary_sensors:
        if binary_sensor.get(CONF_MPR121_ID) == config[CONF_ID]:
            max_touch_channel = max(max_touch_channel, binary_sensor[CONF_CHANNEL])
    if max_touch_channel_in_config := config.get(CONF_MAX_TOUCH_CHANNEL):
        if max_touch_channel != max_touch_channel_in_config:
    if (
        max_touch_channel_in_config := config.get(CONF_MAX_TOUCH_CHANNEL)
    ) and max_touch_channel != max_touch_channel_in_config:
        raise cv.Invalid(
            "Max touch channel must equal the highest binary sensor channel or be removed for auto calculation",
            path=[CONF_MAX_TOUCH_CHANNEL],
@ -27,7 +27,7 @@ void MQTTButtonComponent::setup() {
}
void MQTTButtonComponent::dump_config() {
  ESP_LOGCONFIG(TAG, "MQTT Button '%s': ", this->button_->get_name().c_str());
  LOG_MQTT_COMPONENT(true, true);
  LOG_MQTT_COMPONENT(false, true);
}

void MQTTButtonComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig &config) {
@ -25,9 +25,9 @@ async def new_openthermnumber(config: dict[str, Any]) -> cg.Pvariable:
    await cg.register_component(var, config)
    input.generate_setters(var, config)

    if (initial_value := config.get(CONF_INITIAL_VALUE, None)) is not None:
    if (initial_value := config.get(CONF_INITIAL_VALUE)) is not None:
        cg.add(var.set_initial_value(initial_value))
    if (restore_value := config.get(CONF_RESTORE_VALUE, None)) is not None:
    if (restore_value := config.get(CONF_RESTORE_VALUE)) is not None:
        cg.add(var.set_restore_value(restore_value))

    return var
@ -79,8 +79,7 @@ def set_sdkconfig_options(config):
        "CONFIG_OPENTHREAD_NETWORK_PSKC", f"{pskc:X}".lower()
    )

    if force_dataset := config.get(CONF_FORCE_DATASET):
        if force_dataset:
    if config.get(CONF_FORCE_DATASET):
        cg.add_define("USE_OPENTHREAD_FORCE_DATASET")

    add_idf_sdkconfig_option("CONFIG_OPENTHREAD_DNS64_CLIENT", True)
@ -89,8 +89,9 @@ def validate_(config):
        raise cv.Invalid("No sensors or binary sensors to encrypt")
    elif config[CONF_ROLLING_CODE_ENABLE]:
        raise cv.Invalid("Rolling code requires an encryption key")
    if config[CONF_PING_PONG_ENABLE]:
        if not any(CONF_ENCRYPTION in p for p in config.get(CONF_PROVIDERS) or ()):
    if config[CONF_PING_PONG_ENABLE] and not any(
        CONF_ENCRYPTION in p for p in config.get(CONF_PROVIDERS) or ()
    ):
        raise cv.Invalid("Ping-pong requires at least one encrypted provider")
    return config
@ -273,7 +273,7 @@ CONFIG_SCHEMA = PIPSOLAR_COMPONENT_SCHEMA.extend(
async def to_code(config):
    paren = await cg.get_variable(config[CONF_PIPSOLAR_ID])

    for type, _ in TYPES.items():
    for type in TYPES:
        if type in config:
            conf = config[type]
            sens = await sensor.new_sensor(conf)
@ -49,8 +49,11 @@ def validate_internal_filter(value):
            [CONF_USE_PCNT],
        )

    if CORE.is_esp32 and use_pcnt:
        if value.get(CONF_INTERNAL_FILTER).total_microseconds > 13:
    if (
        CORE.is_esp32
        and use_pcnt
        and value.get(CONF_INTERNAL_FILTER).total_microseconds > 13
    ):
        raise cv.Invalid(
            "Maximum internal filter value when using ESP32 hardware PCNT is 13us",
            [CONF_INTERNAL_FILTER],
@ -73,8 +73,7 @@ def map_sequence(value):

def _validate(config):
    chip = DriverChip.chips[config[CONF_MODEL]]
    if not chip.initsequence:
        if CONF_INIT_SEQUENCE not in config:
    if not chip.initsequence and CONF_INIT_SEQUENCE not in config:
        raise cv.Invalid(f"{chip.name} model requires init_sequence")
    return config
@ -24,8 +24,7 @@ QwiicPIRComponent = qwiic_pir_ns.class_(


def validate_no_debounce_unless_native(config):
    if CONF_DEBOUNCE in config:
        if config[CONF_DEBOUNCE_MODE] != "NATIVE":
    if CONF_DEBOUNCE in config and config[CONF_DEBOUNCE_MODE] != "NATIVE":
        raise cv.Invalid("debounce can only be set if debounce_mode is NATIVE")
    return config
@ -1,4 +1,5 @@
#include "rc522.h"
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"

// Based on:

@ -13,30 +14,6 @@ static const char *const TAG = "rc522";

static const uint8_t RESET_COUNT = 5;

std::string format_buffer(uint8_t *b, uint8_t len) {
  char buf[32];
  int offset = 0;
  for (uint8_t i = 0; i < len; i++) {
    const char *format = "%02X";
    if (i + 1 < len)
      format = "%02X-";
    offset += sprintf(buf + offset, format, b[i]);
  }
  return std::string(buf);
}

std::string format_uid(std::vector<uint8_t> &uid) {
  char buf[32];
  int offset = 0;
  for (size_t i = 0; i < uid.size(); i++) {
    const char *format = "%02X";
    if (i + 1 < uid.size())
      format = "%02X-";
    offset += sprintf(buf + offset, format, uid[i]);
  }
  return std::string(buf);
}

void RC522::setup() {
  state_ = STATE_SETUP;
  // Pull device out of power down / reset state.

@ -215,7 +192,7 @@ void RC522::loop() {
      ESP_LOGV(TAG, "STATE_READ_SERIAL_DONE -> TIMEOUT (no tag present) %d", status);
    } else {
      ESP_LOGW(TAG, "Unexpected response. Read status is %d. Read bytes: %d (%s)", status, back_length_,
               format_buffer(buffer_, 9).c_str());
               format_hex_pretty(buffer_, back_length_, '-', false).c_str());
    }

    state_ = STATE_DONE;

@ -239,7 +216,7 @@ void RC522::loop() {

      std::vector<uint8_t> rfid_uid(std::begin(uid_buffer_), std::begin(uid_buffer_) + uid_idx_);
      uid_idx_ = 0;
      // ESP_LOGD(TAG, "Processing '%s'", format_uid(rfid_uid).c_str());
      // ESP_LOGD(TAG, "Processing '%s'", format_hex_pretty(rfid_uid, '-', false).c_str());
      pcd_antenna_off_();
      state_ = STATE_INIT;  // scan again on next update
      bool report = true;

@ -260,13 +237,13 @@ void RC522::loop() {
        trigger->process(rfid_uid);

      if (report) {
        ESP_LOGD(TAG, "Found new tag '%s'", format_uid(rfid_uid).c_str());
        ESP_LOGD(TAG, "Found new tag '%s'", format_hex_pretty(rfid_uid, '-', false).c_str());
      }
      break;
    }
    case STATE_DONE: {
      if (!this->current_uid_.empty()) {
        ESP_LOGV(TAG, "Tag '%s' removed", format_uid(this->current_uid_).c_str());
        ESP_LOGV(TAG, "Tag '%s' removed", format_hex_pretty(this->current_uid_, '-', false).c_str());
        for (auto *trigger : this->triggers_ontagremoved_)
          trigger->process(this->current_uid_);
      }

@ -361,7 +338,7 @@ void RC522::pcd_clear_register_bit_mask_(PcdRegister reg,  ///< The register to
 * @return STATUS_OK on success, STATUS_??? otherwise.
 */
void RC522::pcd_transceive_data_(uint8_t send_len) {
  ESP_LOGV(TAG, "PCD TRANSCEIVE: RX: %s", format_buffer(buffer_, send_len).c_str());
  ESP_LOGV(TAG, "PCD TRANSCEIVE: RX: %s", format_hex_pretty(buffer_, send_len, '-', false).c_str());
  delayMicroseconds(1000);  // we need 1 ms delay between antenna on and those communication commands
  send_len_ = send_len;
  // Prepare values for BitFramingReg

@ -435,7 +412,8 @@ RC522::StatusCode RC522::await_transceive_() {
                 error_reg_value);  // TODO: is this always due to collissions?
    return STATUS_ERROR;
  }
  ESP_LOGV(TAG, "received %d bytes: %s", back_length_, format_buffer(buffer_ + send_len_, back_length_).c_str());
  ESP_LOGV(TAG, "received %d bytes: %s", back_length_,
           format_hex_pretty(buffer_ + send_len_, back_length_, '-', false).c_str());

  return STATUS_OK;
}

@ -499,7 +477,7 @@ bool RC522BinarySensor::process(std::vector<uint8_t> &data) {
  this->found_ = result;
  return result;
}
void RC522Trigger::process(std::vector<uint8_t> &data) { this->trigger(format_uid(data)); }
void RC522Trigger::process(std::vector<uint8_t> &data) { this->trigger(format_hex_pretty(data, '-', false)); }

}  // namespace rc522
}  // namespace esphome
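The rc522.cpp hunks delete two hand-rolled `sprintf`-into-`char buf[32]` helpers and route all UID/buffer logging through the shared `format_hex_pretty()` from esphome's core helpers (hence the new `esphome/core/helpers.h` include). Judging by the call sites, the helper renders bytes as uppercase hex pairs joined by the given separator; a rough Python model of that output format — illustrative only, not the actual C++ implementation:

def format_hex_pretty(data: bytes, separator: str = "-") -> str:
    # Rough model of the log output at these call sites:
    # uppercase two-digit hex bytes joined by a separator, e.g. "1A-2B-3C".
    return separator.join(f"{byte:02X}" for byte in data)

assert format_hex_pretty(bytes([0x1A, 0x2B, 0x3C])) == "1A-2B-3C"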
@ -1062,8 +1062,7 @@ def validate_raw_alternating(value):
    last_negative = None
    for i, val in enumerate(value):
        this_negative = val < 0
        if i != 0:
            if this_negative == last_negative:
        if i != 0 and this_negative == last_negative:
            raise cv.Invalid(
                f"Values must alternate between being positive and negative, please see index {i} and {i + 1}",
                [i],
@ -90,8 +90,7 @@ async def to_code(config):

    if task_stack_in_psram := config.get(CONF_TASK_STACK_IN_PSRAM):
        cg.add(var.set_task_stack_in_psram(task_stack_in_psram))
        if task_stack_in_psram:
            if config[CONF_TASK_STACK_IN_PSRAM]:
        if task_stack_in_psram and config[CONF_TASK_STACK_IN_PSRAM]:
            esp32.add_idf_sdkconfig_option(
                "CONFIG_SPIRAM_ALLOW_STACK_EXTERNAL_MEMORY", True
            )
@ -140,7 +140,6 @@ async def to_code(config):
    cg.add(var.set_vsync_front_porch(config[CONF_VSYNC_FRONT_PORCH]))
    cg.add(var.set_pclk_inverted(config[CONF_PCLK_INVERTED]))
    cg.add(var.set_pclk_frequency(config[CONF_PCLK_FREQUENCY]))
    index = 0
    dpins = []
    if CONF_RED in config[CONF_DATA_PINS]:
        red_pins = config[CONF_DATA_PINS][CONF_RED]

@ -158,10 +157,9 @@ async def to_code(config):
        dpins = dpins[8:16] + dpins[0:8]
    else:
        dpins = config[CONF_DATA_PINS]
    for pin in dpins:
    for index, pin in enumerate(dpins):
        data_pin = await cg.gpio_pin_expression(pin)
        cg.add(var.add_data_pin(data_pin, index))
        index += 1

    if enable_pin := config.get(CONF_ENABLE_PIN):
        enable = await cg.gpio_pin_expression(enable_pin)
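This to_code hunk (and the near-identical st7701s one below) replaces a manually bumped `index` counter with `enumerate()` — ruff's SIM113 rewrite — which removes both the pre-loop initialisation and the trailing increment. A minimal equivalence check with an illustrative pin list:

dpins = ["GPIO1", "GPIO2", "GPIO3"]

# Before: manual counter bumped at the end of each iteration.
index = 0
pairs_manual = []
for pin in dpins:
    pairs_manual.append((index, pin))
    index += 1

# After (SIM113): enumerate() supplies the counter.
pairs = [(index, pin) for index, pin in enumerate(dpins)]
assert pairs == pairs_manual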
@ -12,7 +12,7 @@ from esphome.const import (
    UNIT_DECIBEL,
)

AUTOLOAD = ["audio"]
AUTO_LOAD = ["audio"]
CODEOWNERS = ["@kahrendt"]
DEPENDENCIES = ["microphone"]
@ -204,10 +204,11 @@ def _validate_pipeline(config):


def _validate_repeated_speaker(config):
    if (announcement_config := config.get(CONF_ANNOUNCEMENT_PIPELINE)) and (
        media_config := config.get(CONF_MEDIA_PIPELINE)
    if (
        (announcement_config := config.get(CONF_ANNOUNCEMENT_PIPELINE))
        and (media_config := config.get(CONF_MEDIA_PIPELINE))
        and announcement_config[CONF_SPEAKER] == media_config[CONF_SPEAKER]
    ):
        if announcement_config[CONF_SPEAKER] == media_config[CONF_SPEAKER]:
        raise cv.Invalid(
            "The announcement and media pipelines cannot use the same speaker. Use the `mixer` speaker component to create two source speakers."
        )
@ -115,9 +115,7 @@ def get_target_platform():


def get_target_variant():
    return (
        CORE.data[KEY_ESP32][KEY_VARIANT] if KEY_VARIANT in CORE.data[KEY_ESP32] else ""
    )
    return CORE.data[KEY_ESP32].get(KEY_VARIANT, "")


# Get a list of available hardware interfaces based on target and variant.

@ -213,9 +211,7 @@ def validate_hw_pins(spi, index=-1):
            return False
        if sdo_pin_no not in pin_set[CONF_MOSI_PIN]:
            return False
        if sdi_pin_no not in pin_set[CONF_MISO_PIN]:
            return False
        return True
        return sdi_pin_no in pin_set[CONF_MISO_PIN]
    return False
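The spi hunks are two more lookup/boolean simplifications: a conditional expression plus membership test becomes `dict.get()` with a default, and a trailing `if/return False/return True` chain collapses into returning the membership test itself. A sketch of the `get_target_variant()` change; `core_data` stands in for `CORE.data[KEY_ESP32]`:

KEY_VARIANT = "variant"
core_data = {}  # stand-in for CORE.data[KEY_ESP32]

# Before: membership test plus conditional expression (two lookups).
variant_old = core_data[KEY_VARIANT] if KEY_VARIANT in core_data else ""

# After: dict.get() with a default performs the same lookup once.
variant_new = core_data.get(KEY_VARIANT, "")
assert variant_old == variant_new == ""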
@ -130,8 +130,8 @@ def validate_sprinkler(config):
        if (
            CONF_PUMP_SWITCH_OFF_DURING_VALVE_OPEN_DELAY in sprinkler_controller
            and CONF_VALVE_OPEN_DELAY not in sprinkler_controller
            and sprinkler_controller[CONF_PUMP_SWITCH_OFF_DURING_VALVE_OPEN_DELAY]
        ):
            if sprinkler_controller[CONF_PUMP_SWITCH_OFF_DURING_VALVE_OPEN_DELAY]:
            raise cv.Invalid(
                f"{CONF_VALVE_OPEN_DELAY} must be defined when {CONF_PUMP_SWITCH_OFF_DURING_VALVE_OPEN_DELAY} is enabled"
            )
@ -42,10 +42,11 @@ SSD1306_MODEL = cv.enum(MODELS, upper=True, space="_")

def _validate(value):
    model = value[CONF_MODEL]
    if model not in ("SSD1305_128X32", "SSD1305_128X64"):
        # Contrast is default value (1.0) while brightness is not
        # Indicates user is using old `brightness` option
        if value[CONF_BRIGHTNESS] != 1.0 and value[CONF_CONTRAST] == 1.0:
    if (
        model not in ("SSD1305_128X32", "SSD1305_128X64")
        and value[CONF_BRIGHTNESS] != 1.0
        and value[CONF_CONTRAST] == 1.0
    ):
        raise cv.Invalid(
            "SSD1306/SH1106 no longer accepts brightness option, "
            'please use "contrast" instead.'
@ -189,7 +189,6 @@ async def to_code(config):
    cg.add(var.set_vsync_front_porch(config[CONF_VSYNC_FRONT_PORCH]))
    cg.add(var.set_pclk_inverted(config[CONF_PCLK_INVERTED]))
    cg.add(var.set_pclk_frequency(config[CONF_PCLK_FREQUENCY]))
    index = 0
    dpins = []
    if CONF_RED in config[CONF_DATA_PINS]:
        red_pins = config[CONF_DATA_PINS][CONF_RED]

@ -207,10 +206,9 @@ async def to_code(config):
        dpins = dpins[8:16] + dpins[0:8]
    else:
        dpins = config[CONF_DATA_PINS]
    for pin in dpins:
    for index, pin in enumerate(dpins):
        data_pin = await cg.gpio_pin_expression(pin)
        cg.add(var.add_data_pin(data_pin, index))
        index += 1

    if dc_pin := config.get(CONF_DC_PIN):
        dc = await cg.gpio_pin_expression(dc_pin)
@ -49,8 +49,7 @@ def _expand_jinja(value, orig_value, path, jinja, ignore_missing):
    try:
        # Invoke the jinja engine to evaluate the expression.
        value, err = jinja.expand(value)
        if err is not None:
            if not ignore_missing and "password" not in path:
        if err is not None and not ignore_missing and "password" not in path:
            _LOGGER.warning(
                "Found '%s' (see %s) which looks like an expression,"
                " but could not resolve all the variables: %s",
@ -1,3 +1,4 @@
import contextlib
import re

from esphome import automation

@ -41,12 +42,10 @@ ELEVATION_MAP = {

def elevation(value):
    if isinstance(value, str):
        try:
        with contextlib.suppress(cv.Invalid):
            value = ELEVATION_MAP[
                cv.one_of(*ELEVATION_MAP, lower=True, space="_")(value)
            ]
        except cv.Invalid:
            pass
        value = cv.angle(value)
    return cv.float_range(min=-180, max=180)(value)
@ -41,8 +41,10 @@ SX1509KeyTrigger = sx1509_ns.class_(


def check_keys(config):
    if CONF_KEYS in config:
        if len(config[CONF_KEYS]) != config[CONF_KEY_ROWS] * config[CONF_KEY_COLUMNS]:
    if (
        CONF_KEYS in config
        and len(config[CONF_KEYS]) != config[CONF_KEY_ROWS] * config[CONF_KEY_COLUMNS]
    ):
        raise cv.Invalid(
            "The number of key codes must equal the number of rows * columns"
        )
@ -477,8 +477,8 @@ def validate_thermostat(config):
    if (
        CONF_ON_BOOT_RESTORE_FROM in config
        and config[CONF_ON_BOOT_RESTORE_FROM] is OnBootRestoreFrom.DEFAULT_PRESET
        and CONF_DEFAULT_PRESET not in config
    ):
        if CONF_DEFAULT_PRESET not in config:
        raise cv.Invalid(
            f"{CONF_DEFAULT_PRESET} must be defined to use {CONF_ON_BOOT_RESTORE_FROM} in DEFAULT_PRESET mode"
        )
@ -236,7 +236,7 @@ def validate_time_at(value):

def validate_cron_keys(value):
    if CONF_CRON in value:
        for key in value.keys():
        for key in value:
            if key in CRON_KEYS:
                raise cv.Invalid(f"Cannot use option {key} when cron: is specified.")
    if CONF_AT in value:

@ -246,7 +246,7 @@ def validate_cron_keys(value):
        value.update(cron_)
        return value
    if CONF_AT in value:
        for key in value.keys():
        for key in value:
            if key in CRON_KEYS:
                raise cv.Invalid(f"Cannot use option {key} when at: is specified.")
        at_ = value[CONF_AT]
@ -46,8 +46,7 @@ TuyaClimate = tuya_ns.class_("TuyaClimate", climate.Climate, cg.Component)


def validate_temperature_multipliers(value):
    if CONF_TEMPERATURE_MULTIPLIER in value:
        if (
    if CONF_TEMPERATURE_MULTIPLIER in value and (
        CONF_CURRENT_TEMPERATURE_MULTIPLIER in value
        or CONF_TARGET_TEMPERATURE_MULTIPLIER in value
    ):
@ -34,9 +34,11 @@ def validate_min_max(config):
    min_value = config[CONF_MIN_VALUE]
    if max_value <= min_value:
        raise cv.Invalid("max_value must be greater than min_value")
    if hidden_config := config.get(CONF_DATAPOINT_HIDDEN):
        if (initial_value := hidden_config.get(CONF_INITIAL_VALUE, None)) is not None:
            if (initial_value > max_value) or (initial_value < min_value):
    if (
        (hidden_config := config.get(CONF_DATAPOINT_HIDDEN))
        and (initial_value := hidden_config.get(CONF_INITIAL_VALUE, None)) is not None
        and ((initial_value > max_value) or (initial_value < min_value))
    ):
        raise cv.Invalid(
            f"{CONF_INITIAL_VALUE} must be a value between {CONF_MAX_VALUE} and {CONF_MIN_VALUE}"
        )
@ -442,9 +442,7 @@ async def to_code(config):

    if CORE.is_esp8266:
        cg.add_library("ESP8266WiFi", None)
    elif CORE.is_esp32 and CORE.using_arduino:
        cg.add_library("WiFi", None)
    elif CORE.is_rp2040:
    elif (CORE.is_esp32 and CORE.using_arduino) or CORE.is_rp2040:
        cg.add_library("WiFi", None)

    if CORE.is_esp32 and CORE.using_esp_idf:
@ -198,10 +198,7 @@ class Config(OrderedDict, fv.FinalValidateConfig):
        self.output_paths.remove((path, domain))

    def is_in_error_path(self, path: ConfigPath) -> bool:
        for err in self.errors:
            if _path_begins_with(err.path, path):
                return True
        return False
        return any(_path_begins_with(err.path, path) for err in self.errors)

    def set_by_path(self, path, value):
        conf = self

@ -224,7 +221,7 @@ class Config(OrderedDict, fv.FinalValidateConfig):
        for index, path_item in enumerate(path):
            try:
                if path_item in data:
                    key_data = [x for x in data.keys() if x == path_item][0]
                    key_data = [x for x in data if x == path_item][0]
                    if isinstance(key_data, ESPHomeDataBase):
                        doc_range = key_data.esp_range
                    if get_key and index == len(path) - 1:

@ -1081,7 +1078,7 @@ def dump_dict(
        ret += "{}"
        multiline = False

        for k in conf.keys():
        for k in conf:
            path_ = path + [k]
            error = config.get_error_for_path(path_)
            if error is not None:

@ -1097,10 +1094,7 @@ def dump_dict(
                msg = f"\n{indent(msg)}"

            if inf is not None:
                if m:
                    msg = f" {inf}{msg}"
                else:
                    msg = f"{msg} {inf}"
                msg = f" {inf}{msg}" if m else f"{msg} {inf}"
            ret += f"{st + msg}\n"
    elif isinstance(conf, str):
        if is_secret(conf):
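The first config.py hunk is ruff's SIM110 rewrite: a for-loop that returns True on the first hit and False after the loop is exactly `any()` over a generator, including the short-circuit on the first match; the other hunks drop redundant `.keys()` iteration (SIM118) and fold an if/else assignment into a conditional expression (SIM108). A runnable model of the `is_in_error_path` change, with a simplified `_path_begins_with` and plain lists standing in for the error objects:

def _path_begins_with(path: list, prefix: list) -> bool:
    # Simplified: does `path` start with `prefix`?
    return path[: len(prefix)] == prefix

errors = [["sensor", 0], ["light", 2, "effects"]]

def in_error_path_loop(path: list) -> bool:
    # Before: explicit loop with early return True, trailing return False.
    for err in errors:
        if _path_begins_with(err, path):
            return True
    return False

def in_error_path_any(path: list) -> bool:
    # After (SIM110): any() short-circuits on the first match, same as the loop.
    return any(_path_begins_with(err, path) for err in errors)

assert in_error_path_loop(["light"]) and in_error_path_any(["light"])
assert not in_error_path_loop(["switch"]) and not in_error_path_any(["switch"])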
@ -2,7 +2,7 @@

from __future__ import annotations

from contextlib import contextmanager
from contextlib import contextmanager, suppress
from dataclasses import dataclass
from datetime import datetime
from ipaddress import (

@ -2113,10 +2113,8 @@ def require_esphome_version(year, month, patch):

@contextmanager
def suppress_invalid():
    try:
    with suppress(vol.Invalid):
        yield
    except vol.Invalid:
        pass


GIT_SCHEMA = Schema(
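Wrapping the `yield` of a generator-based context manager in `contextlib.suppress` is equivalent to the old try/except around it: an exception raised in the caller's `with` body is thrown back into the generator at the `yield` point, where `suppress` catches it and lets the generator finish cleanly. A runnable demonstration using `ValueError` in place of voluptuous's `vol.Invalid`:

from contextlib import contextmanager, suppress

@contextmanager
def suppress_invalid():
    # Exceptions raised in the caller's `with` body are re-raised at this
    # yield; suppress() catches the listed type and the generator exits cleanly.
    with suppress(ValueError):
        yield

with suppress_invalid():
    raise ValueError("swallowed")  # does not propagate

print("still running")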
@ -317,7 +317,7 @@ def preload_core_config(config, result) -> str:

    target_platforms = []

    for domain, _ in config.items():
    for domain in config:
        if domain.startswith("."):
            continue
        if _is_target_platform(domain):
@ -65,7 +65,7 @@ static void validate_static_string(const char *name) {

// Common implementation for both timeout and interval
void HOT Scheduler::set_timer_common_(Component *component, SchedulerItem::Type type, bool is_static_string,
                                      const void *name_ptr, uint32_t delay, std::function<void()> func) {
                                      const void *name_ptr, uint32_t delay, std::function<void()> func, bool is_retry) {
  // Get the name as const char*
  const char *name_cstr = this->get_name_cstr_(is_static_string, name_ptr);

@ -130,6 +130,18 @@ void HOT Scheduler::set_timer_common_(Component *component, SchedulerItem::Type
#endif /* ESPHOME_DEBUG_SCHEDULER */

  LockGuard guard{this->lock_};

  // For retries, check if there's a cancelled timeout first
  if (is_retry && name_cstr != nullptr && type == SchedulerItem::TIMEOUT &&
      (has_cancelled_timeout_in_container_(this->items_, component, name_cstr) ||
       has_cancelled_timeout_in_container_(this->to_add_, component, name_cstr))) {
    // Skip scheduling - the retry was cancelled
#ifdef ESPHOME_DEBUG_SCHEDULER
    ESP_LOGD(TAG, "Skipping retry '%s' - found cancelled item", name_cstr);
#endif
    return;
  }

  // If name is provided, do atomic cancel-and-add
  // Cancel existing items
  this->cancel_item_locked_(component, name_cstr, type);

@ -178,12 +190,14 @@ struct RetryArgs {
  Scheduler *scheduler;
};

static void retry_handler(const std::shared_ptr<RetryArgs> &args) {
void retry_handler(const std::shared_ptr<RetryArgs> &args) {
  RetryResult const retry_result = args->func(--args->retry_countdown);
  if (retry_result == RetryResult::DONE || args->retry_countdown <= 0)
    return;
  // second execution of `func` happens after `initial_wait_time`
  args->scheduler->set_timeout(args->component, args->name, args->current_interval, [args]() { retry_handler(args); });
  args->scheduler->set_timer_common_(
      args->component, Scheduler::SchedulerItem::TIMEOUT, false, &args->name, args->current_interval,
      [args]() { retry_handler(args); }, true);
  // backoff_increase_factor applied to third & later executions
  args->current_interval *= args->backoff_increase_factor;
}

@ -15,8 +15,15 @@
namespace esphome {

class Component;
struct RetryArgs;

// Forward declaration of retry_handler - needs to be non-static for friend declaration
void retry_handler(const std::shared_ptr<RetryArgs> &args);

class Scheduler {
  // Allow retry_handler to access protected members
  friend void ::esphome::retry_handler(const std::shared_ptr<RetryArgs> &args);

 public:
  // Public API - accepts std::string for backward compatibility
  void set_timeout(Component *component, const std::string &name, uint32_t timeout, std::function<void()> func);

@ -147,7 +154,7 @@ class Scheduler {

  // Common implementation for both timeout and interval
  void set_timer_common_(Component *component, SchedulerItem::Type type, bool is_static_string, const void *name_ptr,
                         uint32_t delay, std::function<void()> func);
                         uint32_t delay, std::function<void()> func, bool is_retry = false);

  uint64_t millis_64_(uint32_t now);
  // Cleanup logically deleted items from the scheduler

@ -170,8 +177,8 @@ class Scheduler {

  // Helper function to check if item matches criteria for cancellation
  inline bool HOT matches_item_(const std::unique_ptr<SchedulerItem> &item, Component *component, const char *name_cstr,
                                SchedulerItem::Type type) {
    if (item->component != component || item->type != type || item->remove) {
                                SchedulerItem::Type type, bool skip_removed = true) const {
    if (item->component != component || item->type != type || (skip_removed && item->remove)) {
      return false;
    }
    const char *item_name = item->get_name();

@ -197,6 +204,18 @@ class Scheduler {
    return item->remove || (item->component != nullptr && item->component->is_failed());
  }

  // Template helper to check if any item in a container matches our criteria
  template<typename Container>
  bool has_cancelled_timeout_in_container_(const Container &container, Component *component,
                                           const char *name_cstr) const {
    for (const auto &item : container) {
      if (item->remove && this->matches_item_(item, component, name_cstr, SchedulerItem::TIMEOUT, false)) {
        return true;
      }
    }
    return false;
  }

  Mutex lock_;
  std::vector<std::unique_ptr<SchedulerItem>> items_;
  std::vector<std::unique_ptr<SchedulerItem>> to_add_;
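The scheduler hunks close a race in retries: `retry_handler` used to re-arm itself through the public `set_timeout()`, so a retry cancelled between invocations could quietly reschedule anyway. Routing the re-arm through `set_timer_common_(..., is_retry=true)` lets the scheduler first scan `items_` and `to_add_` for a timeout with the same component and name that is already flagged `remove`, and skip scheduling in that case. A rough Python model of that check — illustrative only, with simplified item fields, not the C++ implementation:

from dataclasses import dataclass

@dataclass
class Item:
    component: str
    name: str
    remove: bool = False  # set when the item is cancelled

items = [Item("rc522", "wait_tag", remove=True)]

def has_cancelled_timeout(component: str, name: str) -> bool:
    # Mirrors the intent of has_cancelled_timeout_in_container_: only items
    # already flagged for removal count as "cancelled".
    return any(i.remove and i.component == component and i.name == name for i in items)

def schedule_retry(component: str, name: str) -> bool:
    if has_cancelled_timeout(component, name):
        return False  # the retry was cancelled; skip re-scheduling
    items.append(Item(component, name))
    return True

assert schedule_retry("rc522", "wait_tag") is False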
@ -1037,10 +1037,7 @@ class MockObjClass(MockObj):
    def inherits_from(self, other: "MockObjClass") -> bool:
        if str(self) == str(other):
            return True
        for parent in self._parents:
            if str(parent) == str(other):
                return True
        return False
        return any(str(parent) == str(other) for parent in self._parents)

    def template(self, *args: SafeExpType) -> "MockObjClass":
        if len(args) != 1 or not isinstance(args[0], TemplateArguments):
@ -3,6 +3,7 @@ from __future__ import annotations

import asyncio
from asyncio import events
from concurrent.futures import ThreadPoolExecutor
import contextlib
import logging
import os
import socket

@ -125,10 +126,8 @@ def start_dashboard(args) -> None:

    asyncio.set_event_loop_policy(DashboardEventLoopPolicy(settings.verbose))

    try:
    with contextlib.suppress(KeyboardInterrupt):
        asyncio.run(async_start(args))
    except KeyboardInterrupt:
        pass


async def async_start(args) -> None:
@ -144,7 +144,7 @@ def websocket_class(cls):
    if not hasattr(cls, "_message_handlers"):
        cls._message_handlers = {}

    for _, method in cls.__dict__.items():
    for method in cls.__dict__.values():
        if hasattr(method, "_message_handler"):
            cls._message_handlers[method._message_handler] = method
@ -88,10 +88,7 @@ def recv_decode(sock, amount, decode=True):


def receive_exactly(sock, amount, msg, expect, decode=True):
    if decode:
        data = []
    else:
        data = b""
    data = [] if decode else b""

    try:
        data += recv_decode(sock, 1, decode=decode)
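`data = [] if decode else b""` is ruff's SIM108 (ternary) rewrite of the four-line if/else; both branches still produce a value that supports `+=` with whatever `recv_decode` returns. A quick equivalence check:

def make_buffer(decode: bool):
    # Before: explicit if/else assignment.
    if decode:
        data_old = []
    else:
        data_old = b""
    # After (SIM108): a conditional expression says the same in one line.
    data_new = [] if decode else b""
    assert data_old == data_new
    return data_new

make_buffer(True)
make_buffer(False)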
@ -96,9 +96,7 @@ def cpp_string_escape(string, encoding="utf-8"):
    def _should_escape(byte: int) -> bool:
        if not 32 <= byte < 127:
            return True
        if byte in (ord("\\"), ord('"')):
            return True
        return False
        return byte in (ord("\\"), ord('"'))

    if isinstance(string, str):
        string = string.encode(encoding)
@ -61,7 +61,7 @@ class ESPHomeLogFormatter(logging.Formatter):
        }.get(record.levelname, "")
        message = f"{prefix}{formatted}{AnsiStyle.RESET_ALL.value}"
        if CORE.dashboard:
            try:  # noqa: SIM105
                message = message.replace("\033", "\\033")
            except UnicodeEncodeError:
                pass
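Unlike the SIM105 sites converted to `contextlib.suppress` elsewhere in this diff, this formatter (and `safe_print` further down) keeps the plain try/except and silences the linter with `# noqa: SIM105`. A plausible reason: this code runs once per log line, and a try block that never raises costs essentially nothing, while `suppress` constructs and enters a context manager on every call — a claim easy to measure (numbers vary by machine):

import contextlib
import timeit

def with_try() -> str:
    s = "\033[31mred"
    try:  # noqa: SIM105
        s = s.replace("\033", "\\033")
    except UnicodeEncodeError:
        pass
    return s

def with_suppress() -> str:
    s = "\033[31mred"
    with contextlib.suppress(UnicodeEncodeError):
        s = s.replace("\033", "\\033")
    return s

# try/except is ~free when no exception is raised; suppress() pays for
# object creation plus __enter__/__exit__ on every call.
print(timeit.timeit(with_try, number=100_000))
print(timeit.timeit(with_suppress, number=100_000))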
@ -1,3 +1,4 @@
import contextlib
from datetime import datetime
import hashlib
import json

@ -52,10 +53,8 @@ def initialize(
    client = prepare(
        config, subscriptions, on_message, on_connect, username, password, client_id
    )
    try:
    with contextlib.suppress(KeyboardInterrupt):
        client.loop_forever()
    except KeyboardInterrupt:
        pass
    return 0
@ -141,9 +141,11 @@ def _load_idedata(config):
    temp_idedata = Path(CORE.relative_internal_path("idedata", f"{CORE.name}.json"))

    changed = False
    if not platformio_ini.is_file() or not temp_idedata.is_file():
        changed = True
    elif platformio_ini.stat().st_mtime >= temp_idedata.stat().st_mtime:
    if (
        not platformio_ini.is_file()
        or not temp_idedata.is_file()
        or platformio_ini.stat().st_mtime >= temp_idedata.stat().st_mtime
    ):
        changed = True

    if not changed:
@ -59,7 +59,7 @@ def safe_print(message="", end="\n"):
    from esphome.core import CORE

    if CORE.dashboard:
        try:  # noqa: SIM105
            message = message.replace("\033", "\\033")
        except UnicodeEncodeError:
            pass
@ -116,10 +116,7 @@ def wizard_file(**kwargs):
        kwargs["fallback_name"] = ap_name
        kwargs["fallback_psk"] = "".join(random.choice(letters) for _ in range(12))

    if kwargs.get("friendly_name"):
        base = BASE_CONFIG_FRIENDLY
    else:
        base = BASE_CONFIG
    base = BASE_CONFIG_FRIENDLY if kwargs.get("friendly_name") else BASE_CONFIG

    config = base.format(**kwargs)
@ -86,18 +86,14 @@ def storage_should_clean(old: StorageJSON, new: StorageJSON) -> bool:

    if old.src_version != new.src_version:
        return True
    if old.build_path != new.build_path:
        return True

    return False
    return old.build_path != new.build_path


def storage_should_update_cmake_cache(old: StorageJSON, new: StorageJSON) -> bool:
    if (
        old.loaded_integrations != new.loaded_integrations
        or old.loaded_platforms != new.loaded_platforms
    ):
        if new.core_platform == PLATFORM_ESP32:
    ) and new.core_platform == PLATFORM_ESP32:
        from esphome.components.esp32 import FRAMEWORK_ESP_IDF

        return new.framework == FRAMEWORK_ESP_IDF
@ -56,8 +56,11 @@ class ESPHomeDataBase:
    def from_node(self, node):
        # pylint: disable=attribute-defined-outside-init
        self._esp_range = DocumentRange.from_marks(node.start_mark, node.end_mark)
        if isinstance(node, yaml.ScalarNode):
            if node.style is not None and node.style in "|>":
        if (
            isinstance(node, yaml.ScalarNode)
            and node.style is not None
            and node.style in "|>"
        ):
            self._content_offset = 1

    def from_database(self, database):
@ -40,6 +40,7 @@ lib_deps =
    functionpointer/arduino-MLX90393@1.0.2  ; mlx90393
    pavlodn/HaierProtocol@0.9.31  ; haier
    kikuchan98/pngle@1.1.0  ; online_image
    https://github.com/esphome/TinyGPSPlus.git#v1.1.0  ; gps
    ; Using the repository directly, otherwise ESP-IDF can't use the library
    https://github.com/bitbank2/JPEGDEC.git#ca1e0f2  ; online_image
    ; This is using the repository until a new release is published to PlatformIO

@ -73,7 +74,6 @@ lib_deps =
    heman/AsyncMqttClient-esphome@1.0.0  ; mqtt
    ESP32Async/ESPAsyncWebServer@3.7.8  ; web_server_base
    fastled/FastLED@3.9.16  ; fastled_base
    mikalhart/TinyGPSPlus@1.1.0  ; gps
    freekode/TM1651@1.0.1  ; tm1651
    glmnet/Dsmr@0.7  ; dsmr
    rweather/Crypto@0.4.0  ; dsmr

@ -180,13 +180,6 @@ build_unflags =
    ${common.build_unflags}
extra_scripts = post:esphome/components/esp32/post_build.py.script

; This are common settings for the ESP32 using the latest ESP-IDF version.
[common:esp32-idf-5_3]
extends = common:esp32-idf
platform = platformio/espressif32@6.8.0
platform_packages =
    platformio/framework-espidf@~3.50300.0

; These are common settings for the RP2040 using Arduino.
[common:rp2040-arduino]
extends = common:arduino

@ -239,6 +232,7 @@ lib_deps =
    wjtje/qr-code-generator-library@1.7.0  ; qr_code
    pavlodn/HaierProtocol@0.9.31  ; haier
    functionpointer/arduino-MLX90393@1.0.2  ; mlx90393
    https://github.com/esphome/TinyGPSPlus.git#v1.1.0  ; gps
    https://github.com/Sensirion/arduino-gas-index-algorithm.git#3.2.1  ; Sensirion Gas Index Algorithm Arduino Library
    lvgl/lvgl@8.4.0  ; lvgl

@ -298,17 +292,6 @@ build_flags =
build_unflags =
    ${common.build_unflags}

[env:esp32-idf-5_3]
extends = common:esp32-idf-5_3
board = esp32dev
board_build.esp-idf.sdkconfig_path = .temp/sdkconfig-esp32-idf
build_flags =
    ${common:esp32-idf.build_flags}
    ${flags:runtime.build_flags}
    -DUSE_ESP32_VARIANT_ESP32
build_unflags =
    ${common.build_unflags}

[env:esp32-idf-tidy]
extends = common:esp32-idf
board = esp32dev

@ -353,17 +336,6 @@ build_flags =
build_unflags =
    ${common.build_unflags}

[env:esp32c3-idf-5_3]
extends = common:esp32-idf-5_3
board = esp32-c3-devkitm-1
board_build.esp-idf.sdkconfig_path = .temp/sdkconfig-esp32c3-idf
build_flags =
    ${common:esp32-idf.build_flags}
    ${flags:runtime.build_flags}
    -DUSE_ESP32_VARIANT_ESP32C3
build_unflags =
    ${common.build_unflags}

[env:esp32c3-idf-tidy]
extends = common:esp32-idf
board = esp32-c3-devkitm-1

@ -419,17 +391,6 @@ build_flags =
build_unflags =
    ${common.build_unflags}

[env:esp32s2-idf-5_3]
extends = common:esp32-idf-5_3
board = esp32-s2-kaluga-1
board_build.esp-idf.sdkconfig_path = .temp/sdkconfig-esp32s2-idf
build_flags =
    ${common:esp32-idf.build_flags}
    ${flags:runtime.build_flags}
    -DUSE_ESP32_VARIANT_ESP32S2
build_unflags =
    ${common.build_unflags}

[env:esp32s2-idf-tidy]
extends = common:esp32-idf
board = esp32-s2-kaluga-1

@ -474,17 +435,6 @@ build_flags =
build_unflags =
    ${common.build_unflags}

[env:esp32s3-idf-5_3]
extends = common:esp32-idf-5_3
board = esp32-s3-devkitc-1
board_build.esp-idf.sdkconfig_path = .temp/sdkconfig-esp32s3-idf
build_flags =
    ${common:esp32-idf.build_flags}
    ${flags:runtime.build_flags}
    -DUSE_ESP32_VARIANT_ESP32S3
build_unflags =
    ${common.build_unflags}

[env:esp32s3-idf-tidy]
extends = common:esp32-idf
board = esp32-s3-devkitc-1

@ -565,6 +515,8 @@ build_flags =
build_unflags =
    ${common.build_unflags}

;;;;;;;; Host ;;;;;;;;

[env:host]
extends = common
platform = platformio/native
@ -114,7 +114,9 @@ select = [
    "E",  # pycodestyle
    "F",  # pyflakes/autoflake
    "I",  # isort
    "PERF",  # performance
    "PL",  # pylint
    "SIM",  # flake8-simplify
    "UP",  # pyupgrade
]
@ -1,6 +1,6 @@
pylint==3.3.7
flake8==7.3.0  # also change in .pre-commit-config.yaml when updating
ruff==0.12.4  # also change in .pre-commit-config.yaml when updating
ruff==0.12.5  # also change in .pre-commit-config.yaml when updating
pyupgrade==3.20.0  # also change in .pre-commit-config.yaml when updating
pre-commit
@ -61,9 +61,7 @@ def indent_list(text: str, padding: str = "  ") -> list[str]:
    """Indent each line of the given text with the specified padding."""
    lines = []
    for line in text.splitlines():
        if line == "":
            p = ""
        elif line.startswith("#ifdef") or line.startswith("#endif"):
        if line == "" or line.startswith("#ifdef") or line.startswith("#endif"):
            p = ""
        else:
            p = padding

@ -2388,7 +2386,7 @@ static const char *const TAG = "api.service";
    needs_conn = get_opt(m, pb.needs_setup_connection, True)
    needs_auth = get_opt(m, pb.needs_authentication, True)

    ifdef = message_ifdef_map.get(inp, ifdefs.get(inp, None))
    ifdef = message_ifdef_map.get(inp, ifdefs.get(inp))

    if ifdef is not None:
        hpp += f"#ifdef {ifdef}\n"
@ -71,10 +71,12 @@ def get_component_names():
    skip_components = []

    for d in os.listdir(CORE_COMPONENTS_PATH):
        if not d.startswith("__") and os.path.isdir(
            os.path.join(CORE_COMPONENTS_PATH, d)
        if (
            not d.startswith("__")
            and os.path.isdir(os.path.join(CORE_COMPONENTS_PATH, d))
            and d not in component_names
            and d not in skip_components
        ):
            if d not in component_names and d not in skip_components:
            component_names.append(d)

    return sorted(component_names)

@ -139,10 +141,9 @@ def register_module_schemas(key, module, manifest=None):
    for name, schema in module_schemas(module):
        register_known_schema(key, name, schema)

    if manifest:
    if manifest and manifest.multi_conf and S_CONFIG_SCHEMA in output[key][S_SCHEMAS]:
        # Multi conf should allow list of components
        # not sure about 2nd part of the if, might be useless config (e.g. as3935)
        if manifest.multi_conf and S_CONFIG_SCHEMA in output[key][S_SCHEMAS]:
        output[key][S_SCHEMAS][S_CONFIG_SCHEMA]["is_list"] = True
@ -230,7 +231,7 @@ def add_module_registries(domain, module):
            reg_type = attr_name.partition("_")[0].lower()
            found_registries[repr(attr_obj)] = f"{domain}.{reg_type}"

            for name in attr_obj.keys():
            for name in attr_obj:
                if "." not in name:
                    reg_entry_name = name
                else:

@ -700,7 +701,7 @@ def is_convertible_schema(schema):
    if repr(schema) in ejs.registry_schemas:
        return True
    if isinstance(schema, dict):
        for k in schema.keys():
        for k in schema:
            if isinstance(k, (cv.Required, cv.Optional)):
                return True
    return False

@ -818,7 +819,7 @@ def convert(schema, config_var, path):
    elif schema_type == "automation":
        extra_schema = None
        config_var[S_TYPE] = "trigger"
        if automation.AUTOMATION_SCHEMA == ejs.extended_schemas[repr(data)][0]:
        if ejs.extended_schemas[repr(data)][0] == automation.AUTOMATION_SCHEMA:
            extra_schema = ejs.extended_schemas[repr(data)][1]
        if (
            extra_schema is not None and len(extra_schema) > 1

@ -926,8 +927,7 @@ def convert(schema, config_var, path):
            config = convert_config(schema_type, path + "/type_" + schema_key)
            types[schema_key] = config["schema"]

    elif DUMP_UNKNOWN:
        if S_TYPE not in config_var:
    elif DUMP_UNKNOWN and S_TYPE not in config_var:
        config_var["unknown"] = repr_schema

    if DUMP_PATH:
@ -66,9 +66,10 @@ def main():
    )
    args = parser.parse_args()

    files = []
    for path in git_ls_files(["*.cpp", "*.h", "*.tcc"]):
        files.append(os.path.relpath(path, os.getcwd()))
    cwd = os.getcwd()
    files = [
        os.path.relpath(path, cwd) for path in git_ls_files(["*.cpp", "*.h", "*.tcc"])
    ]

    if args.files:
        # Match against files specified on command-line
@ -219,9 +219,8 @@ def main():
    )
    args = parser.parse_args()

    files = []
    for path in git_ls_files(["*.cpp"]):
        files.append(os.path.relpath(path, os.getcwd()))
    cwd = os.getcwd()
    files = [os.path.relpath(path, cwd) for path in git_ls_files(["*.cpp"])]

    # Print initial file count if it's large
    if len(files) > 50:
@ -365,9 +365,11 @@ def load_idedata(environment: str) -> dict[str, Any]:
    platformio_ini = Path(root_path) / "platformio.ini"
    temp_idedata = Path(temp_folder) / f"idedata-{environment}.json"
    changed = False
    if not platformio_ini.is_file() or not temp_idedata.is_file():
        changed = True
    elif platformio_ini.stat().st_mtime >= temp_idedata.stat().st_mtime:
    if (
        not platformio_ini.is_file()
        or not temp_idedata.is_file()
        or platformio_ini.stat().st_mtime >= temp_idedata.stat().st_mtime
    ):
        changed = True

    if "idf" in environment:
@ -41,11 +41,12 @@ CONFIG_NEWLIB_LIBC=y
    return include_paths

def extract_defines(command):
    defines = []
    define_pattern = re.compile(r"-D\s*([^\s]+)")
    for match in define_pattern.findall(command):
        if match not in ("_ASMLANGUAGE"):
            defines.append(match)
    defines = [
        match
        for match in define_pattern.findall(command)
        if match not in ("_ASMLANGUAGE")
    ]
    return defines

def find_cxx_path(commands):

@ -78,13 +79,14 @@ CONFIG_NEWLIB_LIBC=y
    return include_paths

def extract_cxx_flags(command):
    flags = []
    # Extracts CXXFLAGS from the command string, excluding includes and defines.
    flag_pattern = re.compile(
        r"(-O[0-3s]|-g|-std=[^\s]+|-Wall|-Wextra|-Werror|--[^\s]+|-f[^\s]+|-m[^\s]+|-imacros\s*[^\s]+)"
    )
    for match in flag_pattern.findall(command):
        flags.append(match.replace("-imacros ", "-imacros"))
    flags = [
        match.replace("-imacros ", "-imacros")
        for match in flag_pattern.findall(command)
    ]
    return flags

def transform_to_idedata_format(compile_commands):
@ -1,3 +1,7 @@
button:
  - platform: factory_reset
    name: Reset to Factory Default Settings

factory_reset:
  resets_required: 5
  max_delay: 10s

1
tests/components/factory_reset/test.bk72xx-ard.yaml
Normal file

@ -0,0 +1 @@
<<: !include common.yaml

@ -1 +1,4 @@
esp8266:
  restore_from_flash: true

<<: !include common.yaml

@ -1 +1,3 @@
<<: !include common.yaml
button:
  - platform: factory_reset
    name: Reset to Factory Default Settings

5
tests/components/gps/test.esp32-c3-idf.yaml
Normal file

@ -0,0 +1,5 @@
substitutions:
  tx_pin: GPIO4
  rx_pin: GPIO5

<<: !include common.yaml

5
tests/components/gps/test.esp32-idf.yaml
Normal file

@ -0,0 +1,5 @@
substitutions:
  tx_pin: GPIO12
  rx_pin: GPIO14

<<: !include common.yaml

Some files were not shown because too many files have changed in this diff.