Merge branch 'dev' into integration

J. Nick Koston 2025-07-25 08:42:21 -10:00
commit 76a63e5d55
112 changed files with 1396 additions and 1107 deletions


@@ -1 +1 @@
-7920671c938a5ea6a11ac4594204b5ec8f38d579c962bf1f185e8d5e3ad879be
+32b0db73b3ae01ba18c9cbb1dabbd8156bc14dded500471919bd0a3dc33916e0


@@ -14,6 +14,7 @@ env:
   SMALL_PR_THRESHOLD: 30
   MAX_LABELS: 15
   TOO_BIG_THRESHOLD: 1000
+  COMPONENT_LABEL_THRESHOLD: 10

 jobs:
   label:
@@ -23,24 +24,6 @@ jobs:
       - name: Checkout
        uses: actions/checkout@v4.2.2
-      - name: Get changes
-        id: changes
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: |
-          # Get PR number
-          pr_number="${{ github.event.pull_request.number }}"
-          # Get list of changed files using gh CLI
-          files=$(gh pr diff $pr_number --name-only)
-          echo "files<<EOF" >> $GITHUB_OUTPUT
-          echo "$files" >> $GITHUB_OUTPUT
-          echo "EOF" >> $GITHUB_OUTPUT
-          # Get file stats (additions + deletions) using gh CLI
-          stats=$(gh pr view $pr_number --json files --jq '.files | map(.additions + .deletions) | add')
-          echo "total_changes=${stats:-0}" >> $GITHUB_OUTPUT
       - name: Generate a token
        id: generate-token
        uses: actions/create-github-app-token@v2
@@ -55,93 +38,466 @@ jobs:
          script: |
            const fs = require('fs');
// Constants
const SMALL_PR_THRESHOLD = parseInt('${{ env.SMALL_PR_THRESHOLD }}');
const MAX_LABELS = parseInt('${{ env.MAX_LABELS }}');
const TOO_BIG_THRESHOLD = parseInt('${{ env.TOO_BIG_THRESHOLD }}');
const COMPONENT_LABEL_THRESHOLD = parseInt('${{ env.COMPONENT_LABEL_THRESHOLD }}');
const BOT_COMMENT_MARKER = '<!-- auto-label-pr-bot -->';
const CODEOWNERS_MARKER = '<!-- codeowners-request -->';
const TOO_BIG_MARKER = '<!-- too-big-request -->';
const MANAGED_LABELS = [
'new-component',
'new-platform',
'new-target-platform',
'merging-to-release',
'merging-to-beta',
'core',
'small-pr',
'dashboard',
'github-actions',
'by-code-owner',
'has-tests',
'needs-tests',
'needs-docs',
'needs-codeowners',
'too-big',
'labeller-recheck'
];
const DOCS_PR_PATTERNS = [
/https:\/\/github\.com\/esphome\/esphome-docs\/pull\/\d+/,
/esphome\/esphome-docs#\d+/
];
// Global state
 const { owner, repo } = context.repo;
 const pr_number = context.issue.number;
-// Hidden marker to identify bot comments from this workflow
-const BOT_COMMENT_MARKER = '<!-- auto-label-pr-bot -->';
-// Get current labels
+// Get current labels and PR data
 const { data: currentLabelsData } = await github.rest.issues.listLabelsOnIssue({
   owner,
   repo,
   issue_number: pr_number
 });
 const currentLabels = currentLabelsData.map(label => label.name);
-// Define managed labels that this workflow controls
 const managedLabels = currentLabels.filter(label =>
-  label.startsWith('component: ') ||
+  label.startsWith('component: ') || MANAGED_LABELS.includes(label)
[
'new-component',
'new-platform',
'new-target-platform',
'merging-to-release',
'merging-to-beta',
'core',
'small-pr',
'dashboard',
'github-actions',
'by-code-owner',
'has-tests',
'needs-tests',
'needs-docs',
'too-big',
'labeller-recheck'
].includes(label)
 );
// Check for mega-PR early - if present, skip most automatic labeling
const isMegaPR = currentLabels.includes('mega-pr');
// Get all PR files with automatic pagination
const prFiles = await github.paginate(
github.rest.pulls.listFiles,
{
owner,
repo,
pull_number: pr_number
}
);
// Calculate data from PR files
const changedFiles = prFiles.map(file => file.filename);
const totalChanges = prFiles.reduce((sum, file) => sum + (file.additions || 0) + (file.deletions || 0), 0);
 console.log('Current labels:', currentLabels.join(', '));
-console.log('Managed labels:', managedLabels.join(', '));
-// Get changed files
-const changedFiles = `${{ steps.changes.outputs.files }}`.split('\n').filter(f => f.length > 0);
-const totalChanges = parseInt('${{ steps.changes.outputs.total_changes }}') || 0;
 console.log('Changed files:', changedFiles.length);
 console.log('Total changes:', totalChanges);
-const labels = new Set();
+if (isMegaPR) {
+  console.log('Mega-PR detected - applying limited labeling logic');
// Fetch TARGET_PLATFORMS and PLATFORM_COMPONENTS from API
let targetPlatforms = [];
let platformComponents = [];
try {
const response = await fetch('https://data.esphome.io/components.json');
const componentsData = await response.json();
// Extract target platforms and platform components directly from API
targetPlatforms = componentsData.target_platforms || [];
platformComponents = componentsData.platform_components || [];
console.log('Target platforms from API:', targetPlatforms.length, targetPlatforms);
console.log('Platform components from API:', platformComponents.length, platformComponents);
} catch (error) {
console.log('Failed to fetch components data from API:', error.message);
 }
-// Get environment variables
-const smallPrThreshold = parseInt('${{ env.SMALL_PR_THRESHOLD }}');
-const maxLabels = parseInt('${{ env.MAX_LABELS }}');
-const tooBigThreshold = parseInt('${{ env.TOO_BIG_THRESHOLD }}');
+// Fetch API data
+async function fetchApiData() {
+  try {
+    const response = await fetch('https://data.esphome.io/components.json');
const componentsData = await response.json();
return {
targetPlatforms: componentsData.target_platforms || [],
platformComponents: componentsData.platform_components || []
};
} catch (error) {
console.log('Failed to fetch components data from API:', error.message);
return { targetPlatforms: [], platformComponents: [] };
}
}
// Strategy: Merge branch detection
async function detectMergeBranch() {
const labels = new Set();
const baseRef = context.payload.pull_request.base.ref;
// Strategy: Merge to release or beta branch
const baseRef = context.payload.pull_request.base.ref;
if (baseRef !== 'dev') {
   if (baseRef === 'release') {
     labels.add('merging-to-release');
   } else if (baseRef === 'beta') {
     labels.add('merging-to-beta');
   }
-// When targeting non-dev branches, only use merge warning labels
-const finalLabels = Array.from(labels);
+  return labels;
+}
// Strategy: Component and platform labeling
async function detectComponentPlatforms(apiData) {
const labels = new Set();
const componentRegex = /^esphome\/components\/([^\/]+)\//;
const targetPlatformRegex = new RegExp(`^esphome\/components\/(${apiData.targetPlatforms.join('|')})/`);
for (const file of changedFiles) {
const componentMatch = file.match(componentRegex);
if (componentMatch) {
labels.add(`component: ${componentMatch[1]}`);
}
const platformMatch = file.match(targetPlatformRegex);
if (platformMatch) {
labels.add(`platform: ${platformMatch[1]}`);
}
}
return labels;
}
// Strategy: New component detection
async function detectNewComponents() {
const labels = new Set();
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
for (const file of addedFiles) {
const componentMatch = file.match(/^esphome\/components\/([^\/]+)\/__init__\.py$/);
if (componentMatch) {
try {
const content = fs.readFileSync(file, 'utf8');
if (content.includes('IS_TARGET_PLATFORM = True')) {
labels.add('new-target-platform');
}
} catch (error) {
console.log(`Failed to read content of ${file}:`, error.message);
}
labels.add('new-component');
}
}
return labels;
}
// Strategy: New platform detection
async function detectNewPlatforms(apiData) {
const labels = new Set();
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
for (const file of addedFiles) {
const platformFileMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\.py$/);
if (platformFileMatch) {
const [, component, platform] = platformFileMatch;
if (apiData.platformComponents.includes(platform)) {
labels.add('new-platform');
}
}
const platformDirMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\/__init__\.py$/);
if (platformDirMatch) {
const [, component, platform] = platformDirMatch;
if (apiData.platformComponents.includes(platform)) {
labels.add('new-platform');
}
}
}
return labels;
}
// Strategy: Core files detection
async function detectCoreChanges() {
const labels = new Set();
const coreFiles = changedFiles.filter(file =>
file.startsWith('esphome/core/') ||
(file.startsWith('esphome/') && file.split('/').length === 2)
);
if (coreFiles.length > 0) {
labels.add('core');
}
return labels;
}
// Strategy: PR size detection
async function detectPRSize() {
const labels = new Set();
const testChanges = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.additions || 0) + (file.deletions || 0), 0);
const nonTestChanges = totalChanges - testChanges;
if (totalChanges <= SMALL_PR_THRESHOLD) {
labels.add('small-pr');
}
// Don't add too-big if mega-pr label is already present
if (nonTestChanges > TOO_BIG_THRESHOLD && !isMegaPR) {
labels.add('too-big');
}
return labels;
}
// Strategy: Dashboard changes
async function detectDashboardChanges() {
const labels = new Set();
const dashboardFiles = changedFiles.filter(file =>
file.startsWith('esphome/dashboard/') ||
file.startsWith('esphome/components/dashboard_import/')
);
if (dashboardFiles.length > 0) {
labels.add('dashboard');
}
return labels;
}
// Strategy: GitHub Actions changes
async function detectGitHubActionsChanges() {
const labels = new Set();
const githubActionsFiles = changedFiles.filter(file =>
file.startsWith('.github/workflows/')
);
if (githubActionsFiles.length > 0) {
labels.add('github-actions');
}
return labels;
}
// Strategy: Code owner detection
async function detectCodeOwner() {
const labels = new Set();
try {
const { data: codeownersFile } = await github.rest.repos.getContent({
owner,
repo,
path: 'CODEOWNERS',
});
const codeownersContent = Buffer.from(codeownersFile.content, 'base64').toString('utf8');
const prAuthor = context.payload.pull_request.user.login;
const codeownersLines = codeownersContent.split('\n')
.map(line => line.trim())
.filter(line => line && !line.startsWith('#'));
const codeownersRegexes = codeownersLines.map(line => {
const parts = line.split(/\s+/);
const pattern = parts[0];
const owners = parts.slice(1);
let regex;
if (pattern.endsWith('*')) {
const dir = pattern.slice(0, -1);
regex = new RegExp(`^${dir.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`);
} else if (pattern.includes('*')) {
// First escape all regex special chars except *, then replace * with .*
const regexPattern = pattern
.replace(/[.+?^${}()|[\]\\]/g, '\\$&')
.replace(/\*/g, '.*');
regex = new RegExp(`^${regexPattern}$`);
} else {
regex = new RegExp(`^${pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}$`);
}
return { regex, owners };
});
for (const file of changedFiles) {
for (const { regex, owners } of codeownersRegexes) {
if (regex.test(file) && owners.some(owner => owner === `@${prAuthor}`)) {
labels.add('by-code-owner');
return labels;
}
}
}
} catch (error) {
console.log('Failed to read or parse CODEOWNERS file:', error.message);
}
return labels;
}
// Strategy: Test detection
async function detectTests() {
const labels = new Set();
const testFiles = changedFiles.filter(file => file.startsWith('tests/'));
if (testFiles.length > 0) {
labels.add('has-tests');
}
return labels;
}
// Strategy: Requirements detection
async function detectRequirements(allLabels) {
const labels = new Set();
// Check for missing tests
if ((allLabels.has('new-component') || allLabels.has('new-platform')) && !allLabels.has('has-tests')) {
labels.add('needs-tests');
}
// Check for missing docs
if (allLabels.has('new-component') || allLabels.has('new-platform')) {
const prBody = context.payload.pull_request.body || '';
const hasDocsLink = DOCS_PR_PATTERNS.some(pattern => pattern.test(prBody));
if (!hasDocsLink) {
labels.add('needs-docs');
}
}
// Check for missing CODEOWNERS
if (allLabels.has('new-component')) {
const codeownersModified = prFiles.some(file =>
file.filename === 'CODEOWNERS' &&
(file.status === 'modified' || file.status === 'added') &&
(file.additions || 0) > 0
);
if (!codeownersModified) {
labels.add('needs-codeowners');
}
}
return labels;
}
// Generate review messages
function generateReviewMessages(finalLabels) {
const messages = [];
const prAuthor = context.payload.pull_request.user.login;
// Too big message
if (finalLabels.includes('too-big')) {
const testChanges = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.additions || 0) + (file.deletions || 0), 0);
const nonTestChanges = totalChanges - testChanges;
const tooManyLabels = finalLabels.length > MAX_LABELS;
const tooManyChanges = nonTestChanges > TOO_BIG_THRESHOLD;
let message = `${TOO_BIG_MARKER}\n### 📦 Pull Request Size\n\n`;
if (tooManyLabels && tooManyChanges) {
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests) and affects ${finalLabels.length} different components/areas.`;
} else if (tooManyLabels) {
message += `This PR affects ${finalLabels.length} different components/areas.`;
} else {
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests).`;
}
message += ` Please consider breaking it down into smaller, focused PRs to make review easier and reduce the risk of conflicts.\n\n`;
message += `For guidance on breaking down large PRs, see: https://developers.esphome.io/contributing/submitting-your-work/#how-to-approach-large-submissions`;
messages.push(message);
}
// CODEOWNERS message
if (finalLabels.includes('needs-codeowners')) {
const message = `${CODEOWNERS_MARKER}\n### 👥 Code Ownership\n\n` +
`Hey there @${prAuthor},\n` +
`Thanks for submitting this pull request! Can you add yourself as a codeowner for this integration? ` +
`This way we can notify you if a bug report for this integration is reported.\n\n` +
`In \`__init__.py\` of the integration, please add:\n\n` +
`\`\`\`python\nCODEOWNERS = ["@${prAuthor}"]\n\`\`\`\n\n` +
`And run \`script/build_codeowners.py\``;
messages.push(message);
}
return messages;
}
// Handle reviews
async function handleReviews(finalLabels) {
const reviewMessages = generateReviewMessages(finalLabels);
const hasReviewableLabels = finalLabels.some(label =>
['too-big', 'needs-codeowners'].includes(label)
);
const { data: reviews } = await github.rest.pulls.listReviews({
owner,
repo,
pull_number: pr_number
});
const botReviews = reviews.filter(review =>
review.user.type === 'Bot' &&
review.state === 'CHANGES_REQUESTED' &&
review.body && review.body.includes(BOT_COMMENT_MARKER)
);
if (hasReviewableLabels) {
const reviewBody = `${BOT_COMMENT_MARKER}\n\n${reviewMessages.join('\n\n---\n\n')}`;
if (botReviews.length > 0) {
// Update existing review
await github.rest.pulls.updateReview({
owner,
repo,
pull_number: pr_number,
review_id: botReviews[0].id,
body: reviewBody
});
console.log('Updated existing bot review');
} else {
// Create new review
await github.rest.pulls.createReview({
owner,
repo,
pull_number: pr_number,
body: reviewBody,
event: 'REQUEST_CHANGES'
});
console.log('Created new bot review');
}
} else if (botReviews.length > 0) {
// Dismiss existing reviews
for (const review of botReviews) {
try {
await github.rest.pulls.dismissReview({
owner,
repo,
pull_number: pr_number,
review_id: review.id,
message: 'Review dismissed: All requirements have been met'
});
console.log(`Dismissed bot review ${review.id}`);
} catch (error) {
console.log(`Failed to dismiss review ${review.id}:`, error.message);
}
}
}
}
// Main execution
const apiData = await fetchApiData();
const baseRef = context.payload.pull_request.base.ref;
// Early exit for non-dev branches
if (baseRef !== 'dev') {
const branchLabels = await detectMergeBranch();
const finalLabels = Array.from(branchLabels);
 console.log('Computed labels (merge branch only):', finalLabels.join(', '));
-// Add new labels
+// Apply labels
 if (finalLabels.length > 0) {
-  console.log(`Adding labels: ${finalLabels.join(', ')}`);
   await github.rest.issues.addLabels({
     owner,
     repo,
@@ -150,13 +506,9 @@ jobs:
   });
 }
-// Remove old managed labels that are no longer needed
-const labelsToRemove = managedLabels.filter(label =>
-  !finalLabels.includes(label)
-);
+// Remove old managed labels
+const labelsToRemove = managedLabels.filter(label => !finalLabels.includes(label));
 for (const label of labelsToRemove) {
-  console.log(`Removing label: ${label}`);
   try {
     await github.rest.issues.removeLabel({
       owner,
@@ -169,324 +521,78 @@ jobs:
   }
 }
-return; // Exit early, don't process other strategies
+  return;
 }
-// Strategy: Component and Platform labeling
-const componentRegex = /^esphome\/components\/([^\/]+)\//;
-const targetPlatformRegex = new RegExp(`^esphome\/components\/(${targetPlatforms.join('|')})/`);
-for (const file of changedFiles) {
-  // Check for component changes
-  const componentMatch = file.match(componentRegex);
-  if (componentMatch) {
-    const component = componentMatch[1];
-    labels.add(`component: ${component}`);
-  }
-  // Check for target platform changes
-  const platformMatch = file.match(targetPlatformRegex);
-  if (platformMatch) {
-    const targetPlatform = platformMatch[1];
-    labels.add(`platform: ${targetPlatform}`);
+// Run all strategies
+const [
+  branchLabels,
+  componentLabels,
+  newComponentLabels,
+  newPlatformLabels,
+  coreLabels,
+  sizeLabels,
+  dashboardLabels,
+  actionsLabels,
+  codeOwnerLabels,
+  testLabels
+] = await Promise.all([
+  detectMergeBranch(),
+  detectComponentPlatforms(apiData),
+  detectNewComponents(),
+  detectNewPlatforms(apiData),
+  detectCoreChanges(),
+  detectPRSize(),
+  detectDashboardChanges(),
+  detectGitHubActionsChanges(),
+  detectCodeOwner(),
+  detectTests()
+]);
+// Combine all labels
+const allLabels = new Set([
+  ...branchLabels,
+  ...componentLabels,
+  ...newComponentLabels,
+  ...newPlatformLabels,
+  ...coreLabels,
+  ...sizeLabels,
+  ...dashboardLabels,
+  ...actionsLabels,
+  ...codeOwnerLabels,
+  ...testLabels
+]);
+// Detect requirements based on all other labels
+const requirementLabels = await detectRequirements(allLabels);
+for (const label of requirementLabels) {
+  allLabels.add(label);
+}
+let finalLabels = Array.from(allLabels);
+// For mega-PRs, exclude component labels if there are too many
+if (isMegaPR) {
+  const componentLabels = finalLabels.filter(label => label.startsWith('component: '));
+  if (componentLabels.length > COMPONENT_LABEL_THRESHOLD) {
+    finalLabels = finalLabels.filter(label => !label.startsWith('component: '));
+    console.log(`Mega-PR detected - excluding ${componentLabels.length} component labels (threshold: ${COMPONENT_LABEL_THRESHOLD})`);
   }
 }
-// Get PR files for new component/platform detection
-const { data: prFiles } = await github.rest.pulls.listFiles({
-  owner,
-  repo,
-  pull_number: pr_number
-});
-const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
-// Calculate changes excluding root tests directory for too-big calculation
-const testChanges = prFiles
-  .filter(file => file.filename.startsWith('tests/'))
-  .reduce((sum, file) => sum + (file.additions || 0) + (file.deletions || 0), 0);
-const nonTestChanges = totalChanges - testChanges;
-console.log(`Test changes: ${testChanges}, Non-test changes: ${nonTestChanges}`);
-// Strategy: New Component detection
-for (const file of addedFiles) {
-  // Check for new component files: esphome/components/{component}/__init__.py
-  const componentMatch = file.match(/^esphome\/components\/([^\/]+)\/__init__\.py$/);
-  if (componentMatch) {
-    try {
-      // Read the content directly from the filesystem since we have it checked out
-      const content = fs.readFileSync(file, 'utf8');
-      // Strategy: New Target Platform detection
-      if (content.includes('IS_TARGET_PLATFORM = True')) {
-        labels.add('new-target-platform');
-      }
-      labels.add('new-component');
-    } catch (error) {
-      console.log(`Failed to read content of ${file}:`, error.message);
-      // Fallback: assume it's a new component if we can't read the content
-      labels.add('new-component');
-    }
-  }
+// Handle too many labels (only for non-mega PRs)
+const tooManyLabels = finalLabels.length > MAX_LABELS;
+if (tooManyLabels && !isMegaPR && !finalLabels.includes('too-big')) {
+  finalLabels = ['too-big'];
 }
// Strategy: New Platform detection
for (const file of addedFiles) {
// Check for new platform files: esphome/components/{component}/{platform}.py
const platformFileMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\.py$/);
if (platformFileMatch) {
const [, component, platform] = platformFileMatch;
if (platformComponents.includes(platform)) {
labels.add('new-platform');
}
}
// Check for new platform files: esphome/components/{component}/{platform}/__init__.py
const platformDirMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\/__init__\.py$/);
if (platformDirMatch) {
const [, component, platform] = platformDirMatch;
if (platformComponents.includes(platform)) {
labels.add('new-platform');
}
}
}
const coreFiles = changedFiles.filter(file =>
file.startsWith('esphome/core/') ||
(file.startsWith('esphome/') && file.split('/').length === 2)
);
if (coreFiles.length > 0) {
labels.add('core');
}
// Strategy: Small PR detection
if (totalChanges <= smallPrThreshold) {
labels.add('small-pr');
}
// Strategy: Dashboard changes
const dashboardFiles = changedFiles.filter(file =>
file.startsWith('esphome/dashboard/') ||
file.startsWith('esphome/components/dashboard_import/')
);
if (dashboardFiles.length > 0) {
labels.add('dashboard');
}
// Strategy: GitHub Actions changes
const githubActionsFiles = changedFiles.filter(file =>
file.startsWith('.github/workflows/')
);
if (githubActionsFiles.length > 0) {
labels.add('github-actions');
}
// Strategy: Code Owner detection
try {
// Fetch CODEOWNERS file from the repository (in case it was changed in this PR)
const { data: codeownersFile } = await github.rest.repos.getContent({
owner,
repo,
path: 'CODEOWNERS',
});
const codeownersContent = Buffer.from(codeownersFile.content, 'base64').toString('utf8');
const prAuthor = context.payload.pull_request.user.login;
// Parse CODEOWNERS file
const codeownersLines = codeownersContent.split('\n')
.map(line => line.trim())
.filter(line => line && !line.startsWith('#'));
let isCodeOwner = false;
// Precompile CODEOWNERS patterns into regex objects
const codeownersRegexes = codeownersLines.map(line => {
const parts = line.split(/\s+/);
const pattern = parts[0];
const owners = parts.slice(1);
let regex;
if (pattern.endsWith('*')) {
// Directory pattern like "esphome/components/api/*"
const dir = pattern.slice(0, -1);
regex = new RegExp(`^${dir.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`);
} else if (pattern.includes('*')) {
// Glob pattern
const regexPattern = pattern
.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
.replace(/\\*/g, '.*');
regex = new RegExp(`^${regexPattern}$`);
} else {
// Exact match
regex = new RegExp(`^${pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}$`);
}
return { regex, owners };
});
for (const file of changedFiles) {
for (const { regex, owners } of codeownersRegexes) {
if (regex.test(file)) {
// Check if PR author is in the owners list
if (owners.some(owner => owner === `@${prAuthor}`)) {
isCodeOwner = true;
break;
}
}
}
if (isCodeOwner) break;
}
if (isCodeOwner) {
labels.add('by-code-owner');
}
} catch (error) {
console.log('Failed to read or parse CODEOWNERS file:', error.message);
}
// Strategy: Test detection
const testFiles = changedFiles.filter(file =>
file.startsWith('tests/')
);
if (testFiles.length > 0) {
labels.add('has-tests');
} else {
// Only check for needs-tests if this is a new component or new platform
if (labels.has('new-component') || labels.has('new-platform')) {
labels.add('needs-tests');
}
}
// Strategy: Documentation check for new components/platforms
if (labels.has('new-component') || labels.has('new-platform')) {
const prBody = context.payload.pull_request.body || '';
// Look for documentation PR links
// Patterns to match:
// - https://github.com/esphome/esphome-docs/pull/1234
// - esphome/esphome-docs#1234
const docsPrPatterns = [
/https:\/\/github\.com\/esphome\/esphome-docs\/pull\/\d+/,
/esphome\/esphome-docs#\d+/
];
const hasDocsLink = docsPrPatterns.some(pattern => pattern.test(prBody));
if (!hasDocsLink) {
labels.add('needs-docs');
}
}
// Convert Set to Array
let finalLabels = Array.from(labels);
 console.log('Computed labels:', finalLabels.join(', '));
-// Check if PR has mega-pr label
-const isMegaPR = currentLabels.includes('mega-pr');
-// Check if PR is too big (either too many labels or too many line changes)
-const tooManyLabels = finalLabels.length > maxLabels;
-const tooManyChanges = nonTestChanges > tooBigThreshold;
if ((tooManyLabels || tooManyChanges) && !isMegaPR) {
const originalLength = finalLabels.length;
console.log(`PR is too big - Labels: ${originalLength}, Changes: ${totalChanges} (non-test: ${nonTestChanges})`);
// Get all reviews on this PR to check for existing bot reviews
const { data: reviews } = await github.rest.pulls.listReviews({
owner,
repo,
pull_number: pr_number
});
// Check if there's already an active bot review requesting changes
const existingBotReview = reviews.find(review =>
review.user.type === 'Bot' &&
review.state === 'CHANGES_REQUESTED' &&
review.body && review.body.includes(BOT_COMMENT_MARKER)
);
// If too big due to line changes only, keep original labels and add too-big
// If too big due to too many labels, replace with just too-big
if (tooManyChanges && !tooManyLabels) {
finalLabels.push('too-big');
} else {
finalLabels = ['too-big'];
}
// Only create a new review if there isn't already an active bot review
if (!existingBotReview) {
// Create appropriate review message
let reviewBody;
if (tooManyLabels && tooManyChanges) {
reviewBody = `${BOT_COMMENT_MARKER}\nThis PR is too large with ${nonTestChanges} line changes (excluding tests) and affects ${originalLength} different components/areas. Please consider breaking it down into smaller, focused PRs to make review easier and reduce the risk of conflicts.\n\nFor guidance on breaking down large PRs, see: https://developers.esphome.io/contributing/submitting-your-work/#but-howwww-looonnnggg`;
} else if (tooManyLabels) {
reviewBody = `${BOT_COMMENT_MARKER}\nThis PR affects ${originalLength} different components/areas. Please consider breaking it down into smaller, focused PRs to make review easier and reduce the risk of conflicts.\n\nFor guidance on breaking down large PRs, see: https://developers.esphome.io/contributing/submitting-your-work/#but-howwww-looonnnggg`;
} else {
reviewBody = `${BOT_COMMENT_MARKER}\nThis PR is too large with ${nonTestChanges} line changes (excluding tests). Please consider breaking it down into smaller, focused PRs to make review easier and reduce the risk of conflicts.\n\nFor guidance on breaking down large PRs, see: https://developers.esphome.io/contributing/submitting-your-work/#but-howwww-looonnnggg`;
}
// Request changes on the PR
await github.rest.pulls.createReview({
owner,
repo,
pull_number: pr_number,
body: reviewBody,
event: 'REQUEST_CHANGES'
});
console.log('Created new "too big" review requesting changes');
} else {
console.log('Skipping review creation - existing bot review already requesting changes');
}
} else {
// Check if PR was previously too big but is now acceptable
const wasPreviouslyTooBig = currentLabels.includes('too-big');
if (wasPreviouslyTooBig || isMegaPR) {
console.log('PR is no longer too big or has mega-pr label - dismissing bot reviews');
// Get all reviews on this PR to find reviews to dismiss
const { data: reviews } = await github.rest.pulls.listReviews({
owner,
repo,
pull_number: pr_number
});
// Find bot reviews that requested changes
const botReviews = reviews.filter(review =>
review.user.type === 'Bot' &&
review.state === 'CHANGES_REQUESTED' &&
review.body && review.body.includes(BOT_COMMENT_MARKER)
);
// Dismiss bot reviews
for (const review of botReviews) {
try {
await github.rest.pulls.dismissReview({
owner,
repo,
pull_number: pr_number,
review_id: review.id,
message: isMegaPR ?
'Review dismissed: mega-pr label was added' :
'Review dismissed: PR size is now acceptable'
});
console.log(`Dismissed review ${review.id}`);
} catch (error) {
console.log(`Failed to dismiss review ${review.id}:`, error.message);
}
}
}
}
-// Add new labels
+// Handle reviews
+await handleReviews(finalLabels);
+// Apply labels
 if (finalLabels.length > 0) {
   console.log(`Adding labels: ${finalLabels.join(', ')}`);
   await github.rest.issues.addLabels({
@@ -497,11 +603,8 @@ jobs:
   });
 }
-// Remove old managed labels that are no longer needed
-const labelsToRemove = managedLabels.filter(label =>
-  !finalLabels.includes(label)
-);
+// Remove old managed labels
+const labelsToRemove = managedLabels.filter(label => !finalLabels.includes(label));
 for (const label of labelsToRemove) {
   console.log(`Removing label: ${label}`);
   try {
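The refactor's core idea — each labeling strategy is an independent detector that returns a set of labels, and the union is applied at the end — is easy to model outside GitHub Actions. A minimal Python sketch of that pattern; detector names and inputs here are illustrative, not the workflow's actual API:

    from collections.abc import Callable, Iterable

    def detect_core(files: Iterable[str]) -> set[str]:
        return {"core"} if any(f.startswith("esphome/core/") for f in files) else set()

    def detect_tests(files: Iterable[str]) -> set[str]:
        return {"has-tests"} if any(f.startswith("tests/") for f in files) else set()

    STRATEGIES: list[Callable[[Iterable[str]], set[str]]] = [detect_core, detect_tests]

    def compute_labels(files: list[str]) -> set[str]:
        labels: set[str] = set()
        for strategy in STRATEGIES:
            labels |= strategy(files)  # each detector stays side-effect free
        return labels

    print(compute_labels(["esphome/core/helpers.cpp", "tests/test_x.py"]))

Keeping each detector pure makes them individually testable and lets the workflow run them concurrently, as the Promise.all call above does.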


@@ -11,7 +11,7 @@ ci:
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ruff version.
-    rev: v0.12.4
+    rev: v0.12.5
     hooks:
       # Run the linter.
       - id: ruff


@@ -89,9 +89,9 @@ def choose_prompt(options, purpose: str = None):
 def choose_upload_log_host(
     default, check_default, show_ota, show_mqtt, show_api, purpose: str = None
 ):
-    options = []
-    for port in get_serial_ports():
-        options.append((f"{port.path} ({port.description})", port.path))
+    options = [
+        (f"{port.path} ({port.description})", port.path) for port in get_serial_ports()
+    ]
     if default == "SERIAL":
         return choose_prompt(options, purpose=purpose)
     if (show_ota and "ota" in CORE.config) or (show_api and "api" in CORE.config):
@@ -119,9 +119,7 @@ def mqtt_logging_enabled(mqtt_config):
         return False
     if CONF_TOPIC not in log_topic:
         return False
-    if log_topic.get(CONF_LEVEL, None) == "NONE":
-        return False
-    return True
+    return log_topic.get(CONF_LEVEL, None) != "NONE"

 def get_port_type(port):


@@ -14,6 +14,8 @@ with warnings.catch_warnings():
     from aioesphomeapi import APIClient, parse_log_message
     from aioesphomeapi.log_runner import async_run

+import contextlib
+
 from esphome.const import CONF_KEY, CONF_PASSWORD, CONF_PORT, __version__
 from esphome.core import CORE
@@ -66,7 +68,5 @@ async def async_run_logs(config: dict[str, Any], address: str) -> None:
 def run_logs(config: dict[str, Any], address: str) -> None:
     """Run the logs command."""
-    try:
+    with contextlib.suppress(KeyboardInterrupt):
         asyncio.run(async_run_logs(config, address))
-    except KeyboardInterrupt:
-        pass
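Several files in this merge swap the try/except/pass idiom for contextlib.suppress, the standard-library way to state that an exception is intentionally ignored. A self-contained illustration:

    import contextlib

    # Equivalent to: try: int("x") / except ValueError: pass
    with contextlib.suppress(ValueError):
        int("x")  # raises ValueError; it is suppressed and execution continues
    print("still running")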


@@ -266,8 +266,10 @@ async def delayed_off_filter_to_code(config, filter_id):
 async def autorepeat_filter_to_code(config, filter_id):
     timings = []
     if len(config) > 0:
-        for conf in config:
-            timings.append((conf[CONF_DELAY], conf[CONF_TIME_OFF], conf[CONF_TIME_ON]))
+        timings.extend(
+            (conf[CONF_DELAY], conf[CONF_TIME_OFF], conf[CONF_TIME_ON])
+            for conf in config
+        )
     else:
         timings.append(
             (
@@ -573,16 +575,15 @@ async def setup_binary_sensor_core_(var, config):
         await automation.build_automation(trigger, [], conf)
     for conf in config.get(CONF_ON_MULTI_CLICK, []):
-        timings = []
-        for tim in conf[CONF_TIMING]:
-            timings.append(
-                cg.StructInitializer(
-                    MultiClickTriggerEvent,
-                    ("state", tim[CONF_STATE]),
-                    ("min_length", tim[CONF_MIN_LENGTH]),
-                    ("max_length", tim.get(CONF_MAX_LENGTH, 4294967294)),
-                )
-            )
+        timings = [
+            cg.StructInitializer(
+                MultiClickTriggerEvent,
+                ("state", tim[CONF_STATE]),
+                ("min_length", tim[CONF_MIN_LENGTH]),
+                ("max_length", tim.get(CONF_MAX_LENGTH, 4294967294)),
+            )
+            for tim in conf[CONF_TIMING]
+        ]
         trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var, timings)
         if CONF_INVALID_COOLDOWN in conf:
             cg.add(trigger.set_invalid_cooldown(conf[CONF_INVALID_COOLDOWN]))


@@ -22,9 +22,8 @@ def validate_id(config):
     if CONF_CAN_ID in config:
         can_id = config[CONF_CAN_ID]
         id_ext = config[CONF_USE_EXTENDED_ID]
-        if not id_ext:
-            if can_id > 0x7FF:
-                raise cv.Invalid("Standard IDs must be 11 Bit (0x000-0x7ff / 0-2047)")
+        if not id_ext and can_id > 0x7FF:
+            raise cv.Invalid("Standard IDs must be 11 Bit (0x000-0x7ff / 0-2047)")
     return config
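This is one instance of a cleanup applied throughout the commit: nested guard clauses collapse into a single and-joined condition, which is behavior-preserving because the inner check only ever ran when the outer one passed. A generic sketch with illustrative key names (not the component's actual schema constants):

    def validate_id(config: dict) -> dict:
        # before: if not id_ext: / if can_id > 0x7FF: raise ...
        if not config["use_extended_id"] and config["can_id"] > 0x7FF:
            raise ValueError("Standard IDs must be 11 bit (0x000-0x7FF / 0-2047)")
        return config

    print(validate_id({"use_extended_id": True, "can_id": 0x800}))  # extended ID: passes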


@@ -74,8 +74,7 @@ def range_segment_list(input):
     if isinstance(input, list):
         for list_item in input:
             if isinstance(list_item, list):
-                for item in list_item:
-                    flat_list.append(item)
+                flat_list.extend(list_item)
             else:
                 flat_list.append(list_item)
     else:


@@ -973,14 +973,16 @@ def _write_idf_component_yml():
 # Called by writer.py
 def copy_files():
-    if CORE.using_arduino:
-        if "partitions.csv" not in CORE.data[KEY_ESP32][KEY_EXTRA_BUILD_FILES]:
-            write_file_if_changed(
-                CORE.relative_build_path("partitions.csv"),
-                get_arduino_partition_csv(
-                    CORE.platformio_options.get("board_upload.flash_size")
-                ),
-            )
+    if (
+        CORE.using_arduino
+        and "partitions.csv" not in CORE.data[KEY_ESP32][KEY_EXTRA_BUILD_FILES]
+    ):
+        write_file_if_changed(
+            CORE.relative_build_path("partitions.csv"),
+            get_arduino_partition_csv(
+                CORE.platformio_options.get("board_upload.flash_size")
+            ),
+        )
     if CORE.using_esp_idf:
         _write_sdkconfig()
         _write_idf_component_yml()
@@ -1000,7 +1002,7 @@ def copy_files():
             __version__,
         )
-    for _, file in CORE.data[KEY_ESP32][KEY_EXTRA_BUILD_FILES].items():
+    for file in CORE.data[KEY_ESP32][KEY_EXTRA_BUILD_FILES].values():
         if file[KEY_PATH].startswith("http"):
             import requests


@@ -140,20 +140,22 @@ VALUE_TYPES = {
 def validate_char_on_write(char_config):
-    if CONF_ON_WRITE in char_config:
-        if not char_config[CONF_WRITE] and not char_config[CONF_WRITE_NO_RESPONSE]:
-            raise cv.Invalid(
-                f"{CONF_ON_WRITE} requires the {CONF_WRITE} or {CONF_WRITE_NO_RESPONSE} property to be set"
-            )
+    if (
+        CONF_ON_WRITE in char_config
+        and not char_config[CONF_WRITE]
+        and not char_config[CONF_WRITE_NO_RESPONSE]
+    ):
+        raise cv.Invalid(
+            f"{CONF_ON_WRITE} requires the {CONF_WRITE} or {CONF_WRITE_NO_RESPONSE} property to be set"
+        )
     return char_config

 def validate_descriptor(desc_config):
-    if CONF_ON_WRITE in desc_config:
-        if not desc_config[CONF_WRITE]:
-            raise cv.Invalid(
-                f"{CONF_ON_WRITE} requires the {CONF_WRITE} property to be set"
-            )
+    if CONF_ON_WRITE in desc_config and not desc_config[CONF_WRITE]:
+        raise cv.Invalid(
+            f"{CONF_ON_WRITE} requires the {CONF_WRITE} property to be set"
+        )
     if CONF_MAX_LENGTH not in desc_config:
         value = desc_config[CONF_VALUE][CONF_DATA]
         if cg.is_template(value):


@@ -310,9 +310,7 @@ async def to_code(config):
     for conf in config.get(CONF_ON_BLE_ADVERTISE, []):
         trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
         if CONF_MAC_ADDRESS in conf:
-            addr_list = []
-            for it in conf[CONF_MAC_ADDRESS]:
-                addr_list.append(it.as_hex)
+            addr_list = [it.as_hex for it in conf[CONF_MAC_ADDRESS]]
             cg.add(trigger.set_addresses(addr_list))
         await automation.build_automation(trigger, [(ESPBTDeviceConstRef, "x")], conf)
     for conf in config.get(CONF_ON_BLE_SERVICE_DATA_ADVERTISE, []):


@@ -294,9 +294,8 @@ async def to_code(config):
             )
         )

-    if get_esp32_variant() == VARIANT_ESP32:
-        if CONF_IIR_FILTER in config:
-            cg.add(touch.set_iir_filter(config[CONF_IIR_FILTER]))
+    if get_esp32_variant() == VARIANT_ESP32 and CONF_IIR_FILTER in config:
+        cg.add(touch.set_iir_filter(config[CONF_IIR_FILTER]))

     if get_esp32_variant() == VARIANT_ESP32S2 or get_esp32_variant() == VARIANT_ESP32S3:
         if CONF_FILTER_MODE in config:


@@ -245,7 +245,7 @@ async def to_code(config):
     if ver <= cv.Version(2, 3, 0):
         # No ld script support
         ld_script = None
-    if ver <= cv.Version(2, 4, 2):
+    elif ver <= cv.Version(2, 4, 2):
         # Old ld script path
         ld_script = ld_scripts[0]
     else:


@@ -73,8 +73,7 @@ def ota_esphome_final_validate(config):
             else:
                 new_ota_conf.append(ota_conf)

-    for port_conf in merged_ota_esphome_configs_by_port.values():
-        new_ota_conf.append(port_conf)
+    new_ota_conf.extend(merged_ota_esphome_configs_by_port.values())

     full_conf[CONF_OTA] = new_ota_conf
     fv.full_config.set(full_conf)
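Replacing the append loop with list.extend is behavior-preserving because extend consumes any iterable, including a dict values view; a quick check:

    merged = {"3232": "ota_a", "8266": "ota_b"}
    out_loop, out_extend = [], []
    for conf in merged.values():
        out_loop.append(conf)
    out_extend.extend(merged.values())
    assert out_loop == out_extend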


@@ -112,7 +112,7 @@ def _is_framework_spi_polling_mode_supported():
             return True
         if cv.Version(5, 3, 0) > framework_version >= cv.Version(5, 2, 1):
             return True
-        if cv.Version(5, 2, 0) > framework_version >= cv.Version(5, 1, 4):
+        if cv.Version(5, 2, 0) > framework_version >= cv.Version(5, 1, 4):  # noqa: SIM103
             return True
         return False
     if CORE.using_arduino:


@@ -1,5 +1,97 @@
from esphome.automation import Trigger, build_automation, validate_automation
import esphome.codegen as cg
from esphome.components.esp8266 import CONF_RESTORE_FROM_FLASH, KEY_ESP8266
import esphome.config_validation as cv
from esphome.const import (
CONF_ID,
CONF_TRIGGER_ID,
PLATFORM_BK72XX,
PLATFORM_ESP32,
PLATFORM_ESP8266,
PLATFORM_LN882X,
PLATFORM_RTL87XX,
)
from esphome.core import CORE
from esphome.final_validate import full_config
CODEOWNERS = ["@anatoly-savchenkov"]
factory_reset_ns = cg.esphome_ns.namespace("factory_reset")
FactoryResetComponent = factory_reset_ns.class_("FactoryResetComponent", cg.Component)
FastBootTrigger = factory_reset_ns.class_("FastBootTrigger", Trigger, cg.Component)
CONF_MAX_DELAY = "max_delay"
CONF_RESETS_REQUIRED = "resets_required"
CONF_ON_INCREMENT = "on_increment"
def _validate(config):
if CONF_RESETS_REQUIRED in config:
return cv.only_on(
[
PLATFORM_BK72XX,
PLATFORM_ESP32,
PLATFORM_ESP8266,
PLATFORM_LN882X,
PLATFORM_RTL87XX,
]
)(config)
if CONF_ON_INCREMENT in config:
raise cv.Invalid(
f"'{CONF_ON_INCREMENT}' requires a value for '{CONF_RESETS_REQUIRED}'"
)
return config
CONFIG_SCHEMA = cv.All(
cv.Schema(
{
cv.GenerateID(): cv.declare_id(FactoryResetComponent),
cv.Optional(CONF_MAX_DELAY, default="10s"): cv.All(
cv.positive_time_period_seconds,
cv.Range(min=cv.TimePeriod(milliseconds=1000)),
),
cv.Optional(CONF_RESETS_REQUIRED): cv.positive_not_null_int,
cv.Optional(CONF_ON_INCREMENT): validate_automation(
{
cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(FastBootTrigger),
}
),
}
).extend(cv.COMPONENT_SCHEMA),
_validate,
)
def _final_validate(config):
if CORE.is_esp8266 and CONF_RESETS_REQUIRED in config:
fconfig = full_config.get()
if not fconfig.get_config_for_path([KEY_ESP8266, CONF_RESTORE_FROM_FLASH]):
raise cv.Invalid(
"'resets_required' needs 'restore_from_flash' to be enabled in the 'esp8266' configuration"
)
return config
FINAL_VALIDATE_SCHEMA = _final_validate
async def to_code(config):
if reset_count := config.get(CONF_RESETS_REQUIRED):
var = cg.new_Pvariable(
config[CONF_ID],
reset_count,
config[CONF_MAX_DELAY].total_milliseconds,
)
await cg.register_component(var, config)
for conf in config.get(CONF_ON_INCREMENT, []):
trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
await build_automation(
trigger,
[
(cg.uint8, "x"),
(cg.uint8, "target"),
],
conf,
)
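A hedged model of the counter semantics this component implements (compare the C++ in the next file): each power-on reset within max_delay bumps a persisted counter, any other reset type or a quiet interval clears it, and reaching resets_required wipes preferences. Names below are illustrative, not the component's API:

    def on_boot(count: int, required: int, power_cycled: bool) -> int:
        """Return the new persisted counter after one boot."""
        if not power_cycled:
            return 0  # software/brown-out resets clear the counter
        count += 1
        if count == required:
            print("factory reset triggered")  # stands in for global_preferences->reset()
        return count

    count = 0
    for _ in range(5):
        count = on_boot(count, required=5, power_cycled=True)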


@@ -0,0 +1,76 @@
#include "factory_reset.h"
#include "esphome/core/application.h"
#include "esphome/core/hal.h"
#include "esphome/core/log.h"
#include <cinttypes>
#if !defined(USE_RP2040) && !defined(USE_HOST)
namespace esphome {
namespace factory_reset {
static const char *const TAG = "factory_reset";
static const uint32_t POWER_CYCLES_KEY = 0xFA5C0DE;
static bool was_power_cycled() {
#ifdef USE_ESP32
return esp_reset_reason() == ESP_RST_POWERON;
#endif
#ifdef USE_ESP8266
auto reset_reason = EspClass::getResetReason();
return strcasecmp(reset_reason.c_str(), "power On") == 0 || strcasecmp(reset_reason.c_str(), "external system") == 0;
#endif
#ifdef USE_LIBRETINY
auto reason = lt_get_reboot_reason();
return reason == REBOOT_REASON_POWER || reason == REBOOT_REASON_HARDWARE;
#endif
}
void FactoryResetComponent::dump_config() {
uint8_t count = 0;
this->flash_.load(&count);
ESP_LOGCONFIG(TAG, "Factory Reset by Reset:");
ESP_LOGCONFIG(TAG,
" Max interval between resets %" PRIu32 " seconds\n"
" Current count: %u\n"
" Factory reset after %u resets",
this->max_interval_ / 1000, count, this->required_count_);
}
void FactoryResetComponent::save_(uint8_t count) {
this->flash_.save(&count);
global_preferences->sync();
this->defer([count, this] { this->increment_callback_.call(count, this->required_count_); });
}
void FactoryResetComponent::setup() {
this->flash_ = global_preferences->make_preference<uint8_t>(POWER_CYCLES_KEY, true);
if (was_power_cycled()) {
uint8_t count = 0;
this->flash_.load(&count);
// this is a power on reset or external system reset
count++;
if (count == this->required_count_) {
ESP_LOGW(TAG, "Reset count reached, factory resetting");
global_preferences->reset();
// delay to allow log to be sent
delay(100); // NOLINT
App.safe_reboot(); // should not return
}
this->save_(count);
ESP_LOGD(TAG, "Power on reset detected, incremented count to %u", count);
this->set_timeout(this->max_interval_, [this]() {
ESP_LOGD(TAG, "No reset in the last %" PRIu32 " seconds, resetting count", this->max_interval_ / 1000);
this->save_(0); // reset count
});
} else {
this->save_(0); // reset count if not a power cycle
}
}
} // namespace factory_reset
} // namespace esphome
#endif // !defined(USE_RP2040) && !defined(USE_HOST)


@@ -0,0 +1,43 @@
#pragma once
#include "esphome/core/component.h"
#include "esphome/core/automation.h"
#include "esphome/core/preferences.h"
#if !defined(USE_RP2040) && !defined(USE_HOST)
#ifdef USE_ESP32
#include <esp_system.h>
#endif
namespace esphome {
namespace factory_reset {
class FactoryResetComponent : public Component {
public:
FactoryResetComponent(uint8_t required_count, uint32_t max_interval)
: required_count_(required_count), max_interval_(max_interval) {}
void dump_config() override;
void setup() override;
void add_increment_callback(std::function<void(uint8_t, uint8_t)> &&callback) {
this->increment_callback_.add(std::move(callback));
}
protected:
~FactoryResetComponent() = default;
void save_(uint8_t count);
ESPPreferenceObject flash_{}; // saves the number of fast power cycles
uint8_t required_count_; // The number of boot attempts before fast boot is enabled
uint32_t max_interval_; // max interval between power cycles
CallbackManager<void(uint8_t, uint8_t)> increment_callback_{};
};
class FastBootTrigger : public Trigger<uint8_t, uint8_t> {
public:
explicit FastBootTrigger(FactoryResetComponent *parent) {
parent->add_increment_callback([this](uint8_t current, uint8_t target) { this->trigger(current, target); });
}
};
} // namespace factory_reset
} // namespace esphome
#endif // !defined(USE_RP2040) && !defined(USE_HOST)


@@ -55,9 +55,7 @@ CONFIG_SCHEMA = cv.All(
 async def to_code(config):
     var = await fastled_base.new_fastled_light(config)
-    rgb_order = cg.RawExpression(
-        config[CONF_RGB_ORDER] if CONF_RGB_ORDER in config else "RGB"
-    )
+    rgb_order = cg.RawExpression(config.get(CONF_RGB_ORDER, "RGB"))
     data_rate = None

     if CONF_DATA_RATE in config:


@@ -84,7 +84,6 @@ CONFIG_SCHEMA = cv.All(
     )
     .extend(cv.polling_component_schema("20s"))
     .extend(uart.UART_DEVICE_SCHEMA),
-    cv.only_with_arduino,
 )

 FINAL_VALIDATE_SCHEMA = uart.final_validate_device_schema("gps", require_rx=True)
@@ -123,4 +122,9 @@ async def to_code(config):
         cg.add(var.set_hdop_sensor(sens))

-    # https://platformio.org/lib/show/1655/TinyGPSPlus
-    cg.add_library("mikalhart/TinyGPSPlus", "1.1.0")
+    # Using fork of TinyGPSPlus patched to build on ESP-IDF
+    cg.add_library(
+        "TinyGPSPlus",
+        None,
+        "https://github.com/esphome/TinyGPSPlus.git#v1.1.0",
+    )


@@ -1,5 +1,3 @@
-#ifdef USE_ARDUINO
-
 #include "gps.h"
 #include "esphome/core/log.h"
@@ -22,73 +20,76 @@ void GPS::dump_config() {
 }

 void GPS::update() {
-  if (this->latitude_sensor_ != nullptr)
+  if (this->latitude_sensor_ != nullptr) {
     this->latitude_sensor_->publish_state(this->latitude_);
+  }
-  if (this->longitude_sensor_ != nullptr)
+  if (this->longitude_sensor_ != nullptr) {
     this->longitude_sensor_->publish_state(this->longitude_);
+  }
-  if (this->speed_sensor_ != nullptr)
+  if (this->speed_sensor_ != nullptr) {
     this->speed_sensor_->publish_state(this->speed_);
+  }
-  if (this->course_sensor_ != nullptr)
+  if (this->course_sensor_ != nullptr) {
     this->course_sensor_->publish_state(this->course_);
+  }
-  if (this->altitude_sensor_ != nullptr)
+  if (this->altitude_sensor_ != nullptr) {
     this->altitude_sensor_->publish_state(this->altitude_);
+  }
-  if (this->satellites_sensor_ != nullptr)
+  if (this->satellites_sensor_ != nullptr) {
     this->satellites_sensor_->publish_state(this->satellites_);
+  }
-  if (this->hdop_sensor_ != nullptr)
+  if (this->hdop_sensor_ != nullptr) {
     this->hdop_sensor_->publish_state(this->hdop_);
+  }
 }

 void GPS::loop() {
   while (this->available() > 0 && !this->has_time_) {
-    if (this->tiny_gps_.encode(this->read())) {
-      if (this->tiny_gps_.location.isUpdated()) {
-        this->latitude_ = this->tiny_gps_.location.lat();
-        this->longitude_ = this->tiny_gps_.location.lng();
-        ESP_LOGD(TAG, "Location:");
-        ESP_LOGD(TAG, "  Lat: %.6f °", this->latitude_);
-        ESP_LOGD(TAG, "  Lon: %.6f °", this->longitude_);
-      }
-      if (this->tiny_gps_.speed.isUpdated()) {
-        this->speed_ = this->tiny_gps_.speed.kmph();
-        ESP_LOGD(TAG, "Speed: %.3f km/h", this->speed_);
-      }
-      if (this->tiny_gps_.course.isUpdated()) {
-        this->course_ = this->tiny_gps_.course.deg();
-        ESP_LOGD(TAG, "Course: %.2f °", this->course_);
-      }
-      if (this->tiny_gps_.altitude.isUpdated()) {
-        this->altitude_ = this->tiny_gps_.altitude.meters();
-        ESP_LOGD(TAG, "Altitude: %.2f m", this->altitude_);
-      }
-      if (this->tiny_gps_.satellites.isUpdated()) {
-        this->satellites_ = this->tiny_gps_.satellites.value();
-        ESP_LOGD(TAG, "Satellites: %d", this->satellites_);
-      }
-      if (this->tiny_gps_.hdop.isUpdated()) {
-        this->hdop_ = this->tiny_gps_.hdop.hdop();
-        ESP_LOGD(TAG, "HDOP: %.3f", this->hdop_);
-      }
-      for (auto *listener : this->listeners_) {
-        listener->on_update(this->tiny_gps_);
-      }
-    }
+    if (!this->tiny_gps_.encode(this->read())) {
+      return;
+    }
+    if (this->tiny_gps_.location.isUpdated()) {
+      this->latitude_ = this->tiny_gps_.location.lat();
+      this->longitude_ = this->tiny_gps_.location.lng();
+      ESP_LOGV(TAG, "Latitude, Longitude: %.6f°, %.6f°", this->latitude_, this->longitude_);
+    }
+    if (this->tiny_gps_.speed.isUpdated()) {
+      this->speed_ = this->tiny_gps_.speed.kmph();
+      ESP_LOGV(TAG, "Speed: %.3f km/h", this->speed_);
+    }
+    if (this->tiny_gps_.course.isUpdated()) {
+      this->course_ = this->tiny_gps_.course.deg();
+      ESP_LOGV(TAG, "Course: %.2f°", this->course_);
+    }
+    if (this->tiny_gps_.altitude.isUpdated()) {
+      this->altitude_ = this->tiny_gps_.altitude.meters();
+      ESP_LOGV(TAG, "Altitude: %.2f m", this->altitude_);
+    }
+    if (this->tiny_gps_.satellites.isUpdated()) {
+      this->satellites_ = this->tiny_gps_.satellites.value();
+      ESP_LOGV(TAG, "Satellites: %d", this->satellites_);
+    }
+    if (this->tiny_gps_.hdop.isUpdated()) {
+      this->hdop_ = this->tiny_gps_.hdop.hdop();
+      ESP_LOGV(TAG, "HDOP: %.3f", this->hdop_);
+    }
+    for (auto *listener : this->listeners_) {
+      listener->on_update(this->tiny_gps_);
+    }
   }
 }

 }  // namespace gps
 }  // namespace esphome
-
-#endif  // USE_ARDUINO


@@ -1,10 +1,8 @@
 #pragma once

-#ifdef USE_ARDUINO
-
-#include "esphome/core/component.h"
-#include "esphome/components/uart/uart.h"
 #include "esphome/components/sensor/sensor.h"
+#include "esphome/components/uart/uart.h"
+#include "esphome/core/component.h"

 #include <TinyGPSPlus.h>
 #include <vector>
@@ -53,8 +51,9 @@ class GPS : public PollingComponent, public uart::UARTDevice {
   float speed_{NAN};
   float course_{NAN};
   float altitude_{NAN};
-  uint16_t satellites_{0};
   float hdop_{NAN};
+  uint16_t satellites_{0};
+  bool has_time_{false};

   sensor::Sensor *latitude_sensor_{nullptr};
   sensor::Sensor *longitude_sensor_{nullptr};
@@ -64,12 +63,9 @@ class GPS : public PollingComponent, public uart::UARTDevice {
   sensor::Sensor *satellites_sensor_{nullptr};
   sensor::Sensor *hdop_sensor_{nullptr};

-  bool has_time_{false};
   TinyGPSPlus tiny_gps_;
   std::vector<GPSListener *> listeners_{};
 };

 }  // namespace gps
 }  // namespace esphome
-
-#endif  // USE_ARDUINO


@@ -1,5 +1,3 @@
-#ifdef USE_ARDUINO
-
 #include "gps_time.h"
 #include "esphome/core/log.h"
@@ -9,12 +7,10 @@ namespace gps {
 static const char *const TAG = "gps.time";

 void GPSTime::from_tiny_gps_(TinyGPSPlus &tiny_gps) {
-  if (!tiny_gps.time.isValid() || !tiny_gps.date.isValid())
-    return;
-  if (!tiny_gps.time.isUpdated() || !tiny_gps.date.isUpdated())
-    return;
-  if (tiny_gps.date.year() < 2019)
+  if (!tiny_gps.time.isValid() || !tiny_gps.date.isValid() || !tiny_gps.time.isUpdated() ||
+      !tiny_gps.date.isUpdated() || tiny_gps.date.year() < 2025) {
     return;
+  }

   ESPTime val{};
   val.year = tiny_gps.date.year();
@@ -34,5 +30,3 @@ void GPSTime::from_tiny_gps_(TinyGPSPlus &tiny_gps) {

 }  // namespace gps
 }  // namespace esphome
-
-#endif  // USE_ARDUINO


@@ -1,10 +1,8 @@
 #pragma once

-#ifdef USE_ARDUINO
-
-#include "esphome/core/component.h"
-#include "esphome/components/time/real_time_clock.h"
 #include "esphome/components/gps/gps.h"
+#include "esphome/components/time/real_time_clock.h"
+#include "esphome/core/component.h"

 namespace esphome {
 namespace gps {
@@ -13,8 +11,9 @@ class GPSTime : public time::RealTimeClock, public GPSListener {
  public:
   void update() override { this->from_tiny_gps_(this->get_tiny_gps()); };
   void on_update(TinyGPSPlus &tiny_gps) override {
-    if (!this->has_time_)
+    if (!this->has_time_) {
       this->from_tiny_gps_(tiny_gps);
+    }
   }

  protected:
@@ -24,5 +23,3 @@ class GPSTime : public time::RealTimeClock, public GPSListener {

 }  // namespace gps
 }  // namespace esphome
-
-#endif  // USE_ARDUINO


@@ -116,7 +116,7 @@ GRAPH_SCHEMA = cv.Schema(
 def _relocate_fields_to_subfolder(config, subfolder, subschema):
-    fields = [k.schema for k in subschema.schema.keys()]
+    fields = [k.schema for k in subschema.schema]
     fields.remove(CONF_ID)
     if subfolder in config:
         # Ensure no ambiguous fields in base of config


@@ -70,9 +70,8 @@ def validate_url(value):
 def validate_ssl_verification(config):
     error_message = ""

-    if CORE.is_esp32:
-        if not CORE.using_esp_idf and config[CONF_VERIFY_SSL]:
-            error_message = "ESPHome supports certificate verification only via ESP-IDF"
+    if CORE.is_esp32 and not CORE.using_esp_idf and config[CONF_VERIFY_SSL]:
+        error_message = "ESPHome supports certificate verification only via ESP-IDF"

     if CORE.is_rp2040 and config[CONF_VERIFY_SSL]:
         error_message = "ESPHome does not support certificate verification on RP2040"


@@ -66,11 +66,10 @@ PROTOCOL_NAMES = {
 def _validate(config):
     for conf, models in SUPPORTED_OPTIONS.items():
-        if conf in config:
-            if config[CONF_MODEL] not in models:
-                raise cv.Invalid(
-                    f"{conf} is only available on {' and '.join(models)}, not {config[CONF_MODEL]}"
-                )
+        if conf in config and config[CONF_MODEL] not in models:
+            raise cv.Invalid(
+                f"{conf} is only available on {' and '.join(models)}, not {config[CONF_MODEL]}"
+            )
     return config


@@ -243,10 +243,7 @@ def _final_validate(_):
 def use_legacy():
-    if CORE.using_esp_idf:
-        if not _use_legacy_driver:
-            return False
-    return True
+    return not (CORE.using_esp_idf and not _use_legacy_driver)

 FINAL_VALIDATE_SCHEMA = _final_validate
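The rewritten use_legacy() is a De Morgan-style collapse of the old nested form; a two-variable truth-table check confirms the one-liner is equivalent:

    def use_legacy_old(using_esp_idf: bool, legacy_flag: bool) -> bool:
        if using_esp_idf:
            if not legacy_flag:
                return False
        return True

    def use_legacy_new(using_esp_idf: bool, legacy_flag: bool) -> bool:
        return not (using_esp_idf and not legacy_flag)

    for idf in (True, False):
        for flag in (True, False):
            assert use_legacy_old(idf, flag) == use_legacy_new(idf, flag)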


@@ -44,9 +44,8 @@ PDM_VARIANTS = [esp32.const.VARIANT_ESP32, esp32.const.VARIANT_ESP32S3]
 def _validate_esp32_variant(config):
     variant = esp32.get_esp32_variant()
     if config[CONF_ADC_TYPE] == "external":
-        if config[CONF_PDM]:
-            if variant not in PDM_VARIANTS:
-                raise cv.Invalid(f"{variant} does not support PDM")
+        if config[CONF_PDM] and variant not in PDM_VARIANTS:
+            raise cv.Invalid(f"{variant} does not support PDM")
         return config
     if config[CONF_ADC_TYPE] == "internal":
         if variant not in INTERNAL_ADC_VARIANTS:
@@ -122,9 +121,8 @@ CONFIG_SCHEMA = cv.All(
 def _final_validate(config):
-    if not use_legacy():
-        if config[CONF_ADC_TYPE] == "internal":
-            raise cv.Invalid("Internal ADC is only compatible with legacy i2s driver.")
+    if not use_legacy() and config[CONF_ADC_TYPE] == "internal":
+        raise cv.Invalid("Internal ADC is only compatible with legacy i2s driver.")

 FINAL_VALIDATE_SCHEMA = _final_validate

View File

@@ -138,9 +138,10 @@ def _validate(config):
     ]:
         raise cv.Invalid("Selected model can't run on ESP8266.")
-    if model == "CUSTOM":
-        if CONF_INIT_SEQUENCE not in config or CONF_DIMENSIONS not in config:
-            raise cv.Invalid("CUSTOM model requires init_sequence and dimensions")
+    if model == "CUSTOM" and (
+        CONF_INIT_SEQUENCE not in config or CONF_DIMENSIONS not in config
+    ):
+        raise cv.Invalid("CUSTOM model requires init_sequence and dimensions")
     return config

View File

@@ -1,5 +1,6 @@
 from __future__ import annotations
+import contextlib
 import hashlib
 import io
 import logging
@@ -174,9 +175,8 @@ class ImageGrayscale(ImageEncoder):
         b = 1
         if self.invert_alpha:
             b ^= 0xFF
-        if self.transparency == CONF_ALPHA_CHANNEL:
-            if a != 0xFF:
-                b = a
+        if self.transparency == CONF_ALPHA_CHANNEL and a != 0xFF:
+            b = a
         self.data[self.index] = b
         self.index += 1
@@ -672,10 +672,8 @@ async def write_image(config, all_frames=False):
     invert_alpha = config[CONF_INVERT_ALPHA]
     frame_count = 1
     if all_frames:
-        try:
+        with contextlib.suppress(AttributeError):
             frame_count = image.n_frames
-        except AttributeError:
-            pass
         if frame_count <= 1:
             _LOGGER.warning("Image file %s has no animation frames", path)

View File

@@ -27,14 +27,13 @@ def validate_logger(config):
     logger_conf = fv.full_config.get()[CONF_LOGGER]
     if logger_conf[CONF_BAUD_RATE] == 0:
         raise cv.Invalid("improv_serial requires the logger baud_rate to be not 0")
-    if CORE.using_esp_idf:
-        if (
-            logger_conf[CONF_HARDWARE_UART] == USB_CDC
-            and get_esp32_variant() == VARIANT_ESP32S3
-        ):
-            raise cv.Invalid(
-                "improv_serial does not support the selected logger hardware_uart"
-            )
+    if CORE.using_esp_idf and (
+        logger_conf[CONF_HARDWARE_UART] == USB_CDC
+        and get_esp32_variant() == VARIANT_ESP32S3
+    ):
+        raise cv.Invalid(
+            "improv_serial does not support the selected logger hardware_uart"
+        )
     return config

View File

@@ -78,11 +78,8 @@ def validate_model_config(config):
     model = config[CONF_MODEL]
     for key in config:
-        if key in SENSOR_MODEL_OPTIONS:
-            if model not in SENSOR_MODEL_OPTIONS[key]:
-                raise cv.Invalid(
-                    f"Device model '{model}' does not support '{key}' sensor"
-                )
+        if key in SENSOR_MODEL_OPTIONS and model not in SENSOR_MODEL_OPTIONS[key]:
+            raise cv.Invalid(f"Device model '{model}' does not support '{key}' sensor")
     tempco = config[CONF_TEMPERATURE_COEFFICIENT]
     if tempco > 0 and model not in ["INA228", "INA229"]:

View File

@@ -41,9 +41,7 @@ CONFIG_SCHEMA = lcd_base.LCD_SCHEMA.extend(
 async def to_code(config):
     var = cg.new_Pvariable(config[CONF_ID])
     await lcd_base.setup_lcd_display(var, config)
-    pins_ = []
-    for conf in config[CONF_DATA_PINS]:
-        pins_.append(await cg.gpio_pin_expression(conf))
+    pins_ = [await cg.gpio_pin_expression(conf) for conf in config[CONF_DATA_PINS]]
     cg.add(var.set_data_pins(*pins_))
     enable = await cg.gpio_pin_expression(config[CONF_ENABLE_PIN])
     cg.add(var.set_enable_pin(enable))

View File

@@ -56,7 +56,8 @@ async def to_code(config):
         sens = await text_sensor.new_text_sensor(mac_address_config)
         cg.add(ld2450_component.set_mac_text_sensor(sens))
     for n in range(MAX_TARGETS):
-        if direction_conf := config.get(f"target_{n + 1}"):
-            if direction_config := direction_conf.get(CONF_DIRECTION):
-                sens = await text_sensor.new_text_sensor(direction_config)
-                cg.add(ld2450_component.set_direction_text_sensor(n, sens))
+        if (direction_conf := config.get(f"target_{n + 1}")) and (
+            direction_config := direction_conf.get(CONF_DIRECTION)
+        ):
+            sens = await text_sensor.new_text_sensor(direction_config)
+            cg.add(ld2450_component.set_direction_text_sensor(n, sens))
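Chaining two walrus assignments with `and` keeps both names bound for the body while removing a nesting level; `and` short-circuits, so the second `.get()` only runs when the first lookup found a value. A standalone sketch with invented config keys:

    config = {"target_1": {"direction": {"name": "Direction 1"}}, "target_2": {}}

    for n in range(3):
        if (target_conf := config.get(f"target_{n + 1}")) and (
            direction_conf := target_conf.get("direction")
        ):
            # Both walrus targets are in scope here; prints only for target_1.
            print(n, direction_conf["name"])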

View File

@@ -291,31 +291,30 @@ async def random_effect_to_code(config, effect_id):
 )
 async def strobe_effect_to_code(config, effect_id):
     var = cg.new_Pvariable(effect_id, config[CONF_NAME])
-    colors = []
-    for color in config.get(CONF_COLORS, []):
-        colors.append(
-            cg.StructInitializer(
-                StrobeLightEffectColor,
-                (
-                    "color",
-                    LightColorValues(
-                        color.get(CONF_COLOR_MODE, ColorMode.UNKNOWN),
-                        color[CONF_STATE],
-                        color[CONF_BRIGHTNESS],
-                        color[CONF_COLOR_BRIGHTNESS],
-                        color[CONF_RED],
-                        color[CONF_GREEN],
-                        color[CONF_BLUE],
-                        color[CONF_WHITE],
-                        color.get(CONF_COLOR_TEMPERATURE, 0.0),
-                        color[CONF_COLD_WHITE],
-                        color[CONF_WARM_WHITE],
-                    ),
-                ),
-                ("duration", color[CONF_DURATION]),
-                ("transition_length", color[CONF_TRANSITION_LENGTH]),
-            )
-        )
+    colors = [
+        cg.StructInitializer(
+            StrobeLightEffectColor,
+            (
+                "color",
+                LightColorValues(
+                    color.get(CONF_COLOR_MODE, ColorMode.UNKNOWN),
+                    color[CONF_STATE],
+                    color[CONF_BRIGHTNESS],
+                    color[CONF_COLOR_BRIGHTNESS],
+                    color[CONF_RED],
+                    color[CONF_GREEN],
+                    color[CONF_BLUE],
+                    color[CONF_WHITE],
+                    color.get(CONF_COLOR_TEMPERATURE, 0.0),
+                    color[CONF_COLD_WHITE],
+                    color[CONF_WARM_WHITE],
+                ),
+            ),
+            ("duration", color[CONF_DURATION]),
+            ("transition_length", color[CONF_TRANSITION_LENGTH]),
+        )
+        for color in config.get(CONF_COLORS, [])
+    ]
     cg.add(var.set_colors(colors))
     return var
@@ -404,20 +403,19 @@ async def addressable_color_wipe_effect_to_code(config, effect_id):
     var = cg.new_Pvariable(effect_id, config[CONF_NAME])
     cg.add(var.set_add_led_interval(config[CONF_ADD_LED_INTERVAL]))
     cg.add(var.set_reverse(config[CONF_REVERSE]))
-    colors = []
-    for color in config.get(CONF_COLORS, []):
-        colors.append(
-            cg.StructInitializer(
-                AddressableColorWipeEffectColor,
-                ("r", int(round(color[CONF_RED] * 255))),
-                ("g", int(round(color[CONF_GREEN] * 255))),
-                ("b", int(round(color[CONF_BLUE] * 255))),
-                ("w", int(round(color[CONF_WHITE] * 255))),
-                ("random", color[CONF_RANDOM]),
-                ("num_leds", color[CONF_NUM_LEDS]),
-                ("gradient", color[CONF_GRADIENT]),
-            )
-        )
+    colors = [
+        cg.StructInitializer(
+            AddressableColorWipeEffectColor,
+            ("r", int(round(color[CONF_RED] * 255))),
+            ("g", int(round(color[CONF_GREEN] * 255))),
+            ("b", int(round(color[CONF_BLUE] * 255))),
+            ("w", int(round(color[CONF_WHITE] * 255))),
+            ("random", color[CONF_RANDOM]),
+            ("num_leds", color[CONF_NUM_LEDS]),
+            ("gradient", color[CONF_GRADIENT]),
+        )
+        for color in config.get(CONF_COLORS, [])
+    ]
     cg.add(var.set_colors(colors))
     return var
@@ -526,7 +524,7 @@ def validate_effects(allowed_effects):
     errors = []
     names = set()
     for i, x in enumerate(value):
-        key = next(it for it in x.keys())
+        key = next(it for it in x)
         if key not in allowed_effects:
             errors.append(
                 cv.Invalid(
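Both effect builders above are the same mechanical change: an accumulate-with-`append` loop becomes a list comprehension (ruff's PERF401 rule). A reduced sketch of the shape, with plain tuples standing in for `cg.StructInitializer`:

    colors_config = [{"red": 1.0, "green": 0.5, "blue": 0.0}]

    # Before: build the list imperatively.
    colors = []
    for color in colors_config:
        colors.append((int(round(color["red"] * 255)),
                       int(round(color["green"] * 255)),
                       int(round(color["blue"] * 255))))

    # After: the same element expression hoisted into a comprehension.
    colors2 = [
        (int(round(color["red"] * 255)),
         int(round(color["green"] * 255)),
         int(round(color["blue"] * 255)))
        for color in colors_config
    ]
    assert colors == colors2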

View File

@@ -346,14 +346,13 @@ async def to_code(config):
     if config.get(CONF_ESP8266_STORE_LOG_STRINGS_IN_FLASH):
         cg.add_build_flag("-DUSE_STORE_LOG_STR_IN_FLASH")
-    if CORE.using_arduino:
-        if config[CONF_HARDWARE_UART] == USB_CDC:
-            cg.add_build_flag("-DARDUINO_USB_CDC_ON_BOOT=1")
-            if CORE.is_esp32 and get_esp32_variant() in (
-                VARIANT_ESP32C3,
-                VARIANT_ESP32C6,
-            ):
-                cg.add_build_flag("-DARDUINO_USB_MODE=1")
+    if CORE.using_arduino and config[CONF_HARDWARE_UART] == USB_CDC:
+        cg.add_build_flag("-DARDUINO_USB_CDC_ON_BOOT=1")
+        if CORE.is_esp32 and get_esp32_variant() in (
+            VARIANT_ESP32C3,
+            VARIANT_ESP32C6,
+        ):
+            cg.add_build_flag("-DARDUINO_USB_MODE=1")
     if CORE.using_esp_idf:
         if config[CONF_HARDWARE_UART] == USB_CDC:

View File

@@ -201,9 +201,8 @@ def final_validation(configs):
     multi_conf_validate(configs)
     global_config = full_config.get()
     for config in configs:
-        if pages := config.get(CONF_PAGES):
-            if all(p[df.CONF_SKIP] for p in pages):
-                raise cv.Invalid("At least one page must not be skipped")
+        if (pages := config.get(CONF_PAGES)) and all(p[df.CONF_SKIP] for p in pages):
+            raise cv.Invalid("At least one page must not be skipped")
         for display_id in config[df.CONF_DISPLAYS]:
             path = global_config.get_path_for_id(display_id)[:-1]
             display = global_config.get_config_for_path(path)

View File

@@ -28,9 +28,10 @@ CONF_HAS_PULLDOWNS = "has_pulldowns"
 def check_keys(obj):
-    if CONF_KEYS in obj:
-        if len(obj[CONF_KEYS]) != len(obj[CONF_ROWS]) * len(obj[CONF_COLUMNS]):
-            raise cv.Invalid("The number of key codes must equal the number of buttons")
+    if CONF_KEYS in obj and len(obj[CONF_KEYS]) != len(obj[CONF_ROWS]) * len(
+        obj[CONF_COLUMNS]
+    ):
+        raise cv.Invalid("The number of key codes must equal the number of buttons")
     return obj

View File

@@ -124,11 +124,10 @@ async def to_code(config):
     if task_stack_in_psram := config.get(CONF_TASK_STACK_IN_PSRAM):
         cg.add(var.set_task_stack_in_psram(task_stack_in_psram))
-    if task_stack_in_psram:
-        if config[CONF_TASK_STACK_IN_PSRAM]:
-            esp32.add_idf_sdkconfig_option(
-                "CONFIG_SPIRAM_ALLOW_STACK_EXTERNAL_MEMORY", True
-            )
+    if task_stack_in_psram and config[CONF_TASK_STACK_IN_PSRAM]:
+        esp32.add_idf_sdkconfig_option(
+            "CONFIG_SPIRAM_ALLOW_STACK_EXTERNAL_MEMORY", True
+        )
     for speaker_config in config[CONF_SOURCE_SPEAKERS]:
         source_speaker = cg.new_Pvariable(speaker_config[CONF_ID])

View File

@@ -63,11 +63,13 @@ def _validate(config):
             raise cv.Invalid(
                 f"{axis}: {CONF_RESOLUTION} cannot be {res} with {CONF_TEMPERATURE_COMPENSATION} enabled"
             )
-    if config[CONF_HALLCONF] == 0xC:
-        if (config[CONF_OVERSAMPLING], config[CONF_FILTER]) in [(0, 0), (1, 0), (0, 1)]:
-            raise cv.Invalid(
-                f"{CONF_OVERSAMPLING}=={config[CONF_OVERSAMPLING]} and {CONF_FILTER}=={config[CONF_FILTER]} not allowed with {CONF_HALLCONF}=={config[CONF_HALLCONF]:#02x}"
-            )
+    if config[CONF_HALLCONF] == 0xC and (
+        config[CONF_OVERSAMPLING],
+        config[CONF_FILTER],
+    ) in [(0, 0), (1, 0), (0, 1)]:
+        raise cv.Invalid(
+            f"{CONF_OVERSAMPLING}=={config[CONF_OVERSAMPLING]} and {CONF_FILTER}=={config[CONF_FILTER]} not allowed with {CONF_HALLCONF}=={config[CONF_HALLCONF]:#02x}"
+        )
     return config

View File

@@ -56,12 +56,13 @@ def _final_validate(config):
     for binary_sensor in binary_sensors:
         if binary_sensor.get(CONF_MPR121_ID) == config[CONF_ID]:
             max_touch_channel = max(max_touch_channel, binary_sensor[CONF_CHANNEL])
-    if max_touch_channel_in_config := config.get(CONF_MAX_TOUCH_CHANNEL):
-        if max_touch_channel != max_touch_channel_in_config:
-            raise cv.Invalid(
-                "Max touch channel must equal the highest binary sensor channel or be removed for auto calculation",
-                path=[CONF_MAX_TOUCH_CHANNEL],
-            )
+    if (
+        max_touch_channel_in_config := config.get(CONF_MAX_TOUCH_CHANNEL)
+    ) and max_touch_channel != max_touch_channel_in_config:
+        raise cv.Invalid(
+            "Max touch channel must equal the highest binary sensor channel or be removed for auto calculation",
+            path=[CONF_MAX_TOUCH_CHANNEL],
+        )
     path = fconf.get_path_for_id(config[CONF_ID])[:-1]
     this_config = fconf.get_config_for_path(path)
     this_config[CONF_MAX_TOUCH_CHANNEL] = max_touch_channel

View File

@@ -27,7 +27,7 @@ void MQTTButtonComponent::setup() {
 }
 void MQTTButtonComponent::dump_config() {
   ESP_LOGCONFIG(TAG, "MQTT Button '%s': ", this->button_->get_name().c_str());
-  LOG_MQTT_COMPONENT(true, true);
+  LOG_MQTT_COMPONENT(false, true);
 }
 void MQTTButtonComponent::send_discovery(JsonObject root, mqtt::SendDiscoveryConfig &config) {

View File

@@ -25,9 +25,9 @@ async def new_openthermnumber(config: dict[str, Any]) -> cg.Pvariable:
     await cg.register_component(var, config)
     input.generate_setters(var, config)
-    if (initial_value := config.get(CONF_INITIAL_VALUE, None)) is not None:
+    if (initial_value := config.get(CONF_INITIAL_VALUE)) is not None:
         cg.add(var.set_initial_value(initial_value))
-    if (restore_value := config.get(CONF_RESTORE_VALUE, None)) is not None:
+    if (restore_value := config.get(CONF_RESTORE_VALUE)) is not None:
         cg.add(var.set_restore_value(restore_value))
     return var

View File

@@ -79,9 +79,8 @@ def set_sdkconfig_options(config):
             "CONFIG_OPENTHREAD_NETWORK_PSKC", f"{pskc:X}".lower()
         )
-    if force_dataset := config.get(CONF_FORCE_DATASET):
-        if force_dataset:
-            cg.add_define("USE_OPENTHREAD_FORCE_DATASET")
+    if config.get(CONF_FORCE_DATASET):
+        cg.add_define("USE_OPENTHREAD_FORCE_DATASET")
     add_idf_sdkconfig_option("CONFIG_OPENTHREAD_DNS64_CLIENT", True)
     add_idf_sdkconfig_option("CONFIG_OPENTHREAD_SRP_CLIENT", True)

View File

@@ -89,9 +89,10 @@ def validate_(config):
             raise cv.Invalid("No sensors or binary sensors to encrypt")
         elif config[CONF_ROLLING_CODE_ENABLE]:
             raise cv.Invalid("Rolling code requires an encryption key")
-    if config[CONF_PING_PONG_ENABLE]:
-        if not any(CONF_ENCRYPTION in p for p in config.get(CONF_PROVIDERS) or ()):
-            raise cv.Invalid("Ping-pong requires at least one encrypted provider")
+    if config[CONF_PING_PONG_ENABLE] and not any(
+        CONF_ENCRYPTION in p for p in config.get(CONF_PROVIDERS) or ()
+    ):
+        raise cv.Invalid("Ping-pong requires at least one encrypted provider")
     return config

View File

@@ -273,7 +273,7 @@ CONFIG_SCHEMA = PIPSOLAR_COMPONENT_SCHEMA.extend(
 async def to_code(config):
     paren = await cg.get_variable(config[CONF_PIPSOLAR_ID])
-    for type, _ in TYPES.items():
+    for type in TYPES:
         if type in config:
             conf = config[type]
             sens = await sensor.new_sensor(conf)

View File

@@ -49,12 +49,15 @@ def validate_internal_filter(value):
             [CONF_USE_PCNT],
         )
-    if CORE.is_esp32 and use_pcnt:
-        if value.get(CONF_INTERNAL_FILTER).total_microseconds > 13:
-            raise cv.Invalid(
-                "Maximum internal filter value when using ESP32 hardware PCNT is 13us",
-                [CONF_INTERNAL_FILTER],
-            )
+    if (
+        CORE.is_esp32
+        and use_pcnt
+        and value.get(CONF_INTERNAL_FILTER).total_microseconds > 13
+    ):
+        raise cv.Invalid(
+            "Maximum internal filter value when using ESP32 hardware PCNT is 13us",
+            [CONF_INTERNAL_FILTER],
+        )
     return value

View File

@@ -73,9 +73,8 @@ def map_sequence(value):
 def _validate(config):
     chip = DriverChip.chips[config[CONF_MODEL]]
-    if not chip.initsequence:
-        if CONF_INIT_SEQUENCE not in config:
-            raise cv.Invalid(f"{chip.name} model requires init_sequence")
+    if not chip.initsequence and CONF_INIT_SEQUENCE not in config:
+        raise cv.Invalid(f"{chip.name} model requires init_sequence")
     return config

View File

@@ -24,9 +24,8 @@ QwiicPIRComponent = qwiic_pir_ns.class_(
 def validate_no_debounce_unless_native(config):
-    if CONF_DEBOUNCE in config:
-        if config[CONF_DEBOUNCE_MODE] != "NATIVE":
-            raise cv.Invalid("debounce can only be set if debounce_mode is NATIVE")
+    if CONF_DEBOUNCE in config and config[CONF_DEBOUNCE_MODE] != "NATIVE":
+        raise cv.Invalid("debounce can only be set if debounce_mode is NATIVE")
     return config

View File

@@ -1,4 +1,5 @@
 #include "rc522.h"
+#include "esphome/core/helpers.h"
 #include "esphome/core/log.h"
 // Based on:
@@ -13,30 +14,6 @@ static const char *const TAG = "rc522";
 static const uint8_t RESET_COUNT = 5;
-std::string format_buffer(uint8_t *b, uint8_t len) {
-  char buf[32];
-  int offset = 0;
-  for (uint8_t i = 0; i < len; i++) {
-    const char *format = "%02X";
-    if (i + 1 < len)
-      format = "%02X-";
-    offset += sprintf(buf + offset, format, b[i]);
-  }
-  return std::string(buf);
-}
-std::string format_uid(std::vector<uint8_t> &uid) {
-  char buf[32];
-  int offset = 0;
-  for (size_t i = 0; i < uid.size(); i++) {
-    const char *format = "%02X";
-    if (i + 1 < uid.size())
-      format = "%02X-";
-    offset += sprintf(buf + offset, format, uid[i]);
-  }
-  return std::string(buf);
-}
 void RC522::setup() {
   state_ = STATE_SETUP;
   // Pull device out of power down / reset state.
@@ -215,7 +192,7 @@ void RC522::loop() {
       ESP_LOGV(TAG, "STATE_READ_SERIAL_DONE -> TIMEOUT (no tag present) %d", status);
     } else {
       ESP_LOGW(TAG, "Unexpected response. Read status is %d. Read bytes: %d (%s)", status, back_length_,
-               format_buffer(buffer_, 9).c_str());
+               format_hex_pretty(buffer_, back_length_, '-', false).c_str());
     }
     state_ = STATE_DONE;
@@ -239,7 +216,7 @@ void RC522::loop() {
       std::vector<uint8_t> rfid_uid(std::begin(uid_buffer_), std::begin(uid_buffer_) + uid_idx_);
       uid_idx_ = 0;
-      // ESP_LOGD(TAG, "Processing '%s'", format_uid(rfid_uid).c_str());
+      // ESP_LOGD(TAG, "Processing '%s'", format_hex_pretty(rfid_uid, '-', false).c_str());
       pcd_antenna_off_();
       state_ = STATE_INIT;  // scan again on next update
       bool report = true;
@@ -260,13 +237,13 @@ void RC522::loop() {
         trigger->process(rfid_uid);
       if (report) {
-        ESP_LOGD(TAG, "Found new tag '%s'", format_uid(rfid_uid).c_str());
+        ESP_LOGD(TAG, "Found new tag '%s'", format_hex_pretty(rfid_uid, '-', false).c_str());
       }
       break;
     }
     case STATE_DONE: {
       if (!this->current_uid_.empty()) {
-        ESP_LOGV(TAG, "Tag '%s' removed", format_uid(this->current_uid_).c_str());
+        ESP_LOGV(TAG, "Tag '%s' removed", format_hex_pretty(this->current_uid_, '-', false).c_str());
         for (auto *trigger : this->triggers_ontagremoved_)
           trigger->process(this->current_uid_);
       }
@@ -361,7 +338,7 @@ void RC522::pcd_clear_register_bit_mask_(PcdRegister reg,  ///< The register to
  * @return STATUS_OK on success, STATUS_??? otherwise.
  */
 void RC522::pcd_transceive_data_(uint8_t send_len) {
-  ESP_LOGV(TAG, "PCD TRANSCEIVE: RX: %s", format_buffer(buffer_, send_len).c_str());
+  ESP_LOGV(TAG, "PCD TRANSCEIVE: RX: %s", format_hex_pretty(buffer_, send_len, '-', false).c_str());
   delayMicroseconds(1000);  // we need 1 ms delay between antenna on and those communication commands
   send_len_ = send_len;
   // Prepare values for BitFramingReg
@@ -435,7 +412,8 @@ RC522::StatusCode RC522::await_transceive_() {
               error_reg_value);  // TODO: is this always due to collissions?
     return STATUS_ERROR;
   }
-  ESP_LOGV(TAG, "received %d bytes: %s", back_length_, format_buffer(buffer_ + send_len_, back_length_).c_str());
+  ESP_LOGV(TAG, "received %d bytes: %s", back_length_,
+           format_hex_pretty(buffer_ + send_len_, back_length_, '-', false).c_str());
   return STATUS_OK;
 }
@@ -499,7 +477,7 @@ bool RC522BinarySensor::process(std::vector<uint8_t> &data) {
   this->found_ = result;
   return result;
 }
-void RC522Trigger::process(std::vector<uint8_t> &data) { this->trigger(format_uid(data)); }
+void RC522Trigger::process(std::vector<uint8_t> &data) { this->trigger(format_hex_pretty(data, '-', false)); }
 }  // namespace rc522
 }  // namespace esphome

View File

@@ -1062,12 +1062,11 @@ def validate_raw_alternating(value):
     last_negative = None
     for i, val in enumerate(value):
         this_negative = val < 0
-        if i != 0:
-            if this_negative == last_negative:
-                raise cv.Invalid(
-                    f"Values must alternate between being positive and negative, please see index {i} and {i + 1}",
-                    [i],
-                )
+        if i != 0 and this_negative == last_negative:
+            raise cv.Invalid(
+                f"Values must alternate between being positive and negative, please see index {i} and {i + 1}",
+                [i],
+            )
         last_negative = this_negative
     return value

View File

@@ -90,11 +90,10 @@ async def to_code(config):
     if task_stack_in_psram := config.get(CONF_TASK_STACK_IN_PSRAM):
         cg.add(var.set_task_stack_in_psram(task_stack_in_psram))
-    if task_stack_in_psram:
-        if config[CONF_TASK_STACK_IN_PSRAM]:
-            esp32.add_idf_sdkconfig_option(
-                "CONFIG_SPIRAM_ALLOW_STACK_EXTERNAL_MEMORY", True
-            )
+    if task_stack_in_psram and config[CONF_TASK_STACK_IN_PSRAM]:
+        esp32.add_idf_sdkconfig_option(
+            "CONFIG_SPIRAM_ALLOW_STACK_EXTERNAL_MEMORY", True
+        )
     cg.add(var.set_target_bits_per_sample(config[CONF_BITS_PER_SAMPLE]))
     cg.add(var.set_target_sample_rate(config[CONF_SAMPLE_RATE]))

View File

@@ -140,7 +140,6 @@ async def to_code(config):
     cg.add(var.set_vsync_front_porch(config[CONF_VSYNC_FRONT_PORCH]))
     cg.add(var.set_pclk_inverted(config[CONF_PCLK_INVERTED]))
     cg.add(var.set_pclk_frequency(config[CONF_PCLK_FREQUENCY]))
-    index = 0
     dpins = []
     if CONF_RED in config[CONF_DATA_PINS]:
         red_pins = config[CONF_DATA_PINS][CONF_RED]
@@ -158,10 +157,9 @@ async def to_code(config):
         dpins = dpins[8:16] + dpins[0:8]
     else:
         dpins = config[CONF_DATA_PINS]
-    for pin in dpins:
+    for index, pin in enumerate(dpins):
         data_pin = await cg.gpio_pin_expression(pin)
         cg.add(var.add_data_pin(data_pin, index))
-        index += 1
     if enable_pin := config.get(CONF_ENABLE_PIN):
         enable = await cg.gpio_pin_expression(enable_pin)
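Here (and in the st7701s hunk below) a hand-maintained counter becomes `enumerate()`. A minimal standalone sketch of the equivalence:

    dpins = ["GPIO1", "GPIO2", "GPIO3"]

    # Before: counter maintained by hand.
    index = 0
    before = []
    for pin in dpins:
        before.append((pin, index))
        index += 1

    # After: enumerate() yields (index, item) pairs directly.
    after = [(pin, index) for index, pin in enumerate(dpins)]
    assert before == after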

View File

@@ -12,7 +12,7 @@ from esphome.const import (
     UNIT_DECIBEL,
 )
-AUTOLOAD = ["audio"]
+AUTO_LOAD = ["audio"]
 CODEOWNERS = ["@kahrendt"]
 DEPENDENCIES = ["microphone"]

View File

@@ -204,13 +204,14 @@ def _validate_pipeline(config):
 def _validate_repeated_speaker(config):
-    if (announcement_config := config.get(CONF_ANNOUNCEMENT_PIPELINE)) and (
-        media_config := config.get(CONF_MEDIA_PIPELINE)
+    if (
+        (announcement_config := config.get(CONF_ANNOUNCEMENT_PIPELINE))
+        and (media_config := config.get(CONF_MEDIA_PIPELINE))
+        and announcement_config[CONF_SPEAKER] == media_config[CONF_SPEAKER]
     ):
-        if announcement_config[CONF_SPEAKER] == media_config[CONF_SPEAKER]:
-            raise cv.Invalid(
-                "The announcement and media pipelines cannot use the same speaker. Use the `mixer` speaker component to create two source speakers."
-            )
+        raise cv.Invalid(
+            "The announcement and media pipelines cannot use the same speaker. Use the `mixer` speaker component to create two source speakers."
+        )
     return config

View File

@@ -115,9 +115,7 @@ def get_target_platform():
 def get_target_variant():
-    return (
-        CORE.data[KEY_ESP32][KEY_VARIANT] if KEY_VARIANT in CORE.data[KEY_ESP32] else ""
-    )
+    return CORE.data[KEY_ESP32].get(KEY_VARIANT, "")
 # Get a list of available hardware interfaces based on target and variant.
@@ -213,9 +211,7 @@ def validate_hw_pins(spi, index=-1):
             return False
         if sdo_pin_no not in pin_set[CONF_MOSI_PIN]:
             return False
-        if sdi_pin_no not in pin_set[CONF_MISO_PIN]:
-            return False
-        return True
+        return sdi_pin_no in pin_set[CONF_MISO_PIN]
     return False

View File

@@ -130,11 +130,11 @@ def validate_sprinkler(config):
         if (
             CONF_PUMP_SWITCH_OFF_DURING_VALVE_OPEN_DELAY in sprinkler_controller
             and CONF_VALVE_OPEN_DELAY not in sprinkler_controller
+            and sprinkler_controller[CONF_PUMP_SWITCH_OFF_DURING_VALVE_OPEN_DELAY]
         ):
-            if sprinkler_controller[CONF_PUMP_SWITCH_OFF_DURING_VALVE_OPEN_DELAY]:
-                raise cv.Invalid(
-                    f"{CONF_VALVE_OPEN_DELAY} must be defined when {CONF_PUMP_SWITCH_OFF_DURING_VALVE_OPEN_DELAY} is enabled"
-                )
+            raise cv.Invalid(
+                f"{CONF_VALVE_OPEN_DELAY} must be defined when {CONF_PUMP_SWITCH_OFF_DURING_VALVE_OPEN_DELAY} is enabled"
+            )
         if (
             CONF_REPEAT in sprinkler_controller

View File

@@ -42,14 +42,15 @@ SSD1306_MODEL = cv.enum(MODELS, upper=True, space="_")
 def _validate(value):
     model = value[CONF_MODEL]
-    if model not in ("SSD1305_128X32", "SSD1305_128X64"):
-        # Contrast is default value (1.0) while brightness is not
-        # Indicates user is using old `brightness` option
-        if value[CONF_BRIGHTNESS] != 1.0 and value[CONF_CONTRAST] == 1.0:
-            raise cv.Invalid(
-                "SSD1306/SH1106 no longer accepts brightness option, "
-                'please use "contrast" instead.'
-            )
+    if (
+        model not in ("SSD1305_128X32", "SSD1305_128X64")
+        and value[CONF_BRIGHTNESS] != 1.0
+        and value[CONF_CONTRAST] == 1.0
+    ):
+        raise cv.Invalid(
+            "SSD1306/SH1106 no longer accepts brightness option, "
+            'please use "contrast" instead.'
+        )
     return value

View File

@@ -189,7 +189,6 @@ async def to_code(config):
     cg.add(var.set_vsync_front_porch(config[CONF_VSYNC_FRONT_PORCH]))
     cg.add(var.set_pclk_inverted(config[CONF_PCLK_INVERTED]))
     cg.add(var.set_pclk_frequency(config[CONF_PCLK_FREQUENCY]))
-    index = 0
     dpins = []
     if CONF_RED in config[CONF_DATA_PINS]:
         red_pins = config[CONF_DATA_PINS][CONF_RED]
@@ -207,10 +206,9 @@ async def to_code(config):
         dpins = dpins[8:16] + dpins[0:8]
     else:
         dpins = config[CONF_DATA_PINS]
-    for pin in dpins:
+    for index, pin in enumerate(dpins):
         data_pin = await cg.gpio_pin_expression(pin)
         cg.add(var.add_data_pin(data_pin, index))
-        index += 1
     if dc_pin := config.get(CONF_DC_PIN):
         dc = await cg.gpio_pin_expression(dc_pin)

View File

@@ -49,15 +49,14 @@ def _expand_jinja(value, orig_value, path, jinja, ignore_missing):
     try:
         # Invoke the jinja engine to evaluate the expression.
         value, err = jinja.expand(value)
-        if err is not None:
-            if not ignore_missing and "password" not in path:
-                _LOGGER.warning(
-                    "Found '%s' (see %s) which looks like an expression,"
-                    " but could not resolve all the variables: %s",
-                    value,
-                    "->".join(str(x) for x in path),
-                    err.message,
-                )
+        if err is not None and not ignore_missing and "password" not in path:
+            _LOGGER.warning(
+                "Found '%s' (see %s) which looks like an expression,"
+                " but could not resolve all the variables: %s",
+                value,
+                "->".join(str(x) for x in path),
+                err.message,
+            )
     except (
         TemplateError,
         TemplateRuntimeError,

View File

@@ -1,3 +1,4 @@
+import contextlib
 import re
 from esphome import automation
@@ -41,12 +42,10 @@ ELEVATION_MAP = {
 def elevation(value):
     if isinstance(value, str):
-        try:
+        with contextlib.suppress(cv.Invalid):
             value = ELEVATION_MAP[
                 cv.one_of(*ELEVATION_MAP, lower=True, space="_")(value)
             ]
-        except cv.Invalid:
-            pass
     value = cv.angle(value)
     return cv.float_range(min=-180, max=180)(value)

View File

@@ -41,11 +41,13 @@ SX1509KeyTrigger = sx1509_ns.class_(
 def check_keys(config):
-    if CONF_KEYS in config:
-        if len(config[CONF_KEYS]) != config[CONF_KEY_ROWS] * config[CONF_KEY_COLUMNS]:
-            raise cv.Invalid(
-                "The number of key codes must equal the number of rows * columns"
-            )
+    if (
+        CONF_KEYS in config
+        and len(config[CONF_KEYS]) != config[CONF_KEY_ROWS] * config[CONF_KEY_COLUMNS]
+    ):
+        raise cv.Invalid(
+            "The number of key codes must equal the number of rows * columns"
+        )
     return config

View File

@@ -477,11 +477,11 @@ def validate_thermostat(config):
     if (
         CONF_ON_BOOT_RESTORE_FROM in config
         and config[CONF_ON_BOOT_RESTORE_FROM] is OnBootRestoreFrom.DEFAULT_PRESET
+        and CONF_DEFAULT_PRESET not in config
     ):
-        if CONF_DEFAULT_PRESET not in config:
-            raise cv.Invalid(
-                f"{CONF_DEFAULT_PRESET} must be defined to use {CONF_ON_BOOT_RESTORE_FROM} in DEFAULT_PRESET mode"
-            )
+        raise cv.Invalid(
+            f"{CONF_DEFAULT_PRESET} must be defined to use {CONF_ON_BOOT_RESTORE_FROM} in DEFAULT_PRESET mode"
+        )
     if config[CONF_FAN_WITH_COOLING] is True and CONF_FAN_ONLY_ACTION not in config:
         raise cv.Invalid(

View File

@@ -236,7 +236,7 @@ def validate_time_at(value):
 def validate_cron_keys(value):
     if CONF_CRON in value:
-        for key in value.keys():
+        for key in value:
             if key in CRON_KEYS:
                 raise cv.Invalid(f"Cannot use option {key} when cron: is specified.")
         if CONF_AT in value:
@@ -246,7 +246,7 @@ def validate_cron_keys(value):
         value.update(cron_)
         return value
     if CONF_AT in value:
-        for key in value.keys():
+        for key in value:
             if key in CRON_KEYS:
                 raise cv.Invalid(f"Cannot use option {key} when at: is specified.")
         at_ = value[CONF_AT]

View File

@@ -46,16 +46,15 @@ TuyaClimate = tuya_ns.class_("TuyaClimate", climate.Climate, cg.Component)
 def validate_temperature_multipliers(value):
-    if CONF_TEMPERATURE_MULTIPLIER in value:
-        if (
-            CONF_CURRENT_TEMPERATURE_MULTIPLIER in value
-            or CONF_TARGET_TEMPERATURE_MULTIPLIER in value
-        ):
-            raise cv.Invalid(
-                f"Cannot have {CONF_TEMPERATURE_MULTIPLIER} at the same time as "
-                f"{CONF_CURRENT_TEMPERATURE_MULTIPLIER} and "
-                f"{CONF_TARGET_TEMPERATURE_MULTIPLIER}"
-            )
+    if CONF_TEMPERATURE_MULTIPLIER in value and (
+        CONF_CURRENT_TEMPERATURE_MULTIPLIER in value
+        or CONF_TARGET_TEMPERATURE_MULTIPLIER in value
+    ):
+        raise cv.Invalid(
+            f"Cannot have {CONF_TEMPERATURE_MULTIPLIER} at the same time as "
+            f"{CONF_CURRENT_TEMPERATURE_MULTIPLIER} and "
+            f"{CONF_TARGET_TEMPERATURE_MULTIPLIER}"
+        )
     if (
         CONF_CURRENT_TEMPERATURE_MULTIPLIER in value
         and CONF_TARGET_TEMPERATURE_MULTIPLIER not in value

View File

@@ -34,12 +34,14 @@ def validate_min_max(config):
     min_value = config[CONF_MIN_VALUE]
     if max_value <= min_value:
         raise cv.Invalid("max_value must be greater than min_value")
-    if hidden_config := config.get(CONF_DATAPOINT_HIDDEN):
-        if (initial_value := hidden_config.get(CONF_INITIAL_VALUE, None)) is not None:
-            if (initial_value > max_value) or (initial_value < min_value):
-                raise cv.Invalid(
-                    f"{CONF_INITIAL_VALUE} must be a value between {CONF_MAX_VALUE} and {CONF_MIN_VALUE}"
-                )
+    if (
+        (hidden_config := config.get(CONF_DATAPOINT_HIDDEN))
+        and (initial_value := hidden_config.get(CONF_INITIAL_VALUE, None)) is not None
+        and ((initial_value > max_value) or (initial_value < min_value))
+    ):
+        raise cv.Invalid(
+            f"{CONF_INITIAL_VALUE} must be a value between {CONF_MAX_VALUE} and {CONF_MIN_VALUE}"
+        )
     return config

View File

@@ -442,9 +442,7 @@ async def to_code(config):
     if CORE.is_esp8266:
         cg.add_library("ESP8266WiFi", None)
-    elif CORE.is_esp32 and CORE.using_arduino:
-        cg.add_library("WiFi", None)
-    elif CORE.is_rp2040:
+    elif (CORE.is_esp32 and CORE.using_arduino) or CORE.is_rp2040:
         cg.add_library("WiFi", None)
     if CORE.is_esp32 and CORE.using_esp_idf:

View File

@@ -198,10 +198,7 @@ class Config(OrderedDict, fv.FinalValidateConfig):
         self.output_paths.remove((path, domain))
     def is_in_error_path(self, path: ConfigPath) -> bool:
-        for err in self.errors:
-            if _path_begins_with(err.path, path):
-                return True
-        return False
+        return any(_path_begins_with(err.path, path) for err in self.errors)
     def set_by_path(self, path, value):
         conf = self
@@ -224,7 +221,7 @@ class Config(OrderedDict, fv.FinalValidateConfig):
         for index, path_item in enumerate(path):
             try:
                 if path_item in data:
-                    key_data = [x for x in data.keys() if x == path_item][0]
+                    key_data = [x for x in data if x == path_item][0]
                     if isinstance(key_data, ESPHomeDataBase):
                         doc_range = key_data.esp_range
                     if get_key and index == len(path) - 1:
@@ -1081,7 +1078,7 @@ def dump_dict(
             ret += "{}"
             multiline = False
-        for k in conf.keys():
+        for k in conf:
             path_ = path + [k]
             error = config.get_error_for_path(path_)
             if error is not None:
@@ -1097,10 +1094,7 @@ def dump_dict(
                 msg = f"\n{indent(msg)}"
             if inf is not None:
-                if m:
-                    msg = f" {inf}{msg}"
-                else:
-                    msg = f"{msg} {inf}"
+                msg = f" {inf}{msg}" if m else f"{msg} {inf}"
             ret += f"{st + msg}\n"
         elif isinstance(conf, str):
             if is_secret(conf):

View File

@@ -2,7 +2,7 @@
 from __future__ import annotations
-from contextlib import contextmanager
+from contextlib import contextmanager, suppress
 from dataclasses import dataclass
 from datetime import datetime
 from ipaddress import (
@@ -2113,10 +2113,8 @@ def require_esphome_version(year, month, patch):
 @contextmanager
 def suppress_invalid():
-    try:
+    with suppress(vol.Invalid):
         yield
-    except vol.Invalid:
-        pass
 GIT_SCHEMA = Schema(
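`suppress_invalid()` stays a generator-based context manager: an exception raised inside the caller's `with` block is thrown back into the generator at the `yield` and absorbed by `suppress(...)` there. A standalone sketch of the same structure, with `ValueError` standing in for `vol.Invalid`:

    from contextlib import contextmanager, suppress

    @contextmanager
    def suppress_value_error():
        with suppress(ValueError):
            yield

    with suppress_value_error():
        raise ValueError("ignored")  # absorbed at the yield point
    print("still running")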

View File

@@ -317,7 +317,7 @@ def preload_core_config(config, result) -> str:
     target_platforms = []
-    for domain, _ in config.items():
+    for domain in config:
         if domain.startswith("."):
             continue
         if _is_target_platform(domain):

View File

@@ -65,7 +65,7 @@ static void validate_static_string(const char *name) {
 // Common implementation for both timeout and interval
 void HOT Scheduler::set_timer_common_(Component *component, SchedulerItem::Type type, bool is_static_string,
-                                      const void *name_ptr, uint32_t delay, std::function<void()> func) {
+                                      const void *name_ptr, uint32_t delay, std::function<void()> func, bool is_retry) {
   // Get the name as const char*
   const char *name_cstr = this->get_name_cstr_(is_static_string, name_ptr);
@@ -130,6 +130,18 @@ void HOT Scheduler::set_timer_common_(Component *component, SchedulerItem::Type
 #endif /* ESPHOME_DEBUG_SCHEDULER */
   LockGuard guard{this->lock_};
+  // For retries, check if there's a cancelled timeout first
+  if (is_retry && name_cstr != nullptr && type == SchedulerItem::TIMEOUT &&
+      (has_cancelled_timeout_in_container_(this->items_, component, name_cstr) ||
+       has_cancelled_timeout_in_container_(this->to_add_, component, name_cstr))) {
+    // Skip scheduling - the retry was cancelled
+#ifdef ESPHOME_DEBUG_SCHEDULER
+    ESP_LOGD(TAG, "Skipping retry '%s' - found cancelled item", name_cstr);
+#endif
+    return;
+  }
+
   // If name is provided, do atomic cancel-and-add
   // Cancel existing items
   this->cancel_item_locked_(component, name_cstr, type);
@@ -178,12 +190,14 @@ struct RetryArgs {
   Scheduler *scheduler;
 };
-static void retry_handler(const std::shared_ptr<RetryArgs> &args) {
+void retry_handler(const std::shared_ptr<RetryArgs> &args) {
   RetryResult const retry_result = args->func(--args->retry_countdown);
   if (retry_result == RetryResult::DONE || args->retry_countdown <= 0)
     return;
   // second execution of `func` happens after `initial_wait_time`
-  args->scheduler->set_timeout(args->component, args->name, args->current_interval, [args]() { retry_handler(args); });
+  args->scheduler->set_timer_common_(
+      args->component, Scheduler::SchedulerItem::TIMEOUT, false, &args->name, args->current_interval,
+      [args]() { retry_handler(args); }, true);
   // backoff_increase_factor applied to third & later executions
   args->current_interval *= args->backoff_increase_factor;
 }

View File

@@ -15,8 +15,15 @@
 namespace esphome {
 class Component;
+struct RetryArgs;
+
+// Forward declaration of retry_handler - needs to be non-static for friend declaration
+void retry_handler(const std::shared_ptr<RetryArgs> &args);
 class Scheduler {
+  // Allow retry_handler to access protected members
+  friend void ::esphome::retry_handler(const std::shared_ptr<RetryArgs> &args);
+
  public:
  // Public API - accepts std::string for backward compatibility
  void set_timeout(Component *component, const std::string &name, uint32_t timeout, std::function<void()> func);
@@ -147,7 +154,7 @@ class Scheduler {
  // Common implementation for both timeout and interval
  void set_timer_common_(Component *component, SchedulerItem::Type type, bool is_static_string, const void *name_ptr,
-                        uint32_t delay, std::function<void()> func);
+                        uint32_t delay, std::function<void()> func, bool is_retry = false);
  uint64_t millis_64_(uint32_t now);
  // Cleanup logically deleted items from the scheduler
@@ -170,8 +177,8 @@ class Scheduler {
  // Helper function to check if item matches criteria for cancellation
  inline bool HOT matches_item_(const std::unique_ptr<SchedulerItem> &item, Component *component, const char *name_cstr,
-                               SchedulerItem::Type type) {
-    if (item->component != component || item->type != type || item->remove) {
+                               SchedulerItem::Type type, bool skip_removed = true) const {
+    if (item->component != component || item->type != type || (skip_removed && item->remove)) {
      return false;
    }
    const char *item_name = item->get_name();
@@ -197,6 +204,18 @@ class Scheduler {
    return item->remove || (item->component != nullptr && item->component->is_failed());
  }
+
+  // Template helper to check if any item in a container matches our criteria
+  template<typename Container>
+  bool has_cancelled_timeout_in_container_(const Container &container, Component *component,
+                                           const char *name_cstr) const {
+    for (const auto &item : container) {
+      if (item->remove && this->matches_item_(item, component, name_cstr, SchedulerItem::TIMEOUT, false)) {
+        return true;
+      }
+    }
+    return false;
+  }
  Mutex lock_;
  std::vector<std::unique_ptr<SchedulerItem>> items_;
  std::vector<std::unique_ptr<SchedulerItem>> to_add_;

View File

@@ -1037,10 +1037,7 @@ class MockObjClass(MockObj):
     def inherits_from(self, other: "MockObjClass") -> bool:
         if str(self) == str(other):
             return True
-        for parent in self._parents:
-            if str(parent) == str(other):
-                return True
-        return False
+        return any(str(parent) == str(other) for parent in self._parents)
     def template(self, *args: SafeExpType) -> "MockObjClass":
         if len(args) != 1 or not isinstance(args[0], TemplateArguments):
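`inherits_from` gets the loop-to-`any()` rewrite (ruff's SIM110 rule) that recurs throughout this merge; `any()` short-circuits on the first match exactly as the early `return True` did. A small sketch of the equivalence (invented inputs):

    def inherits_from_loop(parents, other):
        for parent in parents:
            if str(parent) == str(other):
                return True
        return False

    def inherits_from_any(parents, other):
        return any(str(parent) == str(other) for parent in parents)

    parents = ["MockObj", "Component"]
    assert inherits_from_loop(parents, "Component") == inherits_from_any(parents, "Component")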

View File

@@ -3,6 +3,7 @@ from __future__ import annotations
 import asyncio
 from asyncio import events
 from concurrent.futures import ThreadPoolExecutor
+import contextlib
 import logging
 import os
 import socket
@@ -125,10 +126,8 @@ def start_dashboard(args) -> None:
     asyncio.set_event_loop_policy(DashboardEventLoopPolicy(settings.verbose))
-    try:
+    with contextlib.suppress(KeyboardInterrupt):
         asyncio.run(async_start(args))
-    except KeyboardInterrupt:
-        pass
 async def async_start(args) -> None:

View File

@@ -144,7 +144,7 @@ def websocket_class(cls):
     if not hasattr(cls, "_message_handlers"):
         cls._message_handlers = {}
-    for _, method in cls.__dict__.items():
+    for method in cls.__dict__.values():
         if hasattr(method, "_message_handler"):
             cls._message_handlers[method._message_handler] = method

View File

@@ -88,10 +88,7 @@ def recv_decode(sock, amount, decode=True):
 def receive_exactly(sock, amount, msg, expect, decode=True):
-    if decode:
-        data = []
-    else:
-        data = b""
+    data = [] if decode else b""
     try:
         data += recv_decode(sock, 1, decode=decode)

View File

@@ -96,9 +96,7 @@ def cpp_string_escape(string, encoding="utf-8"):
     def _should_escape(byte: int) -> bool:
         if not 32 <= byte < 127:
             return True
-        if byte in (ord("\\"), ord('"')):
-            return True
-        return False
+        return byte in (ord("\\"), ord('"'))
     if isinstance(string, str):
         string = string.encode(encoding)

View File

@@ -61,7 +61,7 @@ class ESPHomeLogFormatter(logging.Formatter):
         }.get(record.levelname, "")
         message = f"{prefix}{formatted}{AnsiStyle.RESET_ALL.value}"
         if CORE.dashboard:
-            try:
+            try:  # noqa: SIM105
                 message = message.replace("\033", "\\033")
             except UnicodeEncodeError:
                 pass

View File

@@ -1,3 +1,4 @@
+import contextlib
 from datetime import datetime
 import hashlib
 import json
@@ -52,10 +53,8 @@ def initialize(
     client = prepare(
         config, subscriptions, on_message, on_connect, username, password, client_id
     )
-    try:
+    with contextlib.suppress(KeyboardInterrupt):
         client.loop_forever()
-    except KeyboardInterrupt:
-        pass
     return 0

View File

@@ -141,9 +141,11 @@ def _load_idedata(config):
     temp_idedata = Path(CORE.relative_internal_path("idedata", f"{CORE.name}.json"))
     changed = False
-    if not platformio_ini.is_file() or not temp_idedata.is_file():
-        changed = True
-    elif platformio_ini.stat().st_mtime >= temp_idedata.stat().st_mtime:
+    if (
+        not platformio_ini.is_file()
+        or not temp_idedata.is_file()
+        or platformio_ini.stat().st_mtime >= temp_idedata.stat().st_mtime
+    ):
         changed = True
     if not changed:

View File

@@ -59,7 +59,7 @@ def safe_print(message="", end="\n"):
     from esphome.core import CORE
     if CORE.dashboard:
-        try:
+        try:  # noqa: SIM105
             message = message.replace("\033", "\\033")
         except UnicodeEncodeError:
             pass

View File

@@ -116,10 +116,7 @@ def wizard_file(**kwargs):
         kwargs["fallback_name"] = ap_name
         kwargs["fallback_psk"] = "".join(random.choice(letters) for _ in range(12))
-    if kwargs.get("friendly_name"):
-        base = BASE_CONFIG_FRIENDLY
-    else:
-        base = BASE_CONFIG
+    base = BASE_CONFIG_FRIENDLY if kwargs.get("friendly_name") else BASE_CONFIG
     config = base.format(**kwargs)

View File

@@ -86,21 +86,17 @@ def storage_should_clean(old: StorageJSON, new: StorageJSON) -> bool:
     if old.src_version != new.src_version:
         return True
-    if old.build_path != new.build_path:
-        return True
-    return False
+    return old.build_path != new.build_path
 def storage_should_update_cmake_cache(old: StorageJSON, new: StorageJSON) -> bool:
     if (
         old.loaded_integrations != new.loaded_integrations
         or old.loaded_platforms != new.loaded_platforms
-    ):
-        if new.core_platform == PLATFORM_ESP32:
-            from esphome.components.esp32 import FRAMEWORK_ESP_IDF
-            return new.framework == FRAMEWORK_ESP_IDF
+    ) and new.core_platform == PLATFORM_ESP32:
+        from esphome.components.esp32 import FRAMEWORK_ESP_IDF
+        return new.framework == FRAMEWORK_ESP_IDF
     return False

View File

@@ -56,9 +56,12 @@ class ESPHomeDataBase:
     def from_node(self, node):
         # pylint: disable=attribute-defined-outside-init
         self._esp_range = DocumentRange.from_marks(node.start_mark, node.end_mark)
-        if isinstance(node, yaml.ScalarNode):
-            if node.style is not None and node.style in "|>":
-                self._content_offset = 1
+        if (
+            isinstance(node, yaml.ScalarNode)
+            and node.style is not None
+            and node.style in "|>"
+        ):
+            self._content_offset = 1
     def from_database(self, database):
         # pylint: disable=attribute-defined-outside-init

View File

@@ -40,6 +40,7 @@ lib_deps =
     functionpointer/arduino-MLX90393@1.0.2 ; mlx90393
    pavlodn/HaierProtocol@0.9.31 ; haier
    kikuchan98/pngle@1.1.0 ; online_image
+    https://github.com/esphome/TinyGPSPlus.git#v1.1.0 ; gps
    ; Using the repository directly, otherwise ESP-IDF can't use the library
    https://github.com/bitbank2/JPEGDEC.git#ca1e0f2 ; online_image
    ; This is using the repository until a new release is published to PlatformIO
@@ -73,7 +74,6 @@ lib_deps =
    heman/AsyncMqttClient-esphome@1.0.0 ; mqtt
    ESP32Async/ESPAsyncWebServer@3.7.8 ; web_server_base
    fastled/FastLED@3.9.16 ; fastled_base
-    mikalhart/TinyGPSPlus@1.1.0 ; gps
    freekode/TM1651@1.0.1 ; tm1651
    glmnet/Dsmr@0.7 ; dsmr
    rweather/Crypto@0.4.0 ; dsmr
@@ -180,13 +180,6 @@ build_unflags =
    ${common.build_unflags}
 extra_scripts = post:esphome/components/esp32/post_build.py.script
-; This are common settings for the ESP32 using the latest ESP-IDF version.
-[common:esp32-idf-5_3]
-extends = common:esp32-idf
-platform = platformio/espressif32@6.8.0
-platform_packages =
-    platformio/framework-espidf@~3.50300.0
 ; These are common settings for the RP2040 using Arduino.
 [common:rp2040-arduino]
 extends = common:arduino
@@ -239,6 +232,7 @@ lib_deps =
    wjtje/qr-code-generator-library@1.7.0 ; qr_code
    pavlodn/HaierProtocol@0.9.31 ; haier
    functionpointer/arduino-MLX90393@1.0.2 ; mlx90393
+    https://github.com/esphome/TinyGPSPlus.git#v1.1.0 ; gps
    https://github.com/Sensirion/arduino-gas-index-algorithm.git#3.2.1 ; Sensirion Gas Index Algorithm Arduino Library
    lvgl/lvgl@8.4.0 ; lvgl
@@ -298,17 +292,6 @@ build_flags =
 build_unflags =
    ${common.build_unflags}
-[env:esp32-idf-5_3]
-extends = common:esp32-idf-5_3
-board = esp32dev
-board_build.esp-idf.sdkconfig_path = .temp/sdkconfig-esp32-idf
-build_flags =
-    ${common:esp32-idf.build_flags}
-    ${flags:runtime.build_flags}
-    -DUSE_ESP32_VARIANT_ESP32
-build_unflags =
-    ${common.build_unflags}
 [env:esp32-idf-tidy]
 extends = common:esp32-idf
 board = esp32dev
@@ -353,17 +336,6 @@ build_flags =
 build_unflags =
    ${common.build_unflags}
-[env:esp32c3-idf-5_3]
-extends = common:esp32-idf-5_3
-board = esp32-c3-devkitm-1
-board_build.esp-idf.sdkconfig_path = .temp/sdkconfig-esp32c3-idf
-build_flags =
-    ${common:esp32-idf.build_flags}
-    ${flags:runtime.build_flags}
-    -DUSE_ESP32_VARIANT_ESP32C3
-build_unflags =
-    ${common.build_unflags}
 [env:esp32c3-idf-tidy]
 extends = common:esp32-idf
 board = esp32-c3-devkitm-1
@@ -419,17 +391,6 @@ build_flags =
 build_unflags =
    ${common.build_unflags}
-[env:esp32s2-idf-5_3]
-extends = common:esp32-idf-5_3
-board = esp32-s2-kaluga-1
-board_build.esp-idf.sdkconfig_path = .temp/sdkconfig-esp32s2-idf
-build_flags =
-    ${common:esp32-idf.build_flags}
-    ${flags:runtime.build_flags}
-    -DUSE_ESP32_VARIANT_ESP32S2
-build_unflags =
-    ${common.build_unflags}
 [env:esp32s2-idf-tidy]
 extends = common:esp32-idf
 board = esp32-s2-kaluga-1
@@ -474,17 +435,6 @@ build_flags =
 build_unflags =
    ${common.build_unflags}
-[env:esp32s3-idf-5_3]
-extends = common:esp32-idf-5_3
-board = esp32-s3-devkitc-1
-board_build.esp-idf.sdkconfig_path = .temp/sdkconfig-esp32s3-idf
-build_flags =
-    ${common:esp32-idf.build_flags}
-    ${flags:runtime.build_flags}
-    -DUSE_ESP32_VARIANT_ESP32S3
-build_unflags =
-    ${common.build_unflags}
 [env:esp32s3-idf-tidy]
 extends = common:esp32-idf
 board = esp32-s3-devkitc-1
@@ -565,6 +515,8 @@ build_flags =
 build_unflags =
    ${common.build_unflags}
+
+;;;;;;;; Host ;;;;;;;;
 [env:host]
 extends = common
 platform = platformio/native

View File

@@ -111,11 +111,13 @@ exclude = ['generated']
 [tool.ruff.lint]
 select = [
     "E",     # pycodestyle
     "F",     # pyflakes/autoflake
     "I",     # isort
+    "PERF",  # performance
     "PL",    # pylint
+    "SIM",   # flake8-simplify
     "UP",    # pyupgrade
 ]
 ignore = [
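The two rule families added here, PERF (Perflint) and SIM (flake8-simplify), drive most of the Python cleanups later in this diff. A minimal Python sketch of the two patterns these rules most often flag; the names and values are hypothetical:

# SIM102 (flake8-simplify): a nested `if` collapsed into one condition.
def label(value: int) -> str:
    if value > 0 and value % 2 == 0:  # was: `if value > 0:` wrapping `if value % 2 == 0:`
        return "positive-even"
    return "other"

# PERF401 (Perflint): build lists with a comprehension instead of
# repeated list.append() calls inside a loop.
squares = [n * n for n in range(10)]  # was: a for-loop calling squares.append(n * n)

print(label(4), squares[:3])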
View File
@@ -1,6 +1,6 @@
 pylint==3.3.7
 flake8==7.3.0  # also change in .pre-commit-config.yaml when updating
-ruff==0.12.4  # also change in .pre-commit-config.yaml when updating
+ruff==0.12.5  # also change in .pre-commit-config.yaml when updating
 pyupgrade==3.20.0  # also change in .pre-commit-config.yaml when updating
 pre-commit
View File
@@ -61,9 +61,7 @@ def indent_list(text: str, padding: str = "  ") -> list[str]:
     """Indent each line of the given text with the specified padding."""
     lines = []
     for line in text.splitlines():
-        if line == "":
-            p = ""
-        elif line.startswith("#ifdef") or line.startswith("#endif"):
+        if line == "" or line.startswith("#ifdef") or line.startswith("#endif"):
             p = ""
         else:
             p = padding
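The hunk only shows the merged condition. For context, a hedged reconstruction of the full helper; the lines below the visible context (appending p + line and the return) are assumed rather than taken from the diff:

def indent_list(text: str, padding: str = "  ") -> list[str]:
    """Indent each line of the given text with the specified padding."""
    lines = []
    for line in text.splitlines():
        # Blank lines and preprocessor guards stay flush left.
        if line == "" or line.startswith("#ifdef") or line.startswith("#endif"):
            p = ""
        else:
            p = padding
        lines.append(p + line)  # assumed continuation, not shown in the hunk
    return lines

print(indent_list("int x;\n#ifdef USE_API\nint y;\n#endif"))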
@@ -2388,7 +2386,7 @@ static const char *const TAG = "api.service";
     needs_conn = get_opt(m, pb.needs_setup_connection, True)
     needs_auth = get_opt(m, pb.needs_authentication, True)
-    ifdef = message_ifdef_map.get(inp, ifdefs.get(inp, None))
+    ifdef = message_ifdef_map.get(inp, ifdefs.get(inp))
     if ifdef is not None:
         hpp += f"#ifdef {ifdef}\n"
View File
@@ -71,11 +71,13 @@ def get_component_names():
     skip_components = []
     for d in os.listdir(CORE_COMPONENTS_PATH):
-        if not d.startswith("__") and os.path.isdir(
-            os.path.join(CORE_COMPONENTS_PATH, d)
+        if (
+            not d.startswith("__")
+            and os.path.isdir(os.path.join(CORE_COMPONENTS_PATH, d))
+            and d not in component_names
+            and d not in skip_components
         ):
-            if d not in component_names and d not in skip_components:
-                component_names.append(d)
+            component_names.append(d)
     return sorted(component_names)
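The same change as a runnable sketch: the nested ifs become a single short-circuiting condition chain, so the cheap startswith() check runs before the isdir() stat call. The components_path argument below is a stand-in for CORE_COMPONENTS_PATH:

import os

def get_component_names(components_path: str) -> list[str]:
    component_names: list[str] = []
    skip_components: list[str] = []
    for d in os.listdir(components_path):
        # One condition chain replaces the nested ifs; `and` short-circuits,
        # so the string check runs before the filesystem call.
        if (
            not d.startswith("__")
            and os.path.isdir(os.path.join(components_path, d))
            and d not in component_names
            and d not in skip_components
        ):
            component_names.append(d)
    return sorted(component_names)

print(get_component_names("."))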
@@ -139,11 +141,10 @@ def register_module_schemas(key, module, manifest=None):
     for name, schema in module_schemas(module):
         register_known_schema(key, name, schema)
-    if manifest:
+    if manifest and manifest.multi_conf and S_CONFIG_SCHEMA in output[key][S_SCHEMAS]:
         # Multi conf should allow list of components
         # not sure about 2nd part of the if, might be useless config (e.g. as3935)
-        if manifest.multi_conf and S_CONFIG_SCHEMA in output[key][S_SCHEMAS]:
-            output[key][S_SCHEMAS][S_CONFIG_SCHEMA]["is_list"] = True
+        output[key][S_SCHEMAS][S_CONFIG_SCHEMA]["is_list"] = True


 def register_known_schema(module, name, schema):
@@ -230,7 +231,7 @@ def add_module_registries(domain, module):
             reg_type = attr_name.partition("_")[0].lower()
             found_registries[repr(attr_obj)] = f"{domain}.{reg_type}"
-            for name in attr_obj.keys():
+            for name in attr_obj:
                 if "." not in name:
                     reg_entry_name = name
                 else:
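Iterating a dict directly yields its keys, so .keys() is redundant (ruff rule SIM118). A two-line illustration with made-up registry names:

registry = {"sensor.template": 1, "output": 2}
for name in registry:  # same keys as registry.keys(), one lookup fewer
    print(name.partition(".")[0])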
@@ -700,7 +701,7 @@ def is_convertible_schema(schema):
     if repr(schema) in ejs.registry_schemas:
         return True
     if isinstance(schema, dict):
-        for k in schema.keys():
+        for k in schema:
             if isinstance(k, (cv.Required, cv.Optional)):
                 return True
     return False
@@ -818,7 +819,7 @@ def convert(schema, config_var, path):
     elif schema_type == "automation":
         extra_schema = None
         config_var[S_TYPE] = "trigger"
-        if automation.AUTOMATION_SCHEMA == ejs.extended_schemas[repr(data)][0]:
+        if ejs.extended_schemas[repr(data)][0] == automation.AUTOMATION_SCHEMA:
             extra_schema = ejs.extended_schemas[repr(data)][1]
         if (
             extra_schema is not None and len(extra_schema) > 1
@@ -926,9 +927,8 @@ def convert(schema, config_var, path):
             config = convert_config(schema_type, path + "/type_" + schema_key)
             types[schema_key] = config["schema"]
-    elif DUMP_UNKNOWN:
-        if S_TYPE not in config_var:
-            config_var["unknown"] = repr_schema
+    elif DUMP_UNKNOWN and S_TYPE not in config_var:
+        config_var["unknown"] = repr_schema
     if DUMP_PATH:
         config_var["path"] = path
View File
@@ -66,9 +66,10 @@ def main():
     )
     args = parser.parse_args()
-    files = []
-    for path in git_ls_files(["*.cpp", "*.h", "*.tcc"]):
-        files.append(os.path.relpath(path, os.getcwd()))
+    cwd = os.getcwd()
+    files = [
+        os.path.relpath(path, cwd) for path in git_ls_files(["*.cpp", "*.h", "*.tcc"])
+    ]
     if args.files:
         # Match against files specified on command-line
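Two things change here: the loop becomes a comprehension (PERF401), and os.getcwd() is hoisted so it runs once instead of once per file. A self-contained sketch; git_ls_files is stubbed out below, since the real helper shells out to git:

import os

def git_ls_files(patterns: list[str]) -> list[str]:
    # Stand-in for the real helper, which runs `git ls-files` with the patterns.
    return [os.path.abspath(p) for p in ("esphome/core.cpp", "esphome/core.h")]

# getcwd() is called once, and the comprehension avoids the repeated
# list.append() attribute lookups of the old loop.
cwd = os.getcwd()
files = [os.path.relpath(path, cwd) for path in git_ls_files(["*.cpp", "*.h", "*.tcc"])]
print(files)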
View File
@@ -219,9 +219,8 @@ def main():
     )
     args = parser.parse_args()
-    files = []
-    for path in git_ls_files(["*.cpp"]):
-        files.append(os.path.relpath(path, os.getcwd()))
+    cwd = os.getcwd()
+    files = [os.path.relpath(path, cwd) for path in git_ls_files(["*.cpp"])]
     # Print initial file count if it's large
     if len(files) > 50:
View File
@@ -365,9 +365,11 @@ def load_idedata(environment: str) -> dict[str, Any]:
     platformio_ini = Path(root_path) / "platformio.ini"
     temp_idedata = Path(temp_folder) / f"idedata-{environment}.json"
     changed = False
-    if not platformio_ini.is_file() or not temp_idedata.is_file():
-        changed = True
-    elif platformio_ini.stat().st_mtime >= temp_idedata.stat().st_mtime:
+    if (
+        not platformio_ini.is_file()
+        or not temp_idedata.is_file()
+        or platformio_ini.stat().st_mtime >= temp_idedata.stat().st_mtime
+    ):
         changed = True
     if "idf" in environment:
View File
@@ -41,11 +41,12 @@ CONFIG_NEWLIB_LIBC=y
     return include_paths


 def extract_defines(command):
-    defines = []
     define_pattern = re.compile(r"-D\s*([^\s]+)")
-    for match in define_pattern.findall(command):
-        if match not in ("_ASMLANGUAGE"):
-            defines.append(match)
+    defines = [
+        match
+        for match in define_pattern.findall(command)
+        if match not in ("_ASMLANGUAGE")
+    ]
     return defines


 def find_cxx_path(commands):
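One quirk carried over unchanged: ("_ASMLANGUAGE") is a parenthesized string, not a tuple, so `not in` performs a substring test rather than a membership test. It happens to behave for this exact value, but a one-element tuple states the intent. A hedged sketch of the same helper with that nuance made explicit:

import re

def extract_defines(command: str) -> list[str]:
    define_pattern = re.compile(r"-D\s*([^\s]+)")
    return [
        match
        for match in define_pattern.findall(command)
        if match not in ("_ASMLANGUAGE",)  # trailing comma makes this a tuple
    ]

print(extract_defines("gcc -DUSE_ZEPHYR -D_ASMLANGUAGE -DF_CPU=16000000 main.c"))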
@@ -78,13 +79,14 @@ CONFIG_NEWLIB_LIBC=y
     return include_paths


 def extract_cxx_flags(command):
-    flags = []
     # Extracts CXXFLAGS from the command string, excluding includes and defines.
     flag_pattern = re.compile(
         r"(-O[0-3s]|-g|-std=[^\s]+|-Wall|-Wextra|-Werror|--[^\s]+|-f[^\s]+|-m[^\s]+|-imacros\s*[^\s]+)"
     )
-    for match in flag_pattern.findall(command):
-        flags.append(match.replace("-imacros ", "-imacros"))
+    flags = [
+        match.replace("-imacros ", "-imacros")
+        for match in flag_pattern.findall(command)
+    ]
     return flags


 def transform_to_idedata_format(compile_commands):
View File
@@ -1,3 +1,7 @@
 button:
   - platform: factory_reset
     name: Reset to Factory Default Settings
+
+factory_reset:
+  resets_required: 5
+  max_delay: 10s
View File
@@ -0,0 +1 @@
+<<: !include common.yaml
View File
@@ -1 +1,4 @@
+esp8266:
+  restore_from_flash: true
+
 <<: !include common.yaml
View File
@@ -1 +1,3 @@
-<<: !include common.yaml
+button:
+  - platform: factory_reset
+    name: Reset to Factory Default Settings
View File
@@ -0,0 +1,5 @@
+substitutions:
+  tx_pin: GPIO4
+  rx_pin: GPIO5
+
+<<: !include common.yaml
View File
@@ -0,0 +1,5 @@
+substitutions:
+  tx_pin: GPIO12
+  rx_pin: GPIO14
+
+<<: !include common.yaml
Some files were not shown because too many files have changed in this diff.