[CI] Refactor auto-label workflow: modular architecture, CODEOWNERS automation, and performance improvements (#9860)

Jesse Hills 2025-07-24 23:18:29 +12:00 committed by GitHub
parent 1344103086
commit ba1de5feff
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194


@@ -23,24 +23,6 @@ jobs:
- name: Checkout
uses: actions/checkout@v4.2.2
- name: Get changes
id: changes
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
# Get PR number
pr_number="${{ github.event.pull_request.number }}"
# Get list of changed files using gh CLI
files=$(gh pr diff $pr_number --name-only)
echo "files<<EOF" >> $GITHUB_OUTPUT
echo "$files" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
# Get file stats (additions + deletions) using gh CLI
stats=$(gh pr view $pr_number --json files --jq '.files | map(.additions + .deletions) | add')
echo "total_changes=${stats:-0}" >> $GITHUB_OUTPUT
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@v2
@@ -55,93 +37,453 @@ jobs:
script: |
const fs = require('fs');
// Constants
const SMALL_PR_THRESHOLD = parseInt('${{ env.SMALL_PR_THRESHOLD }}');
const MAX_LABELS = parseInt('${{ env.MAX_LABELS }}');
const TOO_BIG_THRESHOLD = parseInt('${{ env.TOO_BIG_THRESHOLD }}');
const BOT_COMMENT_MARKER = '<!-- auto-label-pr-bot -->';
const CODEOWNERS_MARKER = '<!-- codeowners-request -->';
const TOO_BIG_MARKER = '<!-- too-big-request -->';
const MANAGED_LABELS = [
'new-component',
'new-platform',
'new-target-platform',
'merging-to-release',
'merging-to-beta',
'core',
'small-pr',
'dashboard',
'github-actions',
'by-code-owner',
'has-tests',
'needs-tests',
'needs-docs',
'needs-codeowners',
'too-big',
'labeller-recheck'
];
const DOCS_PR_PATTERNS = [
/https:\/\/github\.com\/esphome\/esphome-docs\/pull\/\d+/,
/esphome\/esphome-docs#\d+/
];
// Global state
const { owner, repo } = context.repo;
const pr_number = context.issue.number;
// Hidden marker to identify bot comments from this workflow
const BOT_COMMENT_MARKER = '<!-- auto-label-pr-bot -->';
// Get current labels
// Get current labels and PR data
const { data: currentLabelsData } = await github.rest.issues.listLabelsOnIssue({
owner,
repo,
issue_number: pr_number
});
const currentLabels = currentLabelsData.map(label => label.name);
// Define managed labels that this workflow controls
const managedLabels = currentLabels.filter(label =>
label.startsWith('component: ') ||
[
'new-component',
'new-platform',
'new-target-platform',
'merging-to-release',
'merging-to-beta',
'core',
'small-pr',
'dashboard',
'github-actions',
'by-code-owner',
'has-tests',
'needs-tests',
'needs-docs',
'too-big',
'labeller-recheck'
].includes(label)
label.startsWith('component: ') || MANAGED_LABELS.includes(label)
);
const { data: prFiles } = await github.rest.pulls.listFiles({
owner,
repo,
pull_number: pr_number
});
// Calculate data from PR files
const changedFiles = prFiles.map(file => file.filename);
const totalChanges = prFiles.reduce((sum, file) => sum + (file.additions || 0) + (file.deletions || 0), 0);
console.log('Current labels:', currentLabels.join(', '));
console.log('Managed labels:', managedLabels.join(', '));
// Get changed files
const changedFiles = `${{ steps.changes.outputs.files }}`.split('\n').filter(f => f.length > 0);
const totalChanges = parseInt('${{ steps.changes.outputs.total_changes }}') || 0;
console.log('Changed files:', changedFiles.length);
console.log('Total changes:', totalChanges);
const labels = new Set();
// Fetch TARGET_PLATFORMS and PLATFORM_COMPONENTS from API
let targetPlatforms = [];
let platformComponents = [];
try {
const response = await fetch('https://data.esphome.io/components.json');
const componentsData = await response.json();
// Extract target platforms and platform components directly from API
targetPlatforms = componentsData.target_platforms || [];
platformComponents = componentsData.platform_components || [];
console.log('Target platforms from API:', targetPlatforms.length, targetPlatforms);
console.log('Platform components from API:', platformComponents.length, platformComponents);
} catch (error) {
console.log('Failed to fetch components data from API:', error.message);
}
// Get environment variables
const smallPrThreshold = parseInt('${{ env.SMALL_PR_THRESHOLD }}');
const maxLabels = parseInt('${{ env.MAX_LABELS }}');
const tooBigThreshold = parseInt('${{ env.TOO_BIG_THRESHOLD }}');
// Strategy: Merge to release or beta branch
const baseRef = context.payload.pull_request.base.ref;
if (baseRef !== 'dev') {
// Fetch API data
async function fetchApiData() {
try {
const response = await fetch('https://data.esphome.io/components.json');
const componentsData = await response.json();
return {
targetPlatforms: componentsData.target_platforms || [],
platformComponents: componentsData.platform_components || []
};
} catch (error) {
console.log('Failed to fetch components data from API:', error.message);
return { targetPlatforms: [], platformComponents: [] };
}
}
// Strategy: Merge branch detection
async function detectMergeBranch() {
const labels = new Set();
const baseRef = context.payload.pull_request.base.ref;
if (baseRef === 'release') {
labels.add('merging-to-release');
} else if (baseRef === 'beta') {
labels.add('merging-to-beta');
}
// When targeting non-dev branches, only use merge warning labels
const finalLabels = Array.from(labels);
return labels;
}
// Strategy: Component and platform labeling
async function detectComponentPlatforms(apiData) {
const labels = new Set();
const componentRegex = /^esphome\/components\/([^\/]+)\//;
const targetPlatformRegex = new RegExp(`^esphome\/components\/(${apiData.targetPlatforms.join('|')})/`);
for (const file of changedFiles) {
const componentMatch = file.match(componentRegex);
if (componentMatch) {
labels.add(`component: ${componentMatch[1]}`);
}
const platformMatch = file.match(targetPlatformRegex);
if (platformMatch) {
labels.add(`platform: ${platformMatch[1]}`);
}
}
return labels;
}
// Strategy: New component detection
async function detectNewComponents() {
const labels = new Set();
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
for (const file of addedFiles) {
const componentMatch = file.match(/^esphome\/components\/([^\/]+)\/__init__\.py$/);
if (componentMatch) {
try {
const content = fs.readFileSync(file, 'utf8');
if (content.includes('IS_TARGET_PLATFORM = True')) {
labels.add('new-target-platform');
}
} catch (error) {
console.log(`Failed to read content of ${file}:`, error.message);
}
labels.add('new-component');
}
}
return labels;
}
// Strategy: New platform detection
async function detectNewPlatforms(apiData) {
const labels = new Set();
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
for (const file of addedFiles) {
const platformFileMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\.py$/);
if (platformFileMatch) {
const [, component, platform] = platformFileMatch;
if (apiData.platformComponents.includes(platform)) {
labels.add('new-platform');
}
}
const platformDirMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\/__init__\.py$/);
if (platformDirMatch) {
const [, component, platform] = platformDirMatch;
if (apiData.platformComponents.includes(platform)) {
labels.add('new-platform');
}
}
}
return labels;
}
// Strategy: Core files detection
async function detectCoreChanges() {
const labels = new Set();
const coreFiles = changedFiles.filter(file =>
file.startsWith('esphome/core/') ||
(file.startsWith('esphome/') && file.split('/').length === 2)
);
if (coreFiles.length > 0) {
labels.add('core');
}
return labels;
}
// Strategy: PR size detection
async function detectPRSize() {
const labels = new Set();
const testChanges = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.additions || 0) + (file.deletions || 0), 0);
const nonTestChanges = totalChanges - testChanges;
if (totalChanges <= SMALL_PR_THRESHOLD) {
labels.add('small-pr');
}
if (nonTestChanges > TOO_BIG_THRESHOLD) {
labels.add('too-big');
}
return labels;
}
// Strategy: Dashboard changes
async function detectDashboardChanges() {
const labels = new Set();
const dashboardFiles = changedFiles.filter(file =>
file.startsWith('esphome/dashboard/') ||
file.startsWith('esphome/components/dashboard_import/')
);
if (dashboardFiles.length > 0) {
labels.add('dashboard');
}
return labels;
}
// Strategy: GitHub Actions changes
async function detectGitHubActionsChanges() {
const labels = new Set();
const githubActionsFiles = changedFiles.filter(file =>
file.startsWith('.github/workflows/')
);
if (githubActionsFiles.length > 0) {
labels.add('github-actions');
}
return labels;
}
// Strategy: Code owner detection
async function detectCodeOwner() {
const labels = new Set();
try {
const { data: codeownersFile } = await github.rest.repos.getContent({
owner,
repo,
path: 'CODEOWNERS',
});
const codeownersContent = Buffer.from(codeownersFile.content, 'base64').toString('utf8');
const prAuthor = context.payload.pull_request.user.login;
const codeownersLines = codeownersContent.split('\n')
.map(line => line.trim())
.filter(line => line && !line.startsWith('#'));
const codeownersRegexes = codeownersLines.map(line => {
const parts = line.split(/\s+/);
const pattern = parts[0];
const owners = parts.slice(1);
let regex;
if (pattern.endsWith('*')) {
const dir = pattern.slice(0, -1);
regex = new RegExp(`^${dir.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`);
} else if (pattern.includes('*')) {
const regexPattern = pattern
.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
.replace(/\\*/g, '.*');
regex = new RegExp(`^${regexPattern}$`);
} else {
regex = new RegExp(`^${pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}$`);
}
return { regex, owners };
});
for (const file of changedFiles) {
for (const { regex, owners } of codeownersRegexes) {
if (regex.test(file) && owners.some(owner => owner === `@${prAuthor}`)) {
labels.add('by-code-owner');
return labels;
}
}
}
} catch (error) {
console.log('Failed to read or parse CODEOWNERS file:', error.message);
}
return labels;
}
// Strategy: Test detection
async function detectTests() {
const labels = new Set();
const testFiles = changedFiles.filter(file => file.startsWith('tests/'));
if (testFiles.length > 0) {
labels.add('has-tests');
}
return labels;
}
// Strategy: Requirements detection
async function detectRequirements(allLabels) {
const labels = new Set();
// Check for missing tests
if ((allLabels.has('new-component') || allLabels.has('new-platform')) && !allLabels.has('has-tests')) {
labels.add('needs-tests');
}
// Check for missing docs
if (allLabels.has('new-component') || allLabels.has('new-platform')) {
const prBody = context.payload.pull_request.body || '';
const hasDocsLink = DOCS_PR_PATTERNS.some(pattern => pattern.test(prBody));
if (!hasDocsLink) {
labels.add('needs-docs');
}
}
// Check for missing CODEOWNERS
if (allLabels.has('new-component')) {
const codeownersModified = prFiles.some(file =>
file.filename === 'CODEOWNERS' &&
(file.status === 'modified' || file.status === 'added') &&
(file.additions || 0) > 0
);
if (!codeownersModified) {
labels.add('needs-codeowners');
}
}
return labels;
}
// Generate review messages
function generateReviewMessages(finalLabels) {
const messages = [];
const prAuthor = context.payload.pull_request.user.login;
// Too big message
if (finalLabels.includes('too-big')) {
const testChanges = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.additions || 0) + (file.deletions || 0), 0);
const nonTestChanges = totalChanges - testChanges;
const tooManyLabels = finalLabels.length > MAX_LABELS;
const tooManyChanges = nonTestChanges > TOO_BIG_THRESHOLD;
let message = `${TOO_BIG_MARKER}\n### 📦 Pull Request Size\n\n`;
if (tooManyLabels && tooManyChanges) {
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests) and affects ${finalLabels.length} different components/areas.`;
} else if (tooManyLabels) {
message += `This PR affects ${finalLabels.length} different components/areas.`;
} else {
message += `This PR is too large with ${nonTestChanges} line changes (excluding tests).`;
}
message += ` Please consider breaking it down into smaller, focused PRs to make review easier and reduce the risk of conflicts.\n\n`;
message += `For guidance on breaking down large PRs, see: https://developers.esphome.io/contributing/submitting-your-work/#how-to-approach-large-submissions`;
messages.push(message);
}
// CODEOWNERS message
if (finalLabels.includes('needs-codeowners')) {
const message = `${CODEOWNERS_MARKER}\n### 👥 Code Ownership\n\n` +
`Hey there @${prAuthor},\n` +
`Thanks for submitting this pull request! Can you add yourself as a codeowner for this integration? ` +
`This way we can notify you if a bug report for this integration is reported.\n\n` +
`In \`__init__.py\` of the integration, please add:\n\n` +
`\`\`\`python\nCODEOWNERS = ["@${prAuthor}"]\n\`\`\`\n\n` +
`And run \`script/build_codeowners.py\``;
messages.push(message);
}
return messages;
}
// Handle reviews
async function handleReviews(finalLabels) {
const reviewMessages = generateReviewMessages(finalLabels);
const hasReviewableLabels = finalLabels.some(label =>
['too-big', 'needs-codeowners'].includes(label)
);
const { data: reviews } = await github.rest.pulls.listReviews({
owner,
repo,
pull_number: pr_number
});
const botReviews = reviews.filter(review =>
review.user.type === 'Bot' &&
review.state === 'CHANGES_REQUESTED' &&
review.body && review.body.includes(BOT_COMMENT_MARKER)
);
if (hasReviewableLabels) {
const reviewBody = `${BOT_COMMENT_MARKER}\n\n${reviewMessages.join('\n\n---\n\n')}`;
if (botReviews.length > 0) {
// Update existing review
await github.rest.pulls.updateReview({
owner,
repo,
pull_number: pr_number,
review_id: botReviews[0].id,
body: reviewBody
});
console.log('Updated existing bot review');
} else {
// Create new review
await github.rest.pulls.createReview({
owner,
repo,
pull_number: pr_number,
body: reviewBody,
event: 'REQUEST_CHANGES'
});
console.log('Created new bot review');
}
} else if (botReviews.length > 0) {
// Dismiss existing reviews
for (const review of botReviews) {
try {
await github.rest.pulls.dismissReview({
owner,
repo,
pull_number: pr_number,
review_id: review.id,
message: 'Review dismissed: All requirements have been met'
});
console.log(`Dismissed bot review ${review.id}`);
} catch (error) {
console.log(`Failed to dismiss review ${review.id}:`, error.message);
}
}
}
}
// Main execution
const apiData = await fetchApiData();
const baseRef = context.payload.pull_request.base.ref;
// Early exit for non-dev branches
if (baseRef !== 'dev') {
const branchLabels = await detectMergeBranch();
const finalLabels = Array.from(branchLabels);
console.log('Computed labels (merge branch only):', finalLabels.join(', '));
// Add new labels
// Apply labels
if (finalLabels.length > 0) {
console.log(`Adding labels: ${finalLabels.join(', ')}`);
await github.rest.issues.addLabels({
owner,
repo,
@@ -150,13 +492,9 @@ jobs:
});
}
// Remove old managed labels that are no longer needed
const labelsToRemove = managedLabels.filter(label =>
!finalLabels.includes(label)
);
// Remove old managed labels
const labelsToRemove = managedLabels.filter(label => !finalLabels.includes(label));
for (const label of labelsToRemove) {
console.log(`Removing label: ${label}`);
try {
await github.rest.issues.removeLabel({
owner,
@@ -169,324 +507,70 @@ jobs:
}
}
return; // Exit early, don't process other strategies
return;
} }
// Run all strategies
const [
branchLabels,
componentLabels,
newComponentLabels,
newPlatformLabels,
coreLabels,
sizeLabels,
dashboardLabels,
actionsLabels,
codeOwnerLabels,
testLabels
] = await Promise.all([
detectMergeBranch(),
detectComponentPlatforms(apiData),
detectNewComponents(),
detectNewPlatforms(apiData),
detectCoreChanges(),
detectPRSize(),
detectDashboardChanges(),
detectGitHubActionsChanges(),
detectCodeOwner(),
detectTests()
]);
// Combine all labels
const allLabels = new Set([
...branchLabels,
...componentLabels,
...newComponentLabels,
...newPlatformLabels,
...coreLabels,
...sizeLabels,
...dashboardLabels,
...actionsLabels,
...codeOwnerLabels,
...testLabels
]);
// Detect requirements based on all other labels
const requirementLabels = await detectRequirements(allLabels);
for (const label of requirementLabels) {
allLabels.add(label);
}
let finalLabels = Array.from(allLabels);
// Handle too many labels
const isMegaPR = currentLabels.includes('mega-pr');
const tooManyLabels = finalLabels.length > MAX_LABELS;
if (tooManyLabels && !isMegaPR && !finalLabels.includes('too-big')) {
finalLabels = ['too-big'];
}
// Strategy: Component and Platform labeling
const componentRegex = /^esphome\/components\/([^\/]+)\//;
const targetPlatformRegex = new RegExp(`^esphome\/components\/(${targetPlatforms.join('|')})/`);
for (const file of changedFiles) {
// Check for component changes
const componentMatch = file.match(componentRegex);
if (componentMatch) {
const component = componentMatch[1];
labels.add(`component: ${component}`);
}
// Check for target platform changes
const platformMatch = file.match(targetPlatformRegex);
if (platformMatch) {
const targetPlatform = platformMatch[1];
labels.add(`platform: ${targetPlatform}`);
}
}
// Get PR files for new component/platform detection
const { data: prFiles } = await github.rest.pulls.listFiles({
owner,
repo,
pull_number: pr_number
});
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
// Calculate changes excluding root tests directory for too-big calculation
const testChanges = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.additions || 0) + (file.deletions || 0), 0);
const nonTestChanges = totalChanges - testChanges;
console.log(`Test changes: ${testChanges}, Non-test changes: ${nonTestChanges}`);
// Strategy: New Component detection
for (const file of addedFiles) {
// Check for new component files: esphome/components/{component}/__init__.py
const componentMatch = file.match(/^esphome\/components\/([^\/]+)\/__init__\.py$/);
if (componentMatch) {
try {
// Read the content directly from the filesystem since we have it checked out
const content = fs.readFileSync(file, 'utf8');
// Strategy: New Target Platform detection
if (content.includes('IS_TARGET_PLATFORM = True')) {
labels.add('new-target-platform');
}
labels.add('new-component');
} catch (error) {
console.log(`Failed to read content of ${file}:`, error.message);
// Fallback: assume it's a new component if we can't read the content
labels.add('new-component');
}
}
} }
// Strategy: New Platform detection
for (const file of addedFiles) {
// Check for new platform files: esphome/components/{component}/{platform}.py
const platformFileMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\.py$/);
if (platformFileMatch) {
const [, component, platform] = platformFileMatch;
if (platformComponents.includes(platform)) {
labels.add('new-platform');
}
}
// Check for new platform files: esphome/components/{component}/{platform}/__init__.py
const platformDirMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\/__init__\.py$/);
if (platformDirMatch) {
const [, component, platform] = platformDirMatch;
if (platformComponents.includes(platform)) {
labels.add('new-platform');
}
}
}
const coreFiles = changedFiles.filter(file =>
file.startsWith('esphome/core/') ||
(file.startsWith('esphome/') && file.split('/').length === 2)
);
if (coreFiles.length > 0) {
labels.add('core');
}
// Strategy: Small PR detection
if (totalChanges <= smallPrThreshold) {
labels.add('small-pr');
}
// Strategy: Dashboard changes
const dashboardFiles = changedFiles.filter(file =>
file.startsWith('esphome/dashboard/') ||
file.startsWith('esphome/components/dashboard_import/')
);
if (dashboardFiles.length > 0) {
labels.add('dashboard');
}
// Strategy: GitHub Actions changes
const githubActionsFiles = changedFiles.filter(file =>
file.startsWith('.github/workflows/')
);
if (githubActionsFiles.length > 0) {
labels.add('github-actions');
}
// Strategy: Code Owner detection
try {
// Fetch CODEOWNERS file from the repository (in case it was changed in this PR)
const { data: codeownersFile } = await github.rest.repos.getContent({
owner,
repo,
path: 'CODEOWNERS',
});
const codeownersContent = Buffer.from(codeownersFile.content, 'base64').toString('utf8');
const prAuthor = context.payload.pull_request.user.login;
// Parse CODEOWNERS file
const codeownersLines = codeownersContent.split('\n')
.map(line => line.trim())
.filter(line => line && !line.startsWith('#'));
let isCodeOwner = false;
// Precompile CODEOWNERS patterns into regex objects
const codeownersRegexes = codeownersLines.map(line => {
const parts = line.split(/\s+/);
const pattern = parts[0];
const owners = parts.slice(1);
let regex;
if (pattern.endsWith('*')) {
// Directory pattern like "esphome/components/api/*"
const dir = pattern.slice(0, -1);
regex = new RegExp(`^${dir.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`);
} else if (pattern.includes('*')) {
// Glob pattern
const regexPattern = pattern
.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
.replace(/\\*/g, '.*');
regex = new RegExp(`^${regexPattern}$`);
} else {
// Exact match
regex = new RegExp(`^${pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}$`);
}
return { regex, owners };
});
for (const file of changedFiles) {
for (const { regex, owners } of codeownersRegexes) {
if (regex.test(file)) {
// Check if PR author is in the owners list
if (owners.some(owner => owner === `@${prAuthor}`)) {
isCodeOwner = true;
break;
}
}
}
if (isCodeOwner) break;
}
if (isCodeOwner) {
labels.add('by-code-owner');
}
} catch (error) {
console.log('Failed to read or parse CODEOWNERS file:', error.message);
}
// Strategy: Test detection
const testFiles = changedFiles.filter(file =>
file.startsWith('tests/')
);
if (testFiles.length > 0) {
labels.add('has-tests');
} else {
// Only check for needs-tests if this is a new component or new platform
if (labels.has('new-component') || labels.has('new-platform')) {
labels.add('needs-tests');
}
}
// Strategy: Documentation check for new components/platforms
if (labels.has('new-component') || labels.has('new-platform')) {
const prBody = context.payload.pull_request.body || '';
// Look for documentation PR links
// Patterns to match:
// - https://github.com/esphome/esphome-docs/pull/1234
// - esphome/esphome-docs#1234
const docsPrPatterns = [
/https:\/\/github\.com\/esphome\/esphome-docs\/pull\/\d+/,
/esphome\/esphome-docs#\d+/
];
const hasDocsLink = docsPrPatterns.some(pattern => pattern.test(prBody));
if (!hasDocsLink) {
labels.add('needs-docs');
}
}
// Convert Set to Array
let finalLabels = Array.from(labels);
console.log('Computed labels:', finalLabels.join(', '));
// Handle reviews
await handleReviews(finalLabels);
// Apply labels
// Check if PR has mega-pr label
const isMegaPR = currentLabels.includes('mega-pr');
// Check if PR is too big (either too many labels or too many line changes)
const tooManyLabels = finalLabels.length > maxLabels;
const tooManyChanges = nonTestChanges > tooBigThreshold;
if ((tooManyLabels || tooManyChanges) && !isMegaPR) {
const originalLength = finalLabels.length;
console.log(`PR is too big - Labels: ${originalLength}, Changes: ${totalChanges} (non-test: ${nonTestChanges})`);
// Get all reviews on this PR to check for existing bot reviews
const { data: reviews } = await github.rest.pulls.listReviews({
owner,
repo,
pull_number: pr_number
});
// Check if there's already an active bot review requesting changes
const existingBotReview = reviews.find(review =>
review.user.type === 'Bot' &&
review.state === 'CHANGES_REQUESTED' &&
review.body && review.body.includes(BOT_COMMENT_MARKER)
);
// If too big due to line changes only, keep original labels and add too-big
// If too big due to too many labels, replace with just too-big
if (tooManyChanges && !tooManyLabels) {
finalLabels.push('too-big');
} else {
finalLabels = ['too-big'];
}
// Only create a new review if there isn't already an active bot review
if (!existingBotReview) {
// Create appropriate review message
let reviewBody;
if (tooManyLabels && tooManyChanges) {
reviewBody = `${BOT_COMMENT_MARKER}\nThis PR is too large with ${nonTestChanges} line changes (excluding tests) and affects ${originalLength} different components/areas. Please consider breaking it down into smaller, focused PRs to make review easier and reduce the risk of conflicts.\n\nFor guidance on breaking down large PRs, see: https://developers.esphome.io/contributing/submitting-your-work/#but-howwww-looonnnggg`;
} else if (tooManyLabels) {
reviewBody = `${BOT_COMMENT_MARKER}\nThis PR affects ${originalLength} different components/areas. Please consider breaking it down into smaller, focused PRs to make review easier and reduce the risk of conflicts.\n\nFor guidance on breaking down large PRs, see: https://developers.esphome.io/contributing/submitting-your-work/#but-howwww-looonnnggg`;
} else {
reviewBody = `${BOT_COMMENT_MARKER}\nThis PR is too large with ${nonTestChanges} line changes (excluding tests). Please consider breaking it down into smaller, focused PRs to make review easier and reduce the risk of conflicts.\n\nFor guidance on breaking down large PRs, see: https://developers.esphome.io/contributing/submitting-your-work/#but-howwww-looonnnggg`;
}
// Request changes on the PR
await github.rest.pulls.createReview({
owner,
repo,
pull_number: pr_number,
body: reviewBody,
event: 'REQUEST_CHANGES'
});
console.log('Created new "too big" review requesting changes');
} else {
console.log('Skipping review creation - existing bot review already requesting changes');
}
} else {
// Check if PR was previously too big but is now acceptable
const wasPreviouslyTooBig = currentLabels.includes('too-big');
if (wasPreviouslyTooBig || isMegaPR) {
console.log('PR is no longer too big or has mega-pr label - dismissing bot reviews');
// Get all reviews on this PR to find reviews to dismiss
const { data: reviews } = await github.rest.pulls.listReviews({
owner,
repo,
pull_number: pr_number
});
// Find bot reviews that requested changes
const botReviews = reviews.filter(review =>
review.user.type === 'Bot' &&
review.state === 'CHANGES_REQUESTED' &&
review.body && review.body.includes(BOT_COMMENT_MARKER)
);
// Dismiss bot reviews
for (const review of botReviews) {
try {
await github.rest.pulls.dismissReview({
owner,
repo,
pull_number: pr_number,
review_id: review.id,
message: isMegaPR ?
'Review dismissed: mega-pr label was added' :
'Review dismissed: PR size is now acceptable'
});
console.log(`Dismissed review ${review.id}`);
} catch (error) {
console.log(`Failed to dismiss review ${review.id}:`, error.message);
}
}
}
}
// Add new labels
if (finalLabels.length > 0) {
console.log(`Adding labels: ${finalLabels.join(', ')}`);
await github.rest.issues.addLabels({
@@ -497,11 +581,8 @@ jobs:
});
}
// Remove old managed labels that are no longer needed
const labelsToRemove = managedLabels.filter(label =>
!finalLabels.includes(label)
);
// Remove old managed labels
const labelsToRemove = managedLabels.filter(label => !finalLabels.includes(label));
for (const label of labelsToRemove) {
console.log(`Removing label: ${label}`);
try {