treewide: make less webhook-centric

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Ifab58fcb523549ca9cb83dc8467be51e6a6a6964
This commit is contained in:
raf 2026-02-01 14:38:58 +03:00
commit 374408834b
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
9 changed files with 479 additions and 39 deletions

View file

@ -9,8 +9,7 @@ export function createDashboardRouter(config: Config): express.Router {
router.use(express.json()); router.use(express.json());
// --- API routes --- // API routes
router.get('/api/status', (_req, res) => { router.get('/api/status', (_req, res) => {
const enabledBackends = Object.entries(config.engine.backends) const enabledBackends = Object.entries(config.engine.backends)
.filter(([, v]) => v.enabled) .filter(([, v]) => v.enabled)
@ -41,7 +40,10 @@ export function createDashboardRouter(config: Config): express.Router {
router.put('/api/config', (req, res) => { router.put('/api/config', (req, res) => {
try { try {
const partial = req.body as Partial<Config>; const partial = req.body as Partial<Config>;
const merged = deepMerge(config as Record<string, unknown>, partial as Record<string, unknown>) as Config; const merged = deepMerge(
config as Record<string, unknown>,
partial as Record<string, unknown>
) as Config;
validate(merged); validate(merged);
// Apply in-place // Apply in-place

View file

@ -81,20 +81,17 @@ export class ChecksBackend implements EngineBackend {
// Classify failures by severity // Classify failures by severity
const criticalFailures = failed.filter((r) => classifyCheck(r.name) === 'critical'); const criticalFailures = failed.filter((r) => classifyCheck(r.name) === 'critical');
const advisoryFailures = failed.filter((r) => classifyCheck(r.name) === 'advisory'); const advisoryFailures = failed.filter((r) => classifyCheck(r.name) === 'advisory');
const standardFailures = failed.filter( const standardFailures = failed.filter((r) => classifyCheck(r.name) === 'standard');
(r) => classifyCheck(r.name) === 'standard'
);
// Weighted scoring: critical failures count 3x, advisory 0.5x // Weighted scoring: critical failures count 3x, advisory 0.5x
const failureScore = const failureScore =
criticalFailures.length * 3 + standardFailures.length * 1 + advisoryFailures.length * 0.5; criticalFailures.length * 3 + standardFailures.length * 1 + advisoryFailures.length * 0.5;
const totalWeight = const totalWeight = completed
completed .filter((r) => !skipped.includes(r))
.filter((r) => !skipped.includes(r)) .reduce((s, r) => {
.reduce((s, r) => { const cls = classifyCheck(r.name);
const cls = classifyCheck(r.name); return s + (cls === 'critical' ? 3 : cls === 'advisory' ? 0.5 : 1);
return s + (cls === 'critical' ? 3 : cls === 'advisory' ? 0.5 : 1); }, 0);
}, 0);
const weightedPassRate = totalWeight > 0 ? 1 - failureScore / totalWeight : 0; const weightedPassRate = totalWeight > 0 ? 1 - failureScore / totalWeight : 0;
@ -117,13 +114,20 @@ export class ChecksBackend implements EngineBackend {
// Build detailed reasoning // Build detailed reasoning
const parts: string[] = []; const parts: string[] = [];
if (passed.length > 0) parts.push(`${passed.length} passed (${passed.map((r) => r.name).join(', ')})`); if (passed.length > 0)
parts.push(`${passed.length} passed (${passed.map((r) => r.name).join(', ')})`);
if (criticalFailures.length > 0) if (criticalFailures.length > 0)
parts.push(`${criticalFailures.length} critical failure(s) (${criticalFailures.map((r) => r.name).join(', ')})`); parts.push(
`${criticalFailures.length} critical failure(s) (${criticalFailures.map((r) => r.name).join(', ')})`
);
if (advisoryFailures.length > 0) if (advisoryFailures.length > 0)
parts.push(`${advisoryFailures.length} advisory failure(s) (${advisoryFailures.map((r) => r.name).join(', ')})`); parts.push(
`${advisoryFailures.length} advisory failure(s) (${advisoryFailures.map((r) => r.name).join(', ')})`
);
if (standardFailures.length > 0) if (standardFailures.length > 0)
parts.push(`${standardFailures.length} other failure(s) (${standardFailures.map((r) => r.name).join(', ')})`); parts.push(
`${standardFailures.length} other failure(s) (${standardFailures.map((r) => r.name).join(', ')})`
);
if (skipped.length > 0) parts.push(`${skipped.length} skipped`); if (skipped.length > 0) parts.push(`${skipped.length} skipped`);
if (pending.length > 0) parts.push(`${pending.length} still running`); if (pending.length > 0) parts.push(`${pending.length} still running`);

View file

@ -15,7 +15,9 @@ const RISKY_FILE_PATTERN =
const DOC_FILE_PATTERN = /\.(md|mdx|txt|rst|adoc)$|^(README|CHANGELOG|LICENSE|CONTRIBUTING)/i; const DOC_FILE_PATTERN = /\.(md|mdx|txt|rst|adoc)$|^(README|CHANGELOG|LICENSE|CONTRIBUTING)/i;
function categorizeFiles(files: { filename: string; additions: number; deletions: number; changes: number }[]) { function categorizeFiles(
files: { filename: string; additions: number; deletions: number; changes: number }[]
) {
const src: typeof files = []; const src: typeof files = [];
const tests: typeof files = []; const tests: typeof files = [];
const generated: typeof files = []; const generated: typeof files = [];
@ -89,7 +91,11 @@ export class DiffBackend implements EngineBackend {
} else if (totalChanges <= this.config.maxChanges) { } else if (totalChanges <= this.config.maxChanges) {
signals.push({ name: `large PR (${totalChanges} lines)`, positive: false, weight: 0.8 }); signals.push({ name: `large PR (${totalChanges} lines)`, positive: false, weight: 0.8 });
} else { } else {
signals.push({ name: `very large PR (${totalChanges} lines, exceeds limit)`, positive: false, weight: 1.5 }); signals.push({
name: `very large PR (${totalChanges} lines, exceeds limit)`,
positive: false,
weight: 1.5,
});
} }
// --- Focus signals --- // --- Focus signals ---
@ -98,9 +104,17 @@ export class DiffBackend implements EngineBackend {
} else if (meaningful.length <= 10) { } else if (meaningful.length <= 10) {
signals.push({ name: 'focused changeset', positive: true, weight: 0.8 }); signals.push({ name: 'focused changeset', positive: true, weight: 0.8 });
} else if (meaningful.length > 30) { } else if (meaningful.length > 30) {
signals.push({ name: `sprawling changeset (${meaningful.length} files)`, positive: false, weight: 1.2 }); signals.push({
name: `sprawling changeset (${meaningful.length} files)`,
positive: false,
weight: 1.2,
});
} else if (meaningful.length > 20) { } else if (meaningful.length > 20) {
signals.push({ name: `broad changeset (${meaningful.length} files)`, positive: false, weight: 0.6 }); signals.push({
name: `broad changeset (${meaningful.length} files)`,
positive: false,
weight: 0.6,
});
} }
// --- Test coverage --- // --- Test coverage ---
@ -129,10 +143,17 @@ export class DiffBackend implements EngineBackend {
// --- Churn detection (files with high add+delete suggesting rewrites) --- // --- Churn detection (files with high add+delete suggesting rewrites) ---
const highChurnFiles = src.filter( const highChurnFiles = src.filter(
(f) => f.additions > 50 && f.deletions > 50 && Math.min(f.additions, f.deletions) / Math.max(f.additions, f.deletions) > 0.6 (f) =>
f.additions > 50 &&
f.deletions > 50 &&
Math.min(f.additions, f.deletions) / Math.max(f.additions, f.deletions) > 0.6
); );
if (highChurnFiles.length >= 3) { if (highChurnFiles.length >= 3) {
signals.push({ name: `high churn in ${highChurnFiles.length} files (possible refactor)`, positive: false, weight: 0.5 }); signals.push({
name: `high churn in ${highChurnFiles.length} files (possible refactor)`,
positive: false,
weight: 0.5,
});
} }
// --- Risky files --- // --- Risky files ---
@ -180,7 +201,11 @@ export class DiffBackend implements EngineBackend {
const totalSignalWeight = positiveWeight + negativeWeight; const totalSignalWeight = positiveWeight + negativeWeight;
const confidence = const confidence =
signals.length > 0 signals.length > 0
? Math.min(1, Math.abs(positiveWeight - negativeWeight) / Math.max(totalSignalWeight, 1) * 0.6 + 0.25) ? Math.min(
1,
(Math.abs(positiveWeight - negativeWeight) / Math.max(totalSignalWeight, 1)) * 0.6 +
0.25
)
: 0; : 0;
// Build reasoning // Build reasoning

View file

@ -44,7 +44,11 @@ export class QualityBackend implements EngineBackend {
if (body.length === 0) { if (body.length === 0) {
signals.push({ name: 'empty description', positive: false, weight: 2 }); signals.push({ name: 'empty description', positive: false, weight: 2 });
} else if (body.length < this.config.minBodyLength) { } else if (body.length < this.config.minBodyLength) {
signals.push({ name: `short description (${body.length} chars)`, positive: false, weight: 1.2 }); signals.push({
name: `short description (${body.length} chars)`,
positive: false,
weight: 1.2,
});
} else if (body.length >= this.config.minBodyLength) { } else if (body.length >= this.config.minBodyLength) {
signals.push({ name: 'adequate description', positive: true, weight: 1 }); signals.push({ name: 'adequate description', positive: true, weight: 1 });
if (body.length > 300) { if (body.length > 300) {
@ -68,7 +72,11 @@ export class QualityBackend implements EngineBackend {
if (total > 0 && checked === total) { if (total > 0 && checked === total) {
signals.push({ name: `checklist complete (${total}/${total})`, positive: true, weight: 1 }); signals.push({ name: `checklist complete (${total}/${total})`, positive: true, weight: 1 });
} else if (total > 0) { } else if (total > 0) {
signals.push({ name: `checklist incomplete (${checked}/${total})`, positive: false, weight: 0.8 }); signals.push({
name: `checklist incomplete (${checked}/${total})`,
positive: false,
weight: 0.8,
});
} }
} }
@ -79,14 +87,22 @@ export class QualityBackend implements EngineBackend {
if (body.length > 100 && BREAKING_PATTERN.test(body)) { if (body.length > 100 && BREAKING_PATTERN.test(body)) {
signals.push({ name: 'breaking change documented', positive: true, weight: 0.8 }); signals.push({ name: 'breaking change documented', positive: true, weight: 0.8 });
} else { } else {
signals.push({ name: 'breaking change mentioned but not detailed', positive: false, weight: 0.8 }); signals.push({
name: 'breaking change mentioned but not detailed',
positive: false,
weight: 0.8,
});
} }
} }
// TODOs/FIXMEs in description suggest unfinished work // TODOs/FIXMEs in description suggest unfinished work
const todoMatches = body.match(TODO_PATTERN); const todoMatches = body.match(TODO_PATTERN);
if (todoMatches) { if (todoMatches) {
signals.push({ name: `unfinished markers in description (${todoMatches.length})`, positive: false, weight: 0.6 }); signals.push({
name: `unfinished markers in description (${todoMatches.length})`,
positive: false,
weight: 0.6,
});
} }
// --- Type-specific signals --- // --- Type-specific signals ---
@ -100,7 +116,9 @@ export class QualityBackend implements EngineBackend {
signals.push({ name: 'has expected/actual behavior', positive: true, weight: 1.2 }); signals.push({ name: 'has expected/actual behavior', positive: true, weight: 1.2 });
} }
if (/\b(version|environment|os|platform|browser|node|python|java|rust|go)\s*[:\d]/i.test(body)) { if (
/\b(version|environment|os|platform|browser|node|python|java|rust|go)\s*[:\d]/i.test(body)
) {
signals.push({ name: 'has environment details', positive: true, weight: 1 }); signals.push({ name: 'has environment details', positive: true, weight: 1 });
} }
@ -140,7 +158,11 @@ export class QualityBackend implements EngineBackend {
// Shared: references to other issues/PRs // Shared: references to other issues/PRs
const refs = body.match(/#\d+/g); const refs = body.match(/#\d+/g);
if (refs && refs.length > 0) { if (refs && refs.length > 0) {
signals.push({ name: `references ${refs.length} issue(s)/PR(s)`, positive: true, weight: 0.6 }); signals.push({
name: `references ${refs.length} issue(s)/PR(s)`,
positive: true,
weight: 0.6,
});
} }
// Screenshots or images // Screenshots or images
@ -169,7 +191,7 @@ export class QualityBackend implements EngineBackend {
const totalWeight = positiveWeight + negativeWeight; const totalWeight = positiveWeight + negativeWeight;
const confidence = Math.min( const confidence = Math.min(
1, 1,
Math.abs(positiveWeight - negativeWeight) / Math.max(totalWeight, 1) * 0.5 + 0.2 (Math.abs(positiveWeight - negativeWeight) / Math.max(totalWeight, 1)) * 0.5 + 0.2
); );
const reasoning = `Quality: ${signals.map((s) => `${s.positive ? '+' : '-'} ${s.name}`).join(', ')}.`; const reasoning = `Quality: ${signals.map((s) => `${s.positive ? '+' : '-'} ${s.name}`).join(', ')}.`;

View file

@ -16,8 +16,7 @@ export function isDryRun(): boolean {
return octokit === null; return octokit === null;
} }
// --- Comment operations --- // Comment operations
export async function postComment( export async function postComment(
owner: string, owner: string,
repo: string, repo: string,
@ -70,8 +69,7 @@ export async function updateComment(
getLogger().info(`Updated comment ${commentId} on ${owner}/${repo}`); getLogger().info(`Updated comment ${commentId} on ${owner}/${repo}`);
} }
// --- Data fetching for engine backends --- // Data fetching for engine backends
export async function fetchCheckRuns( export async function fetchCheckRuns(
owner: string, owner: string,
repo: string, repo: string,
@ -146,8 +144,74 @@ export async function fetchPR(
}; };
} }
// --- Comment formatting --- export async function fetchIssue(
owner: string,
repo: string,
issueNumber: number
): Promise<{
title: string;
body: string;
author: string;
labels: string[];
} | null> {
if (!octokit) return null;
const { data } = await octokit.issues.get({ owner, repo, issue_number: issueNumber });
return {
title: data.title,
body: data.body || '',
author: data.user?.login || '',
labels: (data.labels || []).map((l) => (typeof l === 'string' ? l : l.name || '')),
};
}
/** A repository comment as returned by listRecentComments. */
export interface RecentComment {
// Comment ID from the forge API.
id: number;
// Raw comment body (markdown); comments with empty bodies are filtered out upstream.
body: string;
// Login of the comment author; empty string when the API omits the user.
author: string;
// Creation timestamp string as returned by the API.
createdAt: string;
// Number of the issue/PR the comment belongs to; 0 when it cannot be parsed from the URL.
issueNumber: number;
// NOTE(review): listRecentComments currently always sets this to false and
// callers determine PR-ness by fetching — confirm before relying on this field.
isPullRequest: boolean;
}
/**
 * Fetches repository issue comments created since the given time.
 * Returns an empty list in dry-run mode (no API token configured).
 */
export async function listRecentComments(
  owner: string,
  repo: string,
  since: Date
): Promise<RecentComment[]> {
  if (!octokit) {
    getLogger().debug('[dry-run] Cannot fetch comments without a token');
    return [];
  }
  // Paginated repo-wide issue-comment feed, filtered server-side by `since`.
  const raw = await octokit.paginate(octokit.issues.listCommentsForRepo, {
    owner,
    repo,
    since: since.toISOString(),
    per_page: 100,
  });
  const results: RecentComment[] = [];
  for (const c of raw) {
    if (!c.body) continue;
    // The API exposes the parent issue only via its URL; take the trailing path segment.
    const tail = c.issue_url ? c.issue_url.split('/').pop() || '0' : '0';
    results.push({
      id: c.id,
      body: c.body,
      author: c.user?.login || '',
      createdAt: c.created_at,
      issueNumber: c.issue_url ? parseInt(tail, 10) : 0,
      isPullRequest: false, // we'll determine this by fetching the issue
    });
  }
  return results;
}
// Comment formatting
function pickRandom(list: string[]): string { function pickRandom(list: string[]): string {
return list[Math.floor(Math.random() * list.length)]; return list[Math.floor(Math.random() * list.length)];
} }

View file

@ -10,6 +10,7 @@ import {
} from './github.js'; } from './github.js';
import { createApp } from './server.js'; import { createApp } from './server.js';
import { createEngine } from './engine/index.js'; import { createEngine } from './engine/index.js';
import { startPolling } from './polling.js';
import type { WebhookEvent } from './types.js'; import type { WebhookEvent } from './types.js';
async function analyzeOne(target: string) { async function analyzeOne(target: string) {
@ -95,7 +96,9 @@ function serve() {
); );
} }
if (!process.env.WEBHOOK_SECRET) { if (!process.env.WEBHOOK_SECRET) {
logger.warn('No WEBHOOK_SECRET - webhook signature verification is disabled'); logger.warn(
'No WEBHOOK_SECRET - webhook signature verification is disabled (not needed for polling-only mode)'
);
} }
const app = createApp(config); const app = createApp(config);
@ -105,7 +108,7 @@ function serve() {
.filter(([, v]) => v.enabled) .filter(([, v]) => v.enabled)
.map(([k]) => k); .map(([k]) => k);
const server = app.listen(port, () => { const server = app.listen(port, async () => {
logger.info(`Troutbot listening on port ${port}`); logger.info(`Troutbot listening on port ${port}`);
logger.info(`Enabled backends: ${enabledBackends.join(', ')}`); logger.info(`Enabled backends: ${enabledBackends.join(', ')}`);
@ -138,6 +141,9 @@ function serve() {
logger.info(`Comment updates: ${config.response.allowUpdates ? 'enabled' : 'disabled'}`); logger.info(`Comment updates: ${config.response.allowUpdates ? 'enabled' : 'disabled'}`);
logger.info(`Dashboard available at http://localhost:${port}/dashboard`); logger.info(`Dashboard available at http://localhost:${port}/dashboard`);
// Start polling if enabled
await startPolling(config);
}); });
function shutdown(signal: string) { function shutdown(signal: string) {

223
src/polling.ts Normal file
View file

@ -0,0 +1,223 @@
import type { Config, WebhookEvent } from './types.js';
import {
listRecentComments,
fetchPR,
fetchIssue,
hasExistingComment,
postComment,
updateComment,
formatComment,
type RecentComment,
} from './github.js';
import { createEngine } from './engine/index.js';
import { getLogger } from './logger.js';
import { recordEvent } from './events.js';
/** Bookkeeping entry for a comment we have already handled. */
interface ProcessedComment {
  id: number;
  timestamp: number;
}

// In-memory dedupe cache: "owner/repo#commentId" -> bookkeeping entry.
const seenComments = new Map<string, ProcessedComment>();
// Upper bound on cache entries; the oldest entries are evicted past this.
const SEEN_COMMENT_LIMIT = 1000;

/** Builds the dedupe-cache key for a comment within a repository. */
function getCacheKey(owner: string, repo: string, commentId: number): string {
  return [owner, '/', repo, '#', commentId].join('');
}

/** Returns true when the comment was already handled by this process. */
function isProcessed(owner: string, repo: string, commentId: number): boolean {
  return seenComments.has(getCacheKey(owner, repo, commentId));
}

/** Records a comment as handled, evicting the oldest entries when over capacity. */
function markProcessed(owner: string, repo: string, commentId: number): void {
  seenComments.set(getCacheKey(owner, repo, commentId), {
    id: commentId,
    timestamp: Date.now(),
  });
  if (seenComments.size <= SEEN_COMMENT_LIMIT) return;
  // Drop the entries with the smallest timestamps until we are back at the limit.
  const byAge = [...seenComments.entries()].sort((a, b) => a[1].timestamp - b[1].timestamp);
  const excess = byAge.length - SEEN_COMMENT_LIMIT;
  for (const [staleKey] of byAge.slice(0, excess)) {
    seenComments.delete(staleKey);
  }
}
/** True when a comment body mentions the bot's handle anywhere. */
function containsMention(body: string): boolean {
  return body.indexOf('@troutbot') !== -1;
}
async function analyzeAndComment(
event: WebhookEvent,
config: Config
): Promise<Record<string, unknown>> {
const logger = getLogger();
const engine = createEngine(config.engine);
// Run analysis
const analysis = await engine.analyze(event);
logger.info(
`Analyzed ${event.owner}/${event.repo}#${event.number}: impact=${analysis.impact}, confidence=${analysis.confidence.toFixed(2)}`
);
// Check for existing comment
const { commentMarker, allowUpdates } = config.response;
const existing = await hasExistingComment(event.owner, event.repo, event.number, commentMarker);
if (existing.exists && !allowUpdates) {
logger.info(`Already commented on ${event.owner}/${event.repo}#${event.number}, skipping`);
const result = { skipped: true, reason: 'Already commented' };
recordEvent(event, result, analysis);
return result;
}
const body = formatComment(
config.response,
event.type,
analysis.impact,
analysis.confidence,
analysis.reasoning
);
if (existing.exists && allowUpdates && existing.commentId) {
logger.info(`Updating existing comment on ${event.owner}/${event.repo}#${event.number}`);
await updateComment(event.owner, event.repo, existing.commentId, body);
} else {
await postComment(event.owner, event.repo, event.number, body);
}
const result = { processed: true, impact: analysis.impact, confidence: analysis.confidence };
recordEvent(event, result, analysis);
return result;
}
async function processComment(
comment: RecentComment,
owner: string,
repo: string,
config: Config
): Promise<void> {
const logger = getLogger();
if (!containsMention(comment.body)) {
return;
}
if (isProcessed(owner, repo, comment.id)) {
logger.debug(`Comment ${owner}/${repo}#${comment.id} already processed, skipping`);
return;
}
logger.info(`Found @troutbot mention in ${owner}/${repo}#${comment.issueNumber}`);
try {
// First, try to fetch as a PR to check if it's a pull request
const prData = await fetchPR(owner, repo, comment.issueNumber);
let event: WebhookEvent;
if (prData) {
// It's a pull request
event = {
action: 'on_demand',
type: 'pull_request',
number: comment.issueNumber,
title: prData.title,
body: prData.body,
owner,
repo,
author: prData.author,
labels: prData.labels,
branch: prData.branch,
sha: prData.sha,
};
} else {
// It's an issue
const issueData = await fetchIssue(owner, repo, comment.issueNumber);
if (!issueData) {
logger.warn(`Could not fetch issue ${owner}/${repo}#${comment.issueNumber}`);
return;
}
event = {
action: 'on_demand',
type: 'issue',
number: comment.issueNumber,
title: issueData.title,
body: issueData.body,
owner,
repo,
author: issueData.author,
labels: issueData.labels,
};
}
await analyzeAndComment(event, config);
markProcessed(owner, repo, comment.id);
logger.info(
`Successfully processed on-demand analysis for ${owner}/${repo}#${comment.issueNumber}`
);
} catch (err) {
logger.error(`Failed to process mention in ${owner}/${repo}#${comment.issueNumber}`, err);
}
}
async function pollRepository(
owner: string,
repo: string,
config: Config,
since: Date
): Promise<void> {
const logger = getLogger();
try {
const comments = await listRecentComments(owner, repo, since);
logger.debug(`Fetched ${comments.length} recent comments from ${owner}/${repo}`);
for (const comment of comments) {
await processComment(comment, owner, repo, config);
}
} catch (err) {
logger.error(`Failed to poll ${owner}/${repo}`, err);
}
}
/**
 * Starts the recurring comment-polling loop when enabled in config.
 *
 * Performs one immediate sweep over all configured repositories, then
 * schedules further sweeps every `intervalMinutes`; each sweep looks back
 * `lookbackMinutes` for new comments. Resolves after the initial sweep.
 *
 * Fixes over the previous version:
 * - the interval timer is `unref()`'d so the polling loop no longer pins the
 *   Node event loop open and blocks graceful shutdown (the handle was
 *   previously discarded with no way to clear it);
 * - an in-flight guard skips a tick when the previous sweep is still
 *   running, so slow sweeps cannot stack up and hammer the API.
 */
export async function startPolling(config: Config): Promise<void> {
  const logger = getLogger();
  const pollingConfig = config.polling;
  if (!pollingConfig || !pollingConfig.enabled) {
    logger.info('Polling is disabled');
    return;
  }
  if (config.repositories.length === 0) {
    logger.warn('Polling enabled but no repositories configured');
    return;
  }
  const intervalMs = pollingConfig.intervalMinutes * 60 * 1000;
  const lookbackMs = pollingConfig.lookbackMinutes * 60 * 1000;
  logger.info(`Starting polling for ${config.repositories.length} repositories`);
  logger.info(
    `Poll interval: ${pollingConfig.intervalMinutes} minutes, lookback: ${pollingConfig.lookbackMinutes} minutes`
  );

  // One sweep = poll every configured repository for comments since `since`.
  const sweep = async (since: Date): Promise<void> => {
    for (const repo of config.repositories) {
      await pollRepository(repo.owner, repo.repo, config, since);
    }
  };

  // Initial sweep runs immediately so a restart doesn't wait a full interval.
  await sweep(new Date(Date.now() - lookbackMs));

  // Guard against overlapping sweeps when one takes longer than the interval.
  let sweepInFlight = false;
  const timer = setInterval(async () => {
    if (sweepInFlight) {
      logger.debug('Previous polling sweep still running, skipping this tick');
      return;
    }
    sweepInFlight = true;
    try {
      await sweep(new Date(Date.now() - lookbackMs));
    } finally {
      sweepInFlight = false;
    }
  }, intervalMs);
  // Don't let the polling timer keep the process alive during shutdown; the
  // HTTP server is what should hold the event loop open.
  timer.unref();
}

View file

@ -1,11 +1,12 @@
import crypto from 'node:crypto'; import crypto from 'node:crypto';
import express from 'express'; import express from 'express';
import rateLimit from 'express-rate-limit'; import rateLimit from 'express-rate-limit';
import type { Config, WebhookEvent, AnalysisResult } from './types.js'; import type { Config, WebhookEvent } from './types.js';
import { shouldProcess } from './filters.js'; import { shouldProcess } from './filters.js';
import { createEngine } from './engine/index.js'; import { createEngine } from './engine/index.js';
import { import {
fetchPR, fetchPR,
fetchIssue,
formatComment, formatComment,
hasExistingComment, hasExistingComment,
postComment, postComment,
@ -96,6 +97,21 @@ export function createApp(config: Config): express.Express {
return; return;
} }
// Handle issue_comment with @troutbot mention - on-demand analysis
if (
eventType === 'issue_comment' &&
['created', 'edited'].includes(payload.action as string)
) {
const commentBody = (payload.comment as Record<string, unknown>).body as string;
if (commentBody && commentBody.includes('@troutbot')) {
const result = await handleOnDemandAnalysis(payload, config, engine);
res.json(result);
return;
}
res.json({ skipped: true, reason: 'Comment does not mention @troutbot' });
return;
}
if (eventType !== 'issues' && eventType !== 'pull_request') { if (eventType !== 'issues' && eventType !== 'pull_request') {
res.json({ skipped: true, reason: `Unhandled event: ${eventType}` }); res.json({ skipped: true, reason: `Unhandled event: ${eventType}` });
return; return;
@ -241,6 +257,77 @@ async function handleCheckSuiteCompleted(
} }
} }
async function handleOnDemandAnalysis(
payload: Record<string, unknown>,
config: Config,
engine: ReturnType<typeof createEngine>
): Promise<Record<string, unknown>> {
const logger = getLogger();
const repo = payload.repository as Record<string, unknown>;
const owner = (repo.owner as Record<string, unknown>).login as string;
const repoName = repo.name as string;
const issue = payload.issue as Record<string, unknown>;
const issueNumber = issue.number as number;
const isPullRequest = issue.pull_request !== undefined;
logger.info(
`On-demand analysis triggered for ${owner}/${repoName}#${issueNumber} (${isPullRequest ? 'PR' : 'issue'})`
);
try {
let event: WebhookEvent;
if (isPullRequest) {
const prData = await fetchPR(owner, repoName, issueNumber);
if (!prData) {
logger.warn(`Could not fetch PR ${owner}/${repoName}#${issueNumber}`);
return { skipped: true, reason: 'Could not fetch PR data' };
}
event = {
action: 'on_demand',
type: 'pull_request',
number: issueNumber,
title: prData.title,
body: prData.body,
owner,
repo: repoName,
author: prData.author,
labels: prData.labels,
branch: prData.branch,
sha: prData.sha,
};
} else {
const issueData = await fetchIssue(owner, repoName, issueNumber);
if (!issueData) {
logger.warn(`Could not fetch issue ${owner}/${repoName}#${issueNumber}`);
return { skipped: true, reason: 'Could not fetch issue data' };
}
event = {
action: 'on_demand',
type: 'issue',
number: issueNumber,
title: issueData.title,
body: issueData.body,
owner,
repo: repoName,
author: issueData.author,
labels: issueData.labels,
};
}
return await analyzeAndComment(event, config, engine);
} catch (err) {
logger.error(
`Failed to process on-demand analysis for ${owner}/${repoName}#${issueNumber}`,
err
);
return { error: 'Internal server error' };
}
}
function parseEvent(eventType: string, payload: Record<string, unknown>): WebhookEvent | null { function parseEvent(eventType: string, payload: Record<string, unknown>): WebhookEvent | null {
try { try {
if (eventType === 'issues') { if (eventType === 'issues') {

View file

@ -5,6 +5,13 @@ export interface Config {
engine: EngineConfig; engine: EngineConfig;
response: ResponseConfig; response: ResponseConfig;
logging: LoggingConfig; logging: LoggingConfig;
polling?: PollingConfig;
}
export interface PollingConfig {
enabled: boolean;
intervalMinutes: number;
lookbackMinutes: number;
} }
export interface ServerConfig { export interface ServerConfig {