diff --git a/src/dashboard.ts b/src/dashboard.ts
index 3dd7d99..64cd5fc 100644
--- a/src/dashboard.ts
+++ b/src/dashboard.ts
@@ -9,8 +9,7 @@ export function createDashboardRouter(config: Config): express.Router {
 
   router.use(express.json());
 
-  // --- API routes ---
-
+  // API routes
   router.get('/api/status', (_req, res) => {
     const enabledBackends = Object.entries(config.engine.backends)
       .filter(([, v]) => v.enabled)
@@ -41,7 +40,10 @@ export function createDashboardRouter(config: Config): express.Router {
   router.put('/api/config', (req, res) => {
     try {
       const partial = req.body as Partial<Config>;
-      const merged = deepMerge(config as Record<string, unknown>, partial as Record<string, unknown>) as Config;
+      const merged = deepMerge(
+        config as Record<string, unknown>,
+        partial as Record<string, unknown>
+      ) as Config;
       validate(merged);
 
       // Apply in-place
diff --git a/src/engine/checks.ts b/src/engine/checks.ts
index 9c035ff..f43b406 100644
--- a/src/engine/checks.ts
+++ b/src/engine/checks.ts
@@ -81,20 +81,17 @@ export class ChecksBackend implements EngineBackend {
 
     // Classify failures by severity
     const criticalFailures = failed.filter((r) => classifyCheck(r.name) === 'critical');
     const advisoryFailures = failed.filter((r) => classifyCheck(r.name) === 'advisory');
-    const standardFailures = failed.filter(
-      (r) => classifyCheck(r.name) === 'standard'
-    );
+    const standardFailures = failed.filter((r) => classifyCheck(r.name) === 'standard');
 
     // Weighted scoring: critical failures count 3x, advisory 0.5x
     const failureScore =
       criticalFailures.length * 3 + standardFailures.length * 1 + advisoryFailures.length * 0.5;
-    const totalWeight =
-      completed
-        .filter((r) => !skipped.includes(r))
-        .reduce((s, r) => {
-          const cls = classifyCheck(r.name);
-          return s + (cls === 'critical' ? 3 : cls === 'advisory' ? 0.5 : 1);
-        }, 0);
+    const totalWeight = completed
+      .filter((r) => !skipped.includes(r))
+      .reduce((s, r) => {
+        const cls = classifyCheck(r.name);
+        return s + (cls === 'critical' ? 3 : cls === 'advisory' ? 0.5 : 1);
+      }, 0);
     const weightedPassRate = totalWeight > 0 ? 1 - failureScore / totalWeight : 0;
 
@@ -117,13 +114,20 @@ export class ChecksBackend implements EngineBackend {
 
     // Build detailed reasoning
     const parts: string[] = [];
-    if (passed.length > 0) parts.push(`${passed.length} passed (${passed.map((r) => r.name).join(', ')})`);
+    if (passed.length > 0)
+      parts.push(`${passed.length} passed (${passed.map((r) => r.name).join(', ')})`);
     if (criticalFailures.length > 0)
-      parts.push(`${criticalFailures.length} critical failure(s) (${criticalFailures.map((r) => r.name).join(', ')})`);
+      parts.push(
+        `${criticalFailures.length} critical failure(s) (${criticalFailures.map((r) => r.name).join(', ')})`
+      );
     if (advisoryFailures.length > 0)
-      parts.push(`${advisoryFailures.length} advisory failure(s) (${advisoryFailures.map((r) => r.name).join(', ')})`);
+      parts.push(
+        `${advisoryFailures.length} advisory failure(s) (${advisoryFailures.map((r) => r.name).join(', ')})`
+      );
     if (standardFailures.length > 0)
-      parts.push(`${standardFailures.length} other failure(s) (${standardFailures.map((r) => r.name).join(', ')})`);
+      parts.push(
+        `${standardFailures.length} other failure(s) (${standardFailures.map((r) => r.name).join(', ')})`
+      );
     if (skipped.length > 0) parts.push(`${skipped.length} skipped`);
     if (pending.length > 0) parts.push(`${pending.length} still running`);
 
diff --git a/src/engine/diff.ts b/src/engine/diff.ts
index c4ebba4..9c74cd5 100644
--- a/src/engine/diff.ts
+++ b/src/engine/diff.ts
@@ -15,7 +15,9 @@ const RISKY_FILE_PATTERN =
 const DOC_FILE_PATTERN =
   /\.(md|mdx|txt|rst|adoc)$|^(README|CHANGELOG|LICENSE|CONTRIBUTING)/i;
 
-function categorizeFiles(files: { filename: string; additions: number; deletions: number; changes: number }[]) {
+function categorizeFiles(
+  files: { filename: string; additions: number; deletions: number; changes: number }[]
+) {
   const src: typeof files = [];
   const tests: typeof files = [];
   const generated: typeof files = [];
@@ -89,7 +91,11 @@ export class DiffBackend implements EngineBackend {
     } else if (totalChanges <= this.config.maxChanges) {
       signals.push({ name: `large PR (${totalChanges} lines)`, positive: false, weight: 0.8 });
     } else {
-      signals.push({ name: `very large PR (${totalChanges} lines, exceeds limit)`, positive: false, weight: 1.5 });
+      signals.push({
+        name: `very large PR (${totalChanges} lines, exceeds limit)`,
+        positive: false,
+        weight: 1.5,
+      });
     }
 
     // --- Focus signals ---
@@ -98,9 +104,17 @@ export class DiffBackend implements EngineBackend {
     } else if (meaningful.length <= 10) {
       signals.push({ name: 'focused changeset', positive: true, weight: 0.8 });
     } else if (meaningful.length > 30) {
-      signals.push({ name: `sprawling changeset (${meaningful.length} files)`, positive: false, weight: 1.2 });
+      signals.push({
+        name: `sprawling changeset (${meaningful.length} files)`,
+        positive: false,
+        weight: 1.2,
+      });
     } else if (meaningful.length > 20) {
-      signals.push({ name: `broad changeset (${meaningful.length} files)`, positive: false, weight: 0.6 });
+      signals.push({
+        name: `broad changeset (${meaningful.length} files)`,
+        positive: false,
+        weight: 0.6,
+      });
     }
 
     // --- Test coverage ---
@@ -129,10 +143,17 @@ export class DiffBackend implements EngineBackend {
 
     // --- Churn detection (files with high add+delete suggesting rewrites) ---
     const highChurnFiles = src.filter(
-      (f) => f.additions > 50 && f.deletions > 50 && Math.min(f.additions, f.deletions) / Math.max(f.additions, f.deletions) > 0.6
+      (f) =>
+        f.additions > 50 &&
+        f.deletions > 50 &&
+        Math.min(f.additions, f.deletions) / Math.max(f.additions, f.deletions) > 0.6
     );
     if (highChurnFiles.length >= 3) {
-      signals.push({ name: `high churn in ${highChurnFiles.length} files (possible refactor)`, positive: false, weight: 0.5 });
+      signals.push({
+        name: `high churn in ${highChurnFiles.length} files (possible refactor)`,
+        positive: false,
+        weight: 0.5,
+      });
     }
 
     // --- Risky files ---
@@ -180,7 +201,11 @@ export class DiffBackend implements EngineBackend {
     const totalSignalWeight = positiveWeight + negativeWeight;
     const confidence =
       signals.length > 0
-        ? Math.min(1, Math.abs(positiveWeight - negativeWeight) / Math.max(totalSignalWeight, 1) * 0.6 + 0.25)
+        ? Math.min(
+            1,
+            (Math.abs(positiveWeight - negativeWeight) / Math.max(totalSignalWeight, 1)) * 0.6 +
+              0.25
+          )
         : 0;
 
     // Build reasoning
diff --git a/src/engine/quality.ts b/src/engine/quality.ts
index 6b069d3..4aba1dd 100644
--- a/src/engine/quality.ts
+++ b/src/engine/quality.ts
@@ -44,7 +44,11 @@ export class QualityBackend implements EngineBackend {
     if (body.length === 0) {
       signals.push({ name: 'empty description', positive: false, weight: 2 });
     } else if (body.length < this.config.minBodyLength) {
-      signals.push({ name: `short description (${body.length} chars)`, positive: false, weight: 1.2 });
+      signals.push({
+        name: `short description (${body.length} chars)`,
+        positive: false,
+        weight: 1.2,
+      });
     } else if (body.length >= this.config.minBodyLength) {
       signals.push({ name: 'adequate description', positive: true, weight: 1 });
       if (body.length > 300) {
@@ -68,7 +72,11 @@ export class QualityBackend implements EngineBackend {
       if (total > 0 && checked === total) {
         signals.push({ name: `checklist complete (${total}/${total})`, positive: true, weight: 1 });
       } else if (total > 0) {
-        signals.push({ name: `checklist incomplete (${checked}/${total})`, positive: false, weight: 0.8 });
+        signals.push({
+          name: `checklist incomplete (${checked}/${total})`,
+          positive: false,
+          weight: 0.8,
+        });
       }
     }
 
@@ -79,14 +87,22 @@ export class QualityBackend implements EngineBackend {
       if (body.length > 100 && BREAKING_PATTERN.test(body)) {
         signals.push({ name: 'breaking change documented', positive: true, weight: 0.8 });
       } else {
-        signals.push({ name: 'breaking change mentioned but not detailed', positive: false, weight: 0.8 });
+        signals.push({
+          name: 'breaking change mentioned but not detailed',
+          positive: false,
+          weight: 0.8,
+        });
       }
     }
 
     // TODOs/FIXMEs in description suggest unfinished work
     const todoMatches = body.match(TODO_PATTERN);
     if (todoMatches) {
-      signals.push({ name: `unfinished markers in description (${todoMatches.length})`, positive: false, weight: 0.6 });
+      signals.push({
+        name: `unfinished markers in description (${todoMatches.length})`,
+        positive: false,
+        weight: 0.6,
+      });
     }
 
     // --- Type-specific signals ---
@@ -100,7 +116,9 @@ export class QualityBackend implements EngineBackend {
         signals.push({ name: 'has expected/actual behavior', positive: true, weight: 1.2 });
       }
 
-      if (/\b(version|environment|os|platform|browser|node|python|java|rust|go)\s*[:\d]/i.test(body)) {
+      if (
+        /\b(version|environment|os|platform|browser|node|python|java|rust|go)\s*[:\d]/i.test(body)
+      ) {
        signals.push({ name: 'has environment details', positive: true, weight: 1 });
       }
 
@@ -140,7 +158,11 @@ export class QualityBackend implements EngineBackend {
     // Shared: references to other issues/PRs
     const refs = body.match(/#\d+/g);
     if (refs && refs.length > 0) {
-      signals.push({ name: `references ${refs.length} issue(s)/PR(s)`, positive: true, weight: 0.6 });
+      signals.push({
+        name: `references ${refs.length} issue(s)/PR(s)`,
+        positive: true,
+        weight: 0.6,
+      });
     }
 
     // Screenshots or images
@@ -169,7 +191,7 @@ export class QualityBackend implements EngineBackend {
     const totalWeight = positiveWeight + negativeWeight;
     const confidence = Math.min(
       1,
-      Math.abs(positiveWeight - negativeWeight) / Math.max(totalWeight, 1) * 0.5 + 0.2
+      (Math.abs(positiveWeight - negativeWeight) / Math.max(totalWeight, 1)) * 0.5 + 0.2
     );
 
     const reasoning = `Quality: ${signals.map((s) => `${s.positive ? '+' : '-'} ${s.name}`).join(', ')}.`;
diff --git a/src/github.ts b/src/github.ts
index 14348fa..13fe784 100644
--- a/src/github.ts
+++ b/src/github.ts
@@ -16,8 +16,7 @@ export function isDryRun(): boolean {
   return octokit === null;
 }
 
-// --- Comment operations ---
-
+// Comment operations
 export async function postComment(
   owner: string,
   repo: string,
@@ -70,8 +69,7 @@ export async function updateComment(
   getLogger().info(`Updated comment ${commentId} on ${owner}/${repo}`);
 }
 
-// --- Data fetching for engine backends ---
-
+// Data fetching for engine backends
 export async function fetchCheckRuns(
   owner: string,
   repo: string,
@@ -146,8 +144,74 @@ export async function fetchPR(
   };
 }
 
-// --- Comment formatting ---
+export async function fetchIssue(
+  owner: string,
+  repo: string,
+  issueNumber: number
+): Promise<{
+  title: string;
+  body: string;
+  author: string;
+  labels: string[];
+} | null> {
+  if (!octokit) return null;
+  const { data } = await octokit.issues.get({ owner, repo, issue_number: issueNumber });
+  return {
+    title: data.title,
+    body: data.body || '',
+    author: data.user?.login || '',
+    labels: (data.labels || []).map((l) => (typeof l === 'string' ? l : l.name || '')),
+  };
+}
+
+export interface RecentComment {
+  id: number;
+  body: string;
+  author: string;
+  createdAt: string;
+  issueNumber: number;
+  isPullRequest: boolean;
+}
+
+export async function listRecentComments(
+  owner: string,
+  repo: string,
+  since: Date
+): Promise<RecentComment[]> {
+  if (!octokit) {
+    getLogger().debug('[dry-run] Cannot fetch comments without a token');
+    return [];
+  }
+
+  const sinceIso = since.toISOString();
+  const comments: RecentComment[] = [];
+
+  // Fetch recent issue comments
+  const issueComments = await octokit.paginate(octokit.issues.listCommentsForRepo, {
+    owner,
+    repo,
+    since: sinceIso,
+    per_page: 100,
+  });
+
+  for (const comment of issueComments) {
+    if (!comment.body) continue;
+
+    comments.push({
+      id: comment.id,
+      body: comment.body,
+      author: comment.user?.login || '',
+      createdAt: comment.created_at,
+      issueNumber: comment.issue_url ? parseInt(comment.issue_url.split('/').pop() || '0', 10) : 0,
+      isPullRequest: false, // we'll determine this by fetching the issue
+    });
+  }
+
+  return comments;
+}
+
+// Comment formatting
 function pickRandom(list: string[]): string {
   return list[Math.floor(Math.random() * list.length)];
 }
 
diff --git a/src/index.ts b/src/index.ts
index e944a4f..2e24cbb 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -10,6 +10,7 @@ import {
 } from './github.js';
 import { createApp } from './server.js';
 import { createEngine } from './engine/index.js';
+import { startPolling } from './polling.js';
 import type { WebhookEvent } from './types.js';
 
 async function analyzeOne(target: string) {
@@ -95,7 +96,9 @@ function serve() {
     );
   }
   if (!process.env.WEBHOOK_SECRET) {
-    logger.warn('No WEBHOOK_SECRET - webhook signature verification is disabled');
+    logger.warn(
+      'No WEBHOOK_SECRET - webhook signature verification is disabled (not needed for polling-only mode)'
+    );
   }
 
   const app = createApp(config);
@@ -105,7 +108,7 @@ function serve() {
     .filter(([, v]) => v.enabled)
     .map(([k]) => k);
 
-  const server = app.listen(port, () => {
+  const server = app.listen(port, async () => {
     logger.info(`Troutbot listening on port ${port}`);
     logger.info(`Enabled backends: ${enabledBackends.join(', ')}`);
 
@@ -138,6 +141,9 @@ function serve() {
 
     logger.info(`Comment updates: ${config.response.allowUpdates ? 'enabled' : 'disabled'}`);
     logger.info(`Dashboard available at http://localhost:${port}/dashboard`);
+
+    // Start polling if enabled
+    await startPolling(config);
   });
 
   function shutdown(signal: string) {
diff --git a/src/polling.ts b/src/polling.ts
new file mode 100644
index 0000000..090dce6
--- /dev/null
+++ b/src/polling.ts
@@ -0,0 +1,223 @@
+import type { Config, WebhookEvent } from './types.js';
+import {
+  listRecentComments,
+  fetchPR,
+  fetchIssue,
+  hasExistingComment,
+  postComment,
+  updateComment,
+  formatComment,
+  type RecentComment,
+} from './github.js';
+import { createEngine } from './engine/index.js';
+import { getLogger } from './logger.js';
+import { recordEvent } from './events.js';
+
+interface ProcessedComment {
+  id: number;
+  timestamp: number;
+}
+
+const processedComments: Map<string, ProcessedComment> = new Map();
+const MAX_PROCESSED_CACHE = 1000;
+
+function getCacheKey(owner: string, repo: string, commentId: number): string {
+  return `${owner}/${repo}#${commentId}`;
+}
+
+function isProcessed(owner: string, repo: string, commentId: number): boolean {
+  return processedComments.has(getCacheKey(owner, repo, commentId));
+}
+
+function markProcessed(owner: string, repo: string, commentId: number): void {
+  const key = getCacheKey(owner, repo, commentId);
+  processedComments.set(key, { id: commentId, timestamp: Date.now() });
+
+  // Clean up old entries if cache is too large
+  if (processedComments.size > MAX_PROCESSED_CACHE) {
+    const entries = Array.from(processedComments.entries());
+    entries.sort((a, b) => a[1].timestamp - b[1].timestamp);
+    const toRemove = entries.slice(0, entries.length - MAX_PROCESSED_CACHE);
+    for (const [k] of toRemove) {
+      processedComments.delete(k);
+    }
+  }
+}
+
+function containsMention(body: string): boolean {
+  return body.includes('@troutbot');
+}
+
+async function analyzeAndComment(
+  event: WebhookEvent,
+  config: Config
+): Promise<Record<string, unknown>> {
+  const logger = getLogger();
+  const engine = createEngine(config.engine);
+
+  // Run analysis
+  const analysis = await engine.analyze(event);
+  logger.info(
+    `Analyzed ${event.owner}/${event.repo}#${event.number}: impact=${analysis.impact}, confidence=${analysis.confidence.toFixed(2)}`
+  );
+
+  // Check for existing comment
+  const { commentMarker, allowUpdates } = config.response;
+  const existing = await hasExistingComment(event.owner, event.repo, event.number, commentMarker);
+
+  if (existing.exists && !allowUpdates) {
+    logger.info(`Already commented on ${event.owner}/${event.repo}#${event.number}, skipping`);
+    const result = { skipped: true, reason: 'Already commented' };
+    recordEvent(event, result, analysis);
+    return result;
+  }
+
+  const body = formatComment(
+    config.response,
+    event.type,
+    analysis.impact,
+    analysis.confidence,
+    analysis.reasoning
+  );
+
+  if (existing.exists && allowUpdates && existing.commentId) {
+    logger.info(`Updating existing comment on ${event.owner}/${event.repo}#${event.number}`);
+    await updateComment(event.owner, event.repo, existing.commentId, body);
+  } else {
+    await postComment(event.owner, event.repo, event.number, body);
+  }
+
+  const result = { processed: true, impact: analysis.impact, confidence: analysis.confidence };
+  recordEvent(event, result, analysis);
+  return result;
+}
+
+async function processComment(
+  comment: RecentComment,
+  owner: string,
+  repo: string,
+  config: Config
+): Promise<void> {
+  const logger = getLogger();
+
+  if (!containsMention(comment.body)) {
+    return;
+  }
+
+  if (isProcessed(owner, repo, comment.id)) {
+    logger.debug(`Comment ${owner}/${repo}#${comment.id} already processed, skipping`);
+    return;
+  }
+
+  logger.info(`Found @troutbot mention in ${owner}/${repo}#${comment.issueNumber}`);
+
+  try {
+    // First, try to fetch as a PR to check if it's a pull request
+    const prData = await fetchPR(owner, repo, comment.issueNumber);
+
+    let event: WebhookEvent;
+
+    if (prData) {
+      // It's a pull request
+      event = {
+        action: 'on_demand',
+        type: 'pull_request',
+        number: comment.issueNumber,
+        title: prData.title,
+        body: prData.body,
+        owner,
+        repo,
+        author: prData.author,
+        labels: prData.labels,
+        branch: prData.branch,
+        sha: prData.sha,
+      };
+    } else {
+      // It's an issue
+      const issueData = await fetchIssue(owner, repo, comment.issueNumber);
+      if (!issueData) {
+        logger.warn(`Could not fetch issue ${owner}/${repo}#${comment.issueNumber}`);
+        return;
+      }
+
+      event = {
+        action: 'on_demand',
+        type: 'issue',
+        number: comment.issueNumber,
+        title: issueData.title,
+        body: issueData.body,
+        owner,
+        repo,
+        author: issueData.author,
+        labels: issueData.labels,
+      };
+    }
+
+    await analyzeAndComment(event, config);
+    markProcessed(owner, repo, comment.id);
+
+    logger.info(
+      `Successfully processed on-demand analysis for ${owner}/${repo}#${comment.issueNumber}`
+    );
+  } catch (err) {
+    logger.error(`Failed to process mention in ${owner}/${repo}#${comment.issueNumber}`, err);
+  }
+}
+
+async function pollRepository(
+  owner: string,
+  repo: string,
+  config: Config,
+  since: Date
+): Promise<void> {
+  const logger = getLogger();
+
+  try {
+    const comments = await listRecentComments(owner, repo, since);
+    logger.debug(`Fetched ${comments.length} recent comments from ${owner}/${repo}`);
+
+    for (const comment of comments) {
+      await processComment(comment, owner, repo, config);
+    }
+  } catch (err) {
+    logger.error(`Failed to poll ${owner}/${repo}`, err);
+  }
+}
+
+export async function startPolling(config: Config): Promise<void> {
+  const logger = getLogger();
+  const pollingConfig = config.polling;
+
+  if (!pollingConfig || !pollingConfig.enabled) {
+    logger.info('Polling is disabled');
+    return;
+  }
+
+  if (config.repositories.length === 0) {
+    logger.warn('Polling enabled but no repositories configured');
+    return;
+  }
+
+  const intervalMs = pollingConfig.intervalMinutes * 60 * 1000;
+  const lookbackMs = pollingConfig.lookbackMinutes * 60 * 1000;
+
+  logger.info(`Starting polling for ${config.repositories.length} repositories`);
+  logger.info(
+    `Poll interval: ${pollingConfig.intervalMinutes} minutes, lookback: ${pollingConfig.lookbackMinutes} minutes`
+  );
+
+  // Do an initial poll
+  const initialSince = new Date(Date.now() - lookbackMs);
+  for (const repo of config.repositories) {
+    await pollRepository(repo.owner, repo.repo, config, initialSince);
+  }
+
+  // Set up recurring polling
+  setInterval(async () => {
+    const since = new Date(Date.now() - lookbackMs);
+
+    for (const repo of config.repositories) {
+      await pollRepository(repo.owner, repo.repo, config, since);
+    }
+  }, intervalMs);
+}
diff --git a/src/server.ts b/src/server.ts
index b97e3b0..b0cc53d 100644
--- a/src/server.ts
+++ b/src/server.ts
@@ -1,11 +1,12 @@
 import crypto from 'node:crypto';
 import express from 'express';
 import rateLimit from 'express-rate-limit';
-import type { Config, WebhookEvent, AnalysisResult } from './types.js';
+import type { Config, WebhookEvent } from './types.js';
 import { shouldProcess } from './filters.js';
 import { createEngine } from './engine/index.js';
 import {
   fetchPR,
+  fetchIssue,
   formatComment,
   hasExistingComment,
   postComment,
@@ -96,6 +97,21 @@ export function createApp(config: Config): express.Express {
       return;
     }
 
+    // Handle issue_comment with @troutbot mention - on-demand analysis
+    if (
+      eventType === 'issue_comment' &&
+      ['created', 'edited'].includes(payload.action as string)
+    ) {
+      const commentBody = (payload.comment as Record<string, unknown>).body as string;
+      if (commentBody && commentBody.includes('@troutbot')) {
+        const result = await handleOnDemandAnalysis(payload, config, engine);
+        res.json(result);
+        return;
+      }
+      res.json({ skipped: true, reason: 'Comment does not mention @troutbot' });
+      return;
+    }
+
     if (eventType !== 'issues' && eventType !== 'pull_request') {
       res.json({ skipped: true, reason: `Unhandled event: ${eventType}` });
       return;
@@ -241,6 +257,77 @@ async function handleCheckSuiteCompleted(
   }
 }
 
+async function handleOnDemandAnalysis(
+  payload: Record<string, unknown>,
+  config: Config,
+  engine: ReturnType<typeof createEngine>
+): Promise<Record<string, unknown>> {
+  const logger = getLogger();
+  const repo = payload.repository as Record<string, unknown>;
+  const owner = (repo.owner as Record<string, unknown>).login as string;
+  const repoName = repo.name as string;
+
+  const issue = payload.issue as Record<string, unknown>;
+  const issueNumber = issue.number as number;
+  const isPullRequest = issue.pull_request !== undefined;
+
+  logger.info(
+    `On-demand analysis triggered for ${owner}/${repoName}#${issueNumber} (${isPullRequest ? 'PR' : 'issue'})`
+  );
+
+  try {
+    let event: WebhookEvent;
+
+    if (isPullRequest) {
+      const prData = await fetchPR(owner, repoName, issueNumber);
+      if (!prData) {
+        logger.warn(`Could not fetch PR ${owner}/${repoName}#${issueNumber}`);
+        return { skipped: true, reason: 'Could not fetch PR data' };
+      }
+
+      event = {
+        action: 'on_demand',
+        type: 'pull_request',
+        number: issueNumber,
+        title: prData.title,
+        body: prData.body,
+        owner,
+        repo: repoName,
+        author: prData.author,
+        labels: prData.labels,
+        branch: prData.branch,
+        sha: prData.sha,
+      };
+    } else {
+      const issueData = await fetchIssue(owner, repoName, issueNumber);
+      if (!issueData) {
+        logger.warn(`Could not fetch issue ${owner}/${repoName}#${issueNumber}`);
+        return { skipped: true, reason: 'Could not fetch issue data' };
+      }
+
+      event = {
+        action: 'on_demand',
+        type: 'issue',
+        number: issueNumber,
+        title: issueData.title,
+        body: issueData.body,
+        owner,
+        repo: repoName,
+        author: issueData.author,
+        labels: issueData.labels,
+      };
+    }
+
+    return await analyzeAndComment(event, config, engine);
+  } catch (err) {
+    logger.error(
+      `Failed to process on-demand analysis for ${owner}/${repoName}#${issueNumber}`,
+      err
+    );
+    return { error: 'Internal server error' };
+  }
+}
+
 function parseEvent(eventType: string, payload: Record<string, unknown>): WebhookEvent | null {
   try {
     if (eventType === 'issues') {
diff --git a/src/types.ts b/src/types.ts
index a84bd30..ab07dff 100644
--- a/src/types.ts
+++ b/src/types.ts
@@ -5,6 +5,13 @@ export interface Config {
   engine: EngineConfig;
   response: ResponseConfig;
   logging: LoggingConfig;
+  polling?: PollingConfig;
+}
+
+export interface PollingConfig {
+  enabled: boolean;
+  intervalMinutes: number;
+  lookbackMinutes: number;
 }
 
 export interface ServerConfig {