initial commit

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Ic08e7c4b5b4f4072de9e2f9a701e977b6a6a6964
This commit is contained in:
raf 2026-01-30 16:46:39 +03:00
commit f8db097ba9
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
21 changed files with 4924 additions and 0 deletions

5
.env.example Normal file
View file

@ -0,0 +1,5 @@
GITHUB_TOKEN=ghp_your_personal_access_token
WEBHOOK_SECRET=your_webhook_secret
PORT=3000
CONFIG_PATH=config.ts
LOG_LEVEL=info

2
.gitignore vendored Normal file
View file

@ -0,0 +1,2 @@
node_modules/
dist/

94
config.example.ts Normal file
View file

@ -0,0 +1,94 @@
// Example troutbot configuration. Copy to `config.ts` (or point CONFIG_PATH
// at your copy) and adjust; anything omitted falls back to built-in defaults.
import type { Config } from './src/types';
const config: Config = {
  server: {
    // HTTP port for the webhook listener (overridable via the PORT env var).
    port: 3000,
  },
  repositories: [
    // Leave empty to accept webhooks from any repo.
    // { owner: "myorg", repo: "myrepo" },
  ],
  filters: {
    labels: {
      include: [],
      exclude: ['bot-ignore'],
    },
    authors: {
      exclude: ['dependabot', 'renovate[bot]'],
    },
    branches: {
      include: [], // empty = all branches
    },
  },
  engine: {
    backends: {
      // Queries GitHub Checks API for CI results (ESLint, Clippy, tests, builds).
      // Requires GITHUB_TOKEN.
      checks: { enabled: true },
      // Analyzes PR diff: size, file count, test coverage, net additions/deletions.
      // Requires GITHUB_TOKEN. Only applies to pull_request events.
      diff: {
        enabled: true,
        maxChanges: 1000, // PRs above this line count are flagged as too large
        requireTests: false, // set true to flag PRs with no test file changes
      },
      // Analyzes issue/PR body for structural quality: description length,
      // code blocks, reproduction steps, linked issues, test plans.
      // Works without a token (pure text analysis).
      quality: {
        enabled: true,
        minBodyLength: 50, // minimum characters for an "adequate" description
      },
    },
    // Relative importance of each backend when combining results.
    weights: {
      checks: 0.4,
      diff: 0.3,
      quality: 0.3,
    },
    // Below this combined confidence, classify as neutral.
    confidenceThreshold: 0.1,
  },
  response: {
    includeConfidence: true,
    includeReasoning: true,
    // One message is picked at random from the list matching the impact.
    // Placeholders: {type} (issue/pull request), {impact} (positive/negative/neutral)
    messages: {
      positive: [
        'This {type} looks great for the trout! All signals point upstream.',
        'The trout approve of this {type}. Swim on!',
        'Splashing good news - this {type} is looking healthy.',
      ],
      negative: [
        'This {type} is muddying the waters. The trout are concerned.',
        'Warning: the trout sense trouble in this {type}.',
        'Something smells fishy about this {type}. Please review.',
      ],
      neutral: [
        'The trout have no strong feelings about this {type}.',
        'This {type} is neither upstream nor downstream. Neutral waters.',
        'The trout are watching this {type} with mild interest.',
      ],
    },
    // Hidden marker embedded in bot comments so the bot can find its own posts.
    commentMarker: '<!-- troutbot -->',
    allowUpdates: false, // set true to update comments when CI finishes (requires check_suite webhook)
  },
  logging: {
    level: 'info',
    file: 'troutbot.log',
  },
};
export default config;

31
eslint.config.mjs Normal file
View file

@ -0,0 +1,31 @@
// Flat ESLint configuration (ESLint 9+ style, no .eslintrc).
import tseslint from '@typescript-eslint/eslint-plugin';
import tsparser from '@typescript-eslint/parser';
export default [
  {
    // Lint all JS/TS sources; never build output or dependencies.
    files: ['**/*.ts', '**/*.tsx', '**/*.js', '**/*.jsx'],
    ignores: ['dist/**', 'node_modules/**'],
    languageOptions: {
      ecmaVersion: 2022,
      sourceType: 'module',
      parser: tsparser,
      // Node globals used by the app; declared explicitly since flat config
      // has no "env" presets.
      globals: {
        console: 'readonly',
        process: 'readonly',
        setTimeout: 'readonly',
        setInterval: 'readonly',
        clearTimeout: 'readonly',
        clearInterval: 'readonly',
      },
    },
    plugins: {
      '@typescript-eslint': tseslint,
    },
    rules: {
      // Start from the plugin's recommended set, then apply local overrides.
      ...tseslint.configs.recommended.rules,
      '@typescript-eslint/no-explicit-any': 'warn',
      // Unused function args are allowed when prefixed with "_" (e.g. _req).
      '@typescript-eslint/no-unused-vars': ['error', { argsIgnorePattern: '^_' }],
      'no-console': 'off',
    },
  },
];

32
package.json Normal file
View file

@ -0,0 +1,32 @@
{
"name": "troutbot",
"version": "1.0.0",
"description": "GitHub webhook bot that analyzes issues and PRs for impact on the trout population",
"main": "dist/index.js",
"scripts": {
"build": "tsup src/index.ts --format cjs --dts --clean",
"start": "node dist/index.js",
"dev": "tsx src/index.ts",
"lint": "eslint .",
"fmt": "prettier --write ."
},
"dependencies": {
"@octokit/rest": "^22.0.1",
"dotenv": "^17.2.3",
"express": "^5.2.1",
"express-rate-limit": "^8.2.1",
"jiti": "^2.6.1",
"winston": "^3.19.0"
},
"devDependencies": {
"@types/express": "^5.0.6",
"@types/node": "^25.1.0",
"@typescript-eslint/eslint-plugin": "^8.54.0",
"@typescript-eslint/parser": "^8.54.0",
"eslint": "^9.39.2",
"prettier": "^3.8.1",
"tsup": "^8.5.1",
"tsx": "^4.21.0",
"typescript": "^5.9.3"
}
}

2734
pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load diff

15
prettier.config.mjs Normal file
View file

@ -0,0 +1,15 @@
// Prettier formatting options shared by the whole repository.
export default {
  printWidth: 100,
  tabWidth: 2,
  useTabs: false,
  semi: true,
  singleQuote: true,
  quoteProps: 'as-needed',
  jsxSingleQuote: false,
  trailingComma: 'es5',
  bracketSpacing: true,
  bracketSameLine: false,
  arrowParens: 'always',
  endOfLine: 'lf',
  plugins: [],
};

145
src/config.ts Normal file
View file

@ -0,0 +1,145 @@
import fs from 'node:fs';
import path from 'node:path';
import { createJiti } from 'jiti';
import dotenv from 'dotenv';
import type { Config } from './types.js';
// Load .env into process.env before any configuration values are read.
dotenv.config();
// jiti lets us require() a TypeScript config file at runtime without a
// separate build step. NOTE(review): `__filename` assumes a CJS runtime
// (package.json builds with --format cjs) — confirm if the build target changes.
const jiti = createJiti(__filename, { interopDefault: true });
// Baseline configuration. The user's config file and environment variables
// are merged on top of this in loadConfig(); keep in sync with config.example.ts.
const defaults: Config = {
  server: { port: 3000 },
  repositories: [],
  filters: {
    labels: { include: [], exclude: [] },
    authors: { exclude: [] },
    branches: { include: [] },
  },
  engine: {
    backends: {
      checks: { enabled: true },
      diff: { enabled: true, maxChanges: 1000, requireTests: false },
      quality: { enabled: true, minBodyLength: 50 },
    },
    weights: {
      checks: 0.4,
      diff: 0.3,
      quality: 0.3,
    },
    confidenceThreshold: 0.1,
  },
  response: {
    includeConfidence: true,
    includeReasoning: true,
    messages: {
      positive: [
        'This {type} looks great for the trout! All signals point upstream.',
        'The trout approve of this {type}. Swim on!',
        'Splashing good news - this {type} is looking healthy.',
      ],
      negative: [
        'This {type} is muddying the waters. The trout are concerned.',
        'Warning: the trout sense trouble in this {type}.',
        'Something smells fishy about this {type}. Please review.',
      ],
      neutral: [
        'The trout have no strong feelings about this {type}.',
        'This {type} is neither upstream nor downstream. Neutral waters.',
        'The trout are watching this {type} with mild interest.',
      ],
    },
    commentMarker: '<!-- troutbot -->',
    allowUpdates: false,
  },
  logging: {
    level: 'info',
    file: 'troutbot.log',
  },
};
/**
 * Recursively merges `source` into `target`, returning a new object.
 *
 * Plain-object values are merged key-by-key; arrays and scalars from
 * `source` replace the target value wholesale. `undefined` source values
 * are ignored so partial configs never erase defaults. Neither argument
 * is mutated (nested objects untouched by `source` are shared by reference).
 *
 * Security: `source` may originate from untrusted JSON (PUT /api/config in
 * the dashboard feeds `req.body` here), so the dangerous keys `__proto__`,
 * `constructor` and `prototype` are skipped to prevent prototype pollution.
 */
export function deepMerge<T extends Record<string, unknown>>(target: T, source: Partial<T>): T {
  const result = { ...target };
  for (const key of Object.keys(source)) {
    // Guard against prototype pollution from attacker-controlled JSON.
    if (key === '__proto__' || key === 'constructor' || key === 'prototype') continue;
    const k = key as keyof T;
    const sourceVal = source[k];
    const targetVal = target[k];
    const bothPlainObjects =
      sourceVal !== null &&
      sourceVal !== undefined &&
      typeof sourceVal === 'object' &&
      !Array.isArray(sourceVal) &&
      targetVal !== null &&
      typeof targetVal === 'object' &&
      !Array.isArray(targetVal);
    if (bothPlainObjects) {
      result[k] = deepMerge(
        targetVal as Record<string, unknown>,
        sourceVal as Record<string, unknown>
      ) as T[keyof T];
    } else if (sourceVal !== undefined) {
      result[k] = sourceVal as T[keyof T];
    }
  }
  return result;
}
/**
 * Loads the runtime configuration.
 *
 * Resolution order: built-in defaults, overlaid with the user's config file
 * (CONFIG_PATH env var, defaulting to "config.ts"), then environment
 * variable overrides (PORT, LOG_LEVEL). The result is validated before
 * being returned.
 *
 * @throws Error when PORT is not numeric, LOG_LEVEL is unrecognized, or
 *   validate() rejects the merged configuration.
 */
export function loadConfig(): Config {
  const requestedPath = process.env.CONFIG_PATH || 'config.ts';
  const absolutePath = path.resolve(requestedPath);
  let overrides: Partial<Config> = {};
  if (!fs.existsSync(absolutePath)) {
    // Only warn when the user explicitly pointed at a missing file; a
    // missing default "config.ts" simply means "use the defaults".
    if (process.env.CONFIG_PATH) {
      console.warn(
        `Warning: CONFIG_PATH is set to "${process.env.CONFIG_PATH}" but file not found at ${absolutePath}`
      );
    }
  } else {
    const loaded = jiti(absolutePath) as Partial<Config> | { default: Partial<Config> };
    overrides = 'default' in loaded ? loaded.default : loaded;
  }
  const config = deepMerge(defaults, overrides);
  // --- Environment variable overrides ---
  const portEnv = process.env.PORT;
  if (portEnv) {
    const parsed = parseInt(portEnv, 10);
    if (Number.isNaN(parsed)) {
      throw new Error(`Invalid PORT value: "${process.env.PORT}" is not a number`);
    }
    config.server.port = parsed;
  }
  const levelEnv = process.env.LOG_LEVEL;
  if (levelEnv) {
    const validLogLevels = ['debug', 'info', 'warn', 'error'];
    if (!validLogLevels.includes(levelEnv)) {
      throw new Error(
        `Invalid LOG_LEVEL: "${process.env.LOG_LEVEL}". Must be one of: ${validLogLevels.join(', ')}`
      );
    }
    config.logging.level = levelEnv;
  }
  validate(config);
  return config;
}
/**
 * Validates a fully-merged configuration, throwing on the first problem.
 *
 * Checks: the port is an integer in [1, 65535]; at least one engine backend
 * is enabled; no backend weight is negative; the enabled backends carry a
 * positive total weight (the engine divides by the sum of active weights,
 * so an all-zero configuration would produce NaN results); and the
 * confidence threshold lies in [0, 1].
 *
 * @throws Error describing the first invalid setting found.
 */
export function validate(config: Config): void {
  const { port } = config.server;
  // Number.isInteger also rejects NaN and fractional ports like 30.5.
  if (!port || !Number.isInteger(port) || port < 1 || port > 65535) {
    throw new Error('Invalid server port');
  }
  const { backends, weights, confidenceThreshold } = config.engine;
  if (!backends.checks.enabled && !backends.diff.enabled && !backends.quality.enabled) {
    throw new Error('At least one engine backend must be enabled');
  }
  for (const [key, value] of Object.entries(weights)) {
    if (value < 0) {
      throw new Error(`Backend weight "${key}" must be non-negative, got ${value}`);
    }
  }
  // Guard against division by zero in the engine's weighted average.
  const enabledWeight =
    (backends.checks.enabled ? weights.checks : 0) +
    (backends.diff.enabled ? weights.diff : 0) +
    (backends.quality.enabled ? weights.quality : 0);
  if (enabledWeight <= 0) {
    throw new Error('At least one enabled backend must have a positive weight');
  }
  if (confidenceThreshold < 0 || confidenceThreshold > 1) {
    throw new Error('confidenceThreshold must be between 0 and 1');
  }
}

312
src/dashboard.ts Normal file
View file

@ -0,0 +1,312 @@
import express from 'express';
import type { Config } from './types.js';
import { getRecentEvents, clearEvents } from './events.js';
import { validate, deepMerge } from './config.js';
/**
 * Builds the Express router serving the dashboard UI and its JSON API.
 *
 * Routes:
 *   GET    /api/status  - uptime/version/enabled-backend summary
 *   GET    /api/events  - recent processed webhook events
 *   DELETE /api/events  - clear the in-memory event log
 *   GET    /api/config  - current runtime config
 *   PUT    /api/config  - merge + validate a partial config, applied in place
 *   GET    /dashboard   - static HTML shell (data fetched client-side)
 *
 * PUT mutates the shared `config` object via Object.assign so every module
 * holding a reference sees the update; changes are not persisted to disk.
 */
export function createDashboardRouter(config: Config): express.Router {
  const router = express.Router();
  const startTime = Date.now();
  router.use(express.json());
  // --- API routes ---
  router.get('/api/status', (_req, res) => {
    const enabledBackends = Object.entries(config.engine.backends)
      .filter(([, v]) => v.enabled)
      .map(([k]) => k);
    res.json({
      uptime: Math.floor((Date.now() - startTime) / 1000),
      version: process.env.npm_package_version ?? 'unknown',
      // No GITHUB_TOKEN means the bot cannot post comments (dry-run mode).
      dryRun: !process.env.GITHUB_TOKEN,
      backends: enabledBackends,
      repoCount: config.repositories.length || 'all',
    });
  });
  router.get('/api/events', (_req, res) => {
    res.json(getRecentEvents());
  });
  router.delete('/api/events', (_req, res) => {
    clearEvents();
    res.json({ cleared: true });
  });
  router.get('/api/config', (_req, res) => {
    res.json(config);
  });
  router.put('/api/config', (req, res) => {
    try {
      const body: unknown = req.body;
      // express.json() accepts arrays and primitives too; deepMerge assumes
      // a plain object, so reject anything else up front with a 400.
      if (body === null || typeof body !== 'object' || Array.isArray(body)) {
        res.status(400).json({ error: 'Config payload must be a JSON object' });
        return;
      }
      const partial = body as Partial<Config>;
      const merged = deepMerge(
        config as Record<string, unknown>,
        partial as Record<string, unknown>
      ) as Config;
      validate(merged);
      // Apply in-place
      Object.assign(config, merged);
      res.json(config);
    } catch (err) {
      const message = err instanceof Error ? err.message : String(err);
      res.status(400).json({ error: message });
    }
  });
  // --- Dashboard HTML ---
  router.get('/dashboard', (_req, res) => {
    res.type('html').send(dashboardHTML());
  });
  return router;
}
/**
 * Renders the dashboard single-page UI as one static HTML string.
 *
 * The page polls /api/status and /api/events and offers a JSON editor for
 * /api/config. Event fields (repo owner/name, action, skip reason, etc.)
 * originate from webhook payloads — untrusted input — so the client-side
 * esc() helper HTML-escapes every value interpolated into innerHTML to
 * prevent stored XSS. Config rendering already uses textContent (safe).
 */
function dashboardHTML(): string {
  return `<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Troutbot Dashboard</title>
<style>
*, *::before, *::after { box-sizing: border-box; margin: 0; padding: 0; }
body {
  font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
  background: #0d1117; color: #c9d1d9; line-height: 1.5;
  padding: 1.5rem; max-width: 1200px; margin: 0 auto;
}
h1 { color: #58a6ff; margin-bottom: 1.5rem; font-size: 1.5rem; }
h2 { color: #8b949e; font-size: 1rem; text-transform: uppercase;
  letter-spacing: 0.05em; margin-bottom: 0.75rem; }
.card {
  background: #161b22; border: 1px solid #30363d; border-radius: 6px;
  padding: 1rem 1.25rem; margin-bottom: 1.5rem;
}
.status-grid {
  display: grid; grid-template-columns: repeat(auto-fit, minmax(150px, 1fr));
  gap: 0.75rem;
}
.status-item label { display: block; color: #8b949e; font-size: 0.75rem; }
.status-item span { font-size: 1.1rem; font-weight: 600; }
table { width: 100%; border-collapse: collapse; font-size: 0.85rem; }
th { text-align: left; color: #8b949e; font-weight: 600; padding: 0.5rem 0.75rem;
  border-bottom: 1px solid #30363d; }
td { padding: 0.5rem 0.75rem; border-bottom: 1px solid #21262d; }
tr:hover td { background: #1c2128; }
.impact-positive { color: #3fb950; }
.impact-negative { color: #f85149; }
.impact-neutral { color: #8b949e; }
.config-view {
  font-family: 'SF Mono', 'Fira Code', 'Fira Mono', Menlo, monospace;
  font-size: 0.8rem; background: #0d1117; color: #c9d1d9;
  border: 1px solid #30363d; border-radius: 4px; padding: 1rem;
  white-space: pre-wrap; word-break: break-word; min-height: 200px;
  width: 100%; resize: vertical;
}
.btn {
  background: #21262d; color: #c9d1d9; border: 1px solid #30363d;
  border-radius: 4px; padding: 0.4rem 1rem; cursor: pointer;
  font-size: 0.85rem; margin-right: 0.5rem; margin-top: 0.5rem;
}
.btn:hover { background: #30363d; }
.btn-primary { background: #238636; border-color: #2ea043; }
.btn-primary:hover { background: #2ea043; }
.btn-danger { background: #da3633; border-color: #f85149; }
.btn-danger:hover { background: #f85149; }
.msg { margin-top: 0.5rem; font-size: 0.85rem; }
.msg-ok { color: #3fb950; }
.msg-err { color: #f85149; }
.empty { color: #484f58; font-style: italic; padding: 1rem 0; }
</style>
</head>
<body>
<h1>Troutbot Dashboard</h1>
<!-- Status card -->
<div class="card" id="status-card">
<h2>Status</h2>
<div class="status-grid" id="status-grid">
<div class="status-item"><label>Loading...</label></div>
</div>
</div>
<!-- Recent events -->
<div class="card">
<h2>Recent Events</h2>
<div style="overflow-x:auto">
<table>
<thead><tr>
<th>ID</th><th>Time</th><th>Repo</th><th>#</th>
<th>Action</th><th>Impact</th><th>Confidence</th><th>Result</th>
</tr></thead>
<tbody id="events-body">
<tr><td colspan="8" class="empty">Loading...</td></tr>
</tbody>
</table>
</div>
</div>
<!-- Config editor -->
<div class="card">
<h2>Configuration</h2>
<div id="config-container">
<pre class="config-view" id="config-view"></pre>
<div>
<button class="btn" id="edit-btn" onclick="toggleEdit()">Edit</button>
<button class="btn btn-primary" id="save-btn" style="display:none" onclick="saveConfig()">Save</button>
<button class="btn" id="cancel-btn" style="display:none" onclick="cancelEdit()">Cancel</button>
</div>
<div class="msg" id="config-msg"></div>
</div>
</div>
<script>
let currentConfig = null;
let editing = false;
// HTML-escape untrusted values before inserting them via innerHTML.
// Event data comes from webhook payloads and must never be trusted.
function esc(v) {
  return String(v).replace(/[&<>"']/g, function(c) {
    return { '&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;', "'": '&#39;' }[c];
  });
}
async function fetchStatus() {
  try {
    const r = await fetch('/api/status');
    const d = await r.json();
    const grid = document.getElementById('status-grid');
    const upH = Math.floor(d.uptime / 3600);
    const upM = Math.floor((d.uptime % 3600) / 60);
    const upS = d.uptime % 60;
    grid.innerHTML = [
      item('Uptime', upH + 'h ' + upM + 'm ' + upS + 's'),
      item('Version', d.version),
      item('Dry Run', d.dryRun ? 'Yes' : 'No'),
      item('Backends', d.backends.join(', ')),
      item('Repos', d.repoCount),
    ].join('');
  } catch(e) { console.error('Status fetch failed', e); }
}
function item(label, value) {
  return '<div class="status-item"><label>' + esc(label) + '</label><span>' + esc(value) + '</span></div>';
}
async function fetchEvents() {
  try {
    const r = await fetch('/api/events');
    const events = await r.json();
    const tbody = document.getElementById('events-body');
    if (!events.length) {
      tbody.innerHTML = '<tr><td colspan="8" class="empty">No events recorded yet</td></tr>';
      return;
    }
    tbody.innerHTML = events.map(function(e) {
      var impact = e.analysis ? e.analysis.impact : (e.result.skipped ? 'neutral' : '—');
      var conf = e.analysis ? e.analysis.confidence.toFixed(2) : '—';
      var result = e.result.skipped ? 'skipped: ' + (e.result.reason || '') : 'processed';
      var time = new Date(e.timestamp).toLocaleTimeString();
      // Every interpolated field is escaped; impact also lands inside an
      // attribute value, so it is escaped there as well.
      return '<tr>'
        + '<td>' + esc(e.id) + '</td>'
        + '<td>' + esc(time) + '</td>'
        + '<td>' + esc(e.event.owner) + '/' + esc(e.event.repo) + '</td>'
        + '<td>' + esc(e.event.number) + '</td>'
        + '<td>' + esc(e.event.action) + '</td>'
        + '<td class="impact-' + esc(impact) + '">' + esc(impact) + '</td>'
        + '<td>' + esc(conf) + '</td>'
        + '<td>' + esc(result) + '</td>'
        + '</tr>';
    }).join('');
  } catch(e) { console.error('Events fetch failed', e); }
}
async function fetchConfig() {
  try {
    const r = await fetch('/api/config');
    currentConfig = await r.json();
    if (!editing) renderConfig();
  } catch(e) { console.error('Config fetch failed', e); }
}
function renderConfig() {
  var el = document.getElementById('config-view');
  el.textContent = JSON.stringify(currentConfig, null, 2);
}
function toggleEdit() {
  editing = true;
  var container = document.getElementById('config-container');
  var pre = document.getElementById('config-view');
  var ta = document.createElement('textarea');
  ta.className = 'config-view';
  ta.id = 'config-view';
  ta.value = JSON.stringify(currentConfig, null, 2);
  container.replaceChild(ta, pre);
  document.getElementById('edit-btn').style.display = 'none';
  document.getElementById('save-btn').style.display = '';
  document.getElementById('cancel-btn').style.display = '';
  document.getElementById('config-msg').textContent = '';
}
function cancelEdit() {
  editing = false;
  var container = document.getElementById('config-container');
  var ta = document.getElementById('config-view');
  var pre = document.createElement('pre');
  pre.className = 'config-view';
  pre.id = 'config-view';
  pre.textContent = JSON.stringify(currentConfig, null, 2);
  container.replaceChild(pre, ta);
  document.getElementById('edit-btn').style.display = '';
  document.getElementById('save-btn').style.display = 'none';
  document.getElementById('cancel-btn').style.display = 'none';
  document.getElementById('config-msg').textContent = '';
}
async function saveConfig() {
  var msg = document.getElementById('config-msg');
  var ta = document.getElementById('config-view');
  var text = ta.value;
  try {
    var parsed = JSON.parse(text);
  } catch(e) {
    msg.className = 'msg msg-err';
    msg.textContent = 'Invalid JSON: ' + e.message;
    return;
  }
  try {
    var r = await fetch('/api/config', {
      method: 'PUT',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(parsed),
    });
    var data = await r.json();
    if (!r.ok) {
      msg.className = 'msg msg-err';
      msg.textContent = 'Error: ' + (data.error || 'Unknown error');
      return;
    }
    currentConfig = data;
    msg.className = 'msg msg-ok';
    msg.textContent = 'Config saved successfully';
    cancelEdit();
  } catch(e) {
    msg.className = 'msg msg-err';
    msg.textContent = 'Request failed: ' + e.message;
  }
}
// Initial load
fetchStatus();
fetchEvents();
fetchConfig();
// Auto-refresh
setInterval(fetchStatus, 30000);
setInterval(fetchEvents, 30000);
</script>
</body>
</html>`;
}

138
src/engine/checks.ts Normal file
View file

@ -0,0 +1,138 @@
import type { AnalysisResult, ChecksBackendConfig, EngineBackend, WebhookEvent } from '../types.js';
import { fetchCheckRuns } from '../github.js';
import { getLogger } from '../logger.js';
// Critical checks that indicate build/test health - failures here are severe.
// Pluralized alternations (tests?) are needed because \b requires a non-word
// boundary: a check named "Unit Tests" would not match a bare \btest\b.
const CRITICAL_PATTERNS = [
  /\b(builds?|compile|ci)\b/i,
  /\b(tests?|jest|pytest|mocha|vitest|cargo.test|go.test|rspec|junit)\b/i,
  /\b(typecheck|tsc|mypy|type.check)\b/i,
];
// Advisory checks - useful but not blockers
const ADVISORY_PATTERNS = [
  /\b(lint|eslint|clippy|flake8|rubocop|pylint|biome|oxlint)\b/i,
  /\b(format|prettier|black|rustfmt|gofmt|fmt)\b/i,
  /\b(coverage|codecov|coveralls)\b/i,
  /\b(security|snyk|dependabot|codeql|semgrep)\b/i,
  /\b(deploy|preview|vercel|netlify)\b/i,
];
/**
 * Buckets a CI check-run name into a severity class.
 * Critical patterns win over advisory ones; anything unmatched is standard.
 */
function classifyCheck(name: string): 'critical' | 'advisory' | 'standard' {
  for (const p of CRITICAL_PATTERNS) {
    if (p.test(name)) return 'critical';
  }
  for (const p of ADVISORY_PATTERNS) {
    if (p.test(name)) return 'advisory';
  }
  return 'standard';
}
/**
 * Engine backend that scores a pull request by its CI check-run results.
 *
 * Only applies to pull_request events that carry a head SHA; everything
 * else yields a zero-confidence neutral result. Failures are weighted by
 * severity (critical 3x, standard 1x, advisory 0.5x) and any critical
 * failure forces a negative verdict regardless of the overall pass rate.
 */
export class ChecksBackend implements EngineBackend {
  name = 'checks';
  // NOTE(review): `config` is stored but never read anywhere in this class —
  // confirm whether ChecksBackendConfig options are meant to affect scoring.
  constructor(private config: ChecksBackendConfig) {}
  /**
   * Fetches check runs for the PR head SHA and derives impact + confidence.
   * Fetch failures are swallowed into a neutral result so a GitHub API
   * outage cannot crash the analysis.
   */
  async analyze(event: WebhookEvent): Promise<AnalysisResult> {
    if (event.type !== 'pull_request' || !event.sha) {
      return { impact: 'neutral', confidence: 0, reasoning: 'Not a PR or no SHA available.' };
    }
    let runs;
    try {
      runs = await fetchCheckRuns(event.owner, event.repo, event.sha);
    } catch (err) {
      getLogger().warn(
        `Failed to fetch check runs for ${event.owner}/${event.repo}@${event.sha}`,
        err
      );
      return { impact: 'neutral', confidence: 0, reasoning: 'Could not fetch CI check results.' };
    }
    if (runs.length === 0) {
      return { impact: 'neutral', confidence: 0, reasoning: 'No CI checks found.' };
    }
    const completed = runs.filter((r) => r.status === 'completed');
    const pending = runs.filter((r) => r.status !== 'completed');
    if (completed.length === 0) {
      return {
        impact: 'neutral',
        confidence: 0.1,
        reasoning: `CI: ${pending.length} check(s) still running.`,
      };
    }
    const passed = completed.filter((r) => r.conclusion === 'success');
    // cancelled and timed_out count as failures alongside explicit failure.
    const failed = completed.filter(
      (r) =>
        r.conclusion === 'failure' || r.conclusion === 'timed_out' || r.conclusion === 'cancelled'
    );
    const skipped = completed.filter(
      (r) => r.conclusion === 'neutral' || r.conclusion === 'skipped'
    );
    // "Actionable" = completed checks that actually ran to a verdict.
    const actionable = completed.length - skipped.length;
    if (actionable === 0) {
      return { impact: 'neutral', confidence: 0.2, reasoning: 'All CI checks were skipped.' };
    }
    // Classify failures by severity
    const criticalFailures = failed.filter((r) => classifyCheck(r.name) === 'critical');
    const advisoryFailures = failed.filter((r) => classifyCheck(r.name) === 'advisory');
    const standardFailures = failed.filter(
      (r) => classifyCheck(r.name) === 'standard'
    );
    // Weighted scoring: critical failures count 3x, advisory 0.5x
    const failureScore =
      criticalFailures.length * 3 + standardFailures.length * 1 + advisoryFailures.length * 0.5;
    // Total possible weight over all non-skipped completed checks, using the
    // same per-class weights so failureScore/totalWeight stays in [0, 1].
    const totalWeight =
      completed
        .filter((r) => !skipped.includes(r))
        .reduce((s, r) => {
          const cls = classifyCheck(r.name);
          return s + (cls === 'critical' ? 3 : cls === 'advisory' ? 0.5 : 1);
        }, 0);
    const weightedPassRate = totalWeight > 0 ? 1 - failureScore / totalWeight : 0;
    // Confidence: more checks = more confidence, penalize if some are still pending
    let confidence = Math.min(1, actionable / 4 + 0.1);
    if (pending.length > 0) {
      confidence *= 0.7; // reduce confidence when checks are incomplete
    }
    let impact: AnalysisResult['impact'];
    if (criticalFailures.length > 0) {
      impact = 'negative'; // any critical failure is always negative
    } else if (failed.length === 0) {
      impact = 'positive';
    } else if (weightedPassRate >= 0.8) {
      impact = 'neutral'; // only advisory/minor failures
    } else {
      impact = 'negative';
    }
    // Build detailed reasoning
    const parts: string[] = [];
    if (passed.length > 0) parts.push(`${passed.length} passed (${passed.map((r) => r.name).join(', ')})`);
    if (criticalFailures.length > 0)
      parts.push(`${criticalFailures.length} critical failure(s) (${criticalFailures.map((r) => r.name).join(', ')})`);
    if (advisoryFailures.length > 0)
      parts.push(`${advisoryFailures.length} advisory failure(s) (${advisoryFailures.map((r) => r.name).join(', ')})`);
    if (standardFailures.length > 0)
      parts.push(`${standardFailures.length} other failure(s) (${standardFailures.map((r) => r.name).join(', ')})`);
    if (skipped.length > 0) parts.push(`${skipped.length} skipped`);
    if (pending.length > 0) parts.push(`${pending.length} still running`);
    const passRate = passed.length / actionable;
    return {
      impact,
      confidence,
      reasoning: `CI: ${parts.join('; ')}. Pass rate: ${(passRate * 100).toFixed(0)}% (weighted: ${(weightedPassRate * 100).toFixed(0)}%).`,
    };
  }
}

202
src/engine/diff.ts Normal file
View file

@ -0,0 +1,202 @@
import type { AnalysisResult, DiffBackendConfig, EngineBackend, WebhookEvent } from '../types.js';
import { fetchPRFiles } from '../github.js';
import { getLogger } from '../logger.js';
// Detects test files. The `_test\b` / `_spec\b` alternatives deliberately
// omit a leading \b: an underscore is a word character, so `\b_test\b` can
// never match after a letter (e.g. Go's "foo_test.go") — the leading \b
// would require a non-word character right before the underscore.
const TEST_FILE_PATTERN =
  /\btests?\b|\bspecs?\b|__tests__|_test\b|_spec\b|\.test\.|\.spec\./i;
const GENERATED_FILE_PATTERN =
  /\b(package-lock|yarn\.lock|pnpm-lock|Cargo\.lock|go\.sum|composer\.lock|Gemfile\.lock|poetry\.lock|flake\.lock)\b|\.min\.(js|css)$|\/vendor\//i;
const CONFIG_FILE_PATTERN =
  /\.(ya?ml|toml|ini|env(\.\w+)?|json)$|^\.[\w-]+(rc|ignore)$|Makefile$|Dockerfile$|^\.github\//i;
const RISKY_FILE_PATTERN =
  /\b(migration|schema|seed|secret|credential|auth|permission|rbac|\.sql)\b/i;
const DOC_FILE_PATTERN = /\.(md|mdx|txt|rst|adoc)$|^(README|CHANGELOG|LICENSE|CONTRIBUTING)/i;
/**
 * Splits a PR's changed files into exclusive buckets (generated > tests >
 * docs > config > src, first match wins) plus a non-exclusive `risky`
 * bucket — a file can be both src and risky.
 */
function categorizeFiles(files: { filename: string; additions: number; deletions: number; changes: number }[]) {
  const src: typeof files = [];
  const tests: typeof files = [];
  const generated: typeof files = [];
  const config: typeof files = [];
  const docs: typeof files = [];
  const risky: typeof files = [];
  for (const f of files) {
    if (GENERATED_FILE_PATTERN.test(f.filename)) {
      generated.push(f);
    } else if (TEST_FILE_PATTERN.test(f.filename)) {
      tests.push(f);
    } else if (DOC_FILE_PATTERN.test(f.filename)) {
      docs.push(f);
    } else if (CONFIG_FILE_PATTERN.test(f.filename)) {
      config.push(f);
    } else {
      src.push(f);
    }
    // risky is non-exclusive - a file can be both src and risky
    if (RISKY_FILE_PATTERN.test(f.filename)) {
      risky.push(f);
    }
  }
  return { src, tests, generated, config, docs, risky };
}
/**
 * Engine backend that scores a pull request purely from the shape of its
 * diff: size, focus, test coverage, net deletions, churn, risky files and
 * documentation. Each observation becomes a weighted positive or negative
 * signal; the dominant side determines the impact.
 */
export class DiffBackend implements EngineBackend {
  name = 'diff';
  // config supplies maxChanges (size limit) and requireTests (flag PRs
  // without test changes).
  constructor(private config: DiffBackendConfig) {}
  /**
   * Fetches the PR's changed files and accumulates signals. Fetch failures
   * are swallowed into a zero-confidence neutral result.
   */
  async analyze(event: WebhookEvent): Promise<AnalysisResult> {
    if (event.type !== 'pull_request') {
      return { impact: 'neutral', confidence: 0, reasoning: 'Not a PR.' };
    }
    let files;
    try {
      files = await fetchPRFiles(event.owner, event.repo, event.number);
    } catch (err) {
      getLogger().warn(
        `Failed to fetch PR files for ${event.owner}/${event.repo}#${event.number}`,
        err
      );
      return { impact: 'neutral', confidence: 0, reasoning: 'Could not fetch PR diff.' };
    }
    if (files.length === 0) {
      return { impact: 'neutral', confidence: 0.1, reasoning: 'Empty diff.' };
    }
    const { src, tests, generated, config, docs, risky } = categorizeFiles(files);
    // Exclude generated files from change counts
    const meaningful = files.filter((f) => !GENERATED_FILE_PATTERN.test(f.filename));
    const totalAdditions = meaningful.reduce((s, f) => s + f.additions, 0);
    const totalDeletions = meaningful.reduce((s, f) => s + f.deletions, 0);
    const totalChanges = totalAdditions + totalDeletions;
    const signals: { name: string; positive: boolean; weight: number }[] = [];
    // --- Size signals ---
    if (totalChanges <= 50) {
      signals.push({ name: 'tiny PR', positive: true, weight: 1.2 });
    } else if (totalChanges <= 200) {
      signals.push({ name: 'small PR', positive: true, weight: 1 });
    } else if (totalChanges <= 500) {
      // medium - no signal either way
    } else if (totalChanges <= this.config.maxChanges) {
      signals.push({ name: `large PR (${totalChanges} lines)`, positive: false, weight: 0.8 });
    } else {
      signals.push({ name: `very large PR (${totalChanges} lines, exceeds limit)`, positive: false, weight: 1.5 });
    }
    // --- Focus signals ---
    if (src.length <= 3 && src.length > 0) {
      signals.push({ name: 'tightly focused', positive: true, weight: 1.2 });
    } else if (meaningful.length <= 10) {
      signals.push({ name: 'focused changeset', positive: true, weight: 0.8 });
    } else if (meaningful.length > 30) {
      signals.push({ name: `sprawling changeset (${meaningful.length} files)`, positive: false, weight: 1.2 });
    } else if (meaningful.length > 20) {
      signals.push({ name: `broad changeset (${meaningful.length} files)`, positive: false, weight: 0.6 });
    }
    // --- Test coverage ---
    if (tests.length > 0 && src.length > 0) {
      const testRatio = tests.length / src.length;
      if (testRatio >= 0.5) {
        signals.push({ name: 'good test coverage in diff', positive: true, weight: 1.5 });
      } else {
        signals.push({ name: 'includes tests', positive: true, weight: 1 });
      }
    } else if (tests.length > 0 && src.length === 0) {
      signals.push({ name: 'test-only change', positive: true, weight: 1.2 });
    } else if (this.config.requireTests && src.length > 0 && totalChanges > 50) {
      signals.push({ name: 'no test changes for non-trivial PR', positive: false, weight: 1.3 });
    }
    // --- Net deletion ---
    // Removing more code than is added is treated as a healthy sign.
    if (totalDeletions > totalAdditions && totalDeletions > 10) {
      const ratio = totalDeletions / Math.max(totalAdditions, 1);
      if (ratio > 3) {
        signals.push({ name: 'significant code removal', positive: true, weight: 1.3 });
      } else {
        signals.push({ name: 'net code removal', positive: true, weight: 1 });
      }
    }
    // --- Churn detection (files with high add+delete suggesting rewrites) ---
    const highChurnFiles = src.filter(
      (f) => f.additions > 50 && f.deletions > 50 && Math.min(f.additions, f.deletions) / Math.max(f.additions, f.deletions) > 0.6
    );
    if (highChurnFiles.length >= 3) {
      signals.push({ name: `high churn in ${highChurnFiles.length} files (possible refactor)`, positive: false, weight: 0.5 });
    }
    // --- Risky files ---
    if (risky.length > 0) {
      signals.push({
        name: `touches sensitive files (${risky.map((f) => f.filename.split('/').pop()).join(', ')})`,
        positive: false,
        weight: 0.7,
      });
    }
    // --- Documentation ---
    if (docs.length > 0 && src.length > 0) {
      signals.push({ name: 'includes docs updates', positive: true, weight: 0.6 });
    } else if (docs.length > 0 && src.length === 0) {
      signals.push({ name: 'docs-only change', positive: true, weight: 1 });
    }
    // --- Config-only ---
    if (config.length > 0 && src.length === 0 && tests.length === 0) {
      signals.push({ name: 'config/infra only', positive: true, weight: 0.8 });
    }
    // --- Generated file noise ---
    // Flags PRs whose generated-file churn is more than double the
    // meaningful churn (totalChanges excludes generated files).
    if (generated.length > 0) {
      const genChanges = generated.reduce((s, f) => s + f.changes, 0);
      if (genChanges > totalChanges * 2) {
        signals.push({ name: 'dominated by generated file changes', positive: false, weight: 0.4 });
      }
    }
    // --- Scoring with weights ---
    const positiveWeight = signals.filter((s) => s.positive).reduce((s, x) => s + x.weight, 0);
    const negativeWeight = signals.filter((s) => !s.positive).reduce((s, x) => s + x.weight, 0);
    let impact: AnalysisResult['impact'];
    // 10% margin required before either side wins, otherwise neutral.
    if (positiveWeight > negativeWeight * 1.1) {
      impact = 'positive';
    } else if (negativeWeight > positiveWeight * 1.1) {
      impact = 'negative';
    } else {
      impact = 'neutral';
    }
    const totalSignalWeight = positiveWeight + negativeWeight;
    // Confidence grows with how lopsided the signals are, floored at 0.25
    // whenever any signal fired at all.
    const confidence =
      signals.length > 0
        ? Math.min(1, Math.abs(positiveWeight - negativeWeight) / Math.max(totalSignalWeight, 1) * 0.6 + 0.25)
        : 0;
    // Build reasoning
    const breakdown: string[] = [];
    if (src.length > 0) breakdown.push(`${src.length} source`);
    if (tests.length > 0) breakdown.push(`${tests.length} test`);
    if (config.length > 0) breakdown.push(`${config.length} config`);
    if (docs.length > 0) breakdown.push(`${docs.length} docs`);
    if (generated.length > 0) breakdown.push(`${generated.length} generated`);
    const fileSummary = `${meaningful.length} files (${breakdown.join(', ')})`;
    const reasoning =
      signals.length > 0
        ? `Diff: ${signals.map((s) => `${s.positive ? '+' : '-'} ${s.name}`).join(', ')}. ${totalAdditions}+ ${totalDeletions}- across ${fileSummary}.`
        : 'No diff signals.';
    return { impact, confidence, reasoning };
  }
}

117
src/engine/index.ts Normal file
View file

@ -0,0 +1,117 @@
import type {
AnalysisResult,
EngineBackend,
EngineConfig,
Impact,
WebhookEvent,
} from '../types.js';
import { ChecksBackend } from './checks.js';
import { DiffBackend } from './diff.js';
import { QualityBackend } from './quality.js';
import { getLogger } from '../logger.js';
// Maps a qualitative impact onto a signed score for weighted averaging.
const impactToNumeric: Record<Impact, number> = {
  positive: 1,
  neutral: 0,
  negative: -1,
};
// Pairs a backend instance with its configured relative weight.
interface WeightedBackend {
  backend: EngineBackend;
  weight: number;
}
/**
 * Combines the enabled analysis backends into a single weighted verdict.
 *
 * Each backend yields an impact plus a confidence in [0, 1]; impacts are
 * mapped to {-1, 0, +1}, weighted by both confidence and the configured
 * backend weight, and the averaged score is classified. Low combined
 * confidence demotes any verdict to neutral.
 */
export class Engine {
  private backends: WeightedBackend[] = [];
  private confidenceThreshold: number;
  /**
   * @param config Engine section of the app config. Only enabled backends
   *   are instantiated.
   * @throws Error when no backend is enabled.
   */
  constructor(config: EngineConfig) {
    this.confidenceThreshold = config.confidenceThreshold;
    if (config.backends.checks.enabled) {
      this.backends.push({
        backend: new ChecksBackend(config.backends.checks),
        weight: config.weights.checks,
      });
    }
    if (config.backends.diff.enabled) {
      this.backends.push({
        backend: new DiffBackend(config.backends.diff),
        weight: config.weights.diff,
      });
    }
    if (config.backends.quality.enabled) {
      this.backends.push({
        backend: new QualityBackend(config.backends.quality),
        weight: config.weights.quality,
      });
    }
    if (this.backends.length === 0) {
      throw new Error('No engine backends enabled');
    }
  }
  /**
   * Runs all backends in parallel and combines their results.
   * A backend that throws is converted to a zero-confidence neutral result
   * so one failing backend cannot take down the whole analysis.
   */
  async analyze(event: WebhookEvent): Promise<AnalysisResult> {
    const logger = getLogger();
    const results = await Promise.all(
      this.backends.map(async ({ backend, weight }) => {
        try {
          const result = await backend.analyze(event);
          logger.debug(
            `Backend "${backend.name}": impact=${result.impact}, confidence=${result.confidence.toFixed(2)}`
          );
          return { result, weight };
        } catch (err) {
          logger.error(`Backend "${backend.name}" threw unexpectedly`, err);
          return {
            result: {
              impact: 'neutral' as Impact,
              confidence: 0,
              reasoning: `${backend.name}: error`,
            },
            weight,
          };
        }
      })
    );
    // Only backends that produced a signal (confidence > 0) participate.
    const active = results.filter((r) => r.result.confidence > 0);
    if (active.length === 0) {
      return { impact: 'neutral', confidence: 0, reasoning: 'No backends produced a signal.' };
    }
    const totalWeight = active.reduce((s, r) => s + r.weight, 0);
    // Guard against division by zero when every active backend has weight 0;
    // validate() should reject such configs, but the engine must never emit NaN.
    if (totalWeight <= 0) {
      return { impact: 'neutral', confidence: 0, reasoning: 'All active backends have zero weight.' };
    }
    const combinedScore =
      active.reduce(
        (s, r) => s + impactToNumeric[r.result.impact] * r.result.confidence * r.weight,
        0
      ) / totalWeight;
    const combinedConfidence =
      active.reduce((s, r) => s + r.result.confidence * r.weight, 0) / totalWeight;
    let impact: Impact;
    if (combinedScore > 0.1) {
      impact = 'positive';
    } else if (combinedScore < -0.1) {
      impact = 'negative';
    } else {
      impact = 'neutral';
    }
    if (combinedConfidence < this.confidenceThreshold) {
      impact = 'neutral';
    }
    // `active` is exactly the confidence > 0 subset; no need to re-filter.
    const reasoning = active.map((r) => r.result.reasoning).join(' ');
    return { impact, confidence: combinedConfidence, reasoning };
  }
}
export function createEngine(config: EngineConfig): Engine {
return new Engine(config);
}

179
src/engine/quality.ts Normal file
View file

@ -0,0 +1,179 @@
import type {
AnalysisResult,
EngineBackend,
QualityBackendConfig,
WebhookEvent,
} from '../types.js';
// Conventional commit title prefixes (e.g. "feat:", "fix(scope)!:").
const CONVENTIONAL_COMMIT =
  /^(feat|fix|docs|style|refactor|perf|test|build|ci|chore|revert)(\(.+\))?!?:\s/i;
// Markers that the author considers the work unfinished / not mergeable.
const WIP_PATTERN = /\b(wip|work.in.progress|do.not.merge|don't.merge|draft)\b/i;
// Mentions of a breaking change in the title or body.
const BREAKING_PATTERN = /\b(breaking.change|BREAKING)\b/i;
// Unfinished-work markers left in the description text (case-sensitive).
const TODO_PATTERN = /\b(TODO|FIXME|HACK|XXX|TEMP)\b/;

/**
 * Pure-text analysis of an issue/PR title and body. Collects weighted
 * positive/negative signals (structure, detail, linked issues, test plans,
 * and so on) and combines them into an impact plus a confidence score.
 * Needs no GitHub token - it only reads the event payload.
 */
export class QualityBackend implements EngineBackend {
  name = 'quality';
  constructor(private config: QualityBackendConfig) {}

  async analyze(event: WebhookEvent): Promise<AnalysisResult> {
    const body = event.body.trim();
    const title = event.title.trim();
    const signals: { name: string; positive: boolean; weight: number }[] = [];

    // --- Title analysis ---
    if (title.length < 10) {
      signals.push({ name: 'very short title', positive: false, weight: 1.2 });
    } else if (title.length > 200) {
      signals.push({ name: 'excessively long title', positive: false, weight: 0.5 });
    }
    if (CONVENTIONAL_COMMIT.test(title)) {
      signals.push({ name: 'conventional commit format', positive: true, weight: 1 });
    }
    if (WIP_PATTERN.test(title) || WIP_PATTERN.test(body)) {
      signals.push({ name: 'marked as work-in-progress', positive: false, weight: 1.5 });
    }

    // --- Body analysis ---
    if (body.length === 0) {
      signals.push({ name: 'empty description', positive: false, weight: 2 });
    } else if (body.length < this.config.minBodyLength) {
      signals.push({ name: `short description (${body.length} chars)`, positive: false, weight: 1.2 });
    } else {
      // At or above the configured minimum length.
      signals.push({ name: 'adequate description', positive: true, weight: 1 });
      if (body.length > 300) {
        signals.push({ name: 'thorough description', positive: true, weight: 0.5 });
      }
    }
    if (/```[\s\S]*?```/.test(body)) {
      signals.push({ name: 'has code blocks', positive: true, weight: 0.7 });
    }
    if (/^#{1,6}\s/m.test(body)) {
      signals.push({ name: 'has section headers', positive: true, weight: 0.8 });
    }
    // Checklists: fully checked is positive, partially done is negative.
    const checklistItems = body.match(/^[\s]*-\s*\[[ x]\]/gm);
    if (checklistItems) {
      const checked = checklistItems.filter((i) => /\[x\]/i.test(i)).length;
      const total = checklistItems.length; // always >= 1 when match() is non-null
      if (checked === total) {
        signals.push({ name: `checklist complete (${total}/${total})`, positive: true, weight: 1 });
      } else {
        signals.push({ name: `checklist incomplete (${checked}/${total})`, positive: false, weight: 0.8 });
      }
    }
    // Breaking changes: not inherently good or bad, but an undocumented one
    // is a red flag while a described one shows care.
    if (BREAKING_PATTERN.test(title) || BREAKING_PATTERN.test(body)) {
      if (body.length > 100 && BREAKING_PATTERN.test(body)) {
        signals.push({ name: 'breaking change documented', positive: true, weight: 0.8 });
      } else {
        signals.push({ name: 'breaking change mentioned but not detailed', positive: false, weight: 0.8 });
      }
    }
    // TODOs/FIXMEs in the description suggest unfinished work. Count with a
    // global copy of the pattern: match() without /g returns [match, group]
    // and would always report a bogus count of 2 no matter how many markers
    // the body actually contains.
    const todoMatches = body.match(new RegExp(TODO_PATTERN.source, 'g'));
    if (todoMatches) {
      signals.push({ name: `unfinished markers in description (${todoMatches.length})`, positive: false, weight: 0.6 });
    }

    // --- Type-specific signals ---
    if (event.type === 'issue') {
      if (/\b(steps?\s+to\s+reproduce|reproduction|repro\s+steps?)\b/i.test(body)) {
        signals.push({ name: 'has reproduction steps', positive: true, weight: 1.3 });
      }
      if (/\b(expected|actual)\s+(behavior|behaviour|result|output)\b/i.test(body)) {
        signals.push({ name: 'has expected/actual behavior', positive: true, weight: 1.2 });
      }
      if (/\b(version|environment|os|platform|browser|node|python|java|rust|go)\s*[:\d]/i.test(body)) {
        signals.push({ name: 'has environment details', positive: true, weight: 1 });
      }
      if (/\b(stack\s*trace|traceback|error|exception|panic)\b/i.test(body)) {
        signals.push({ name: 'includes error output', positive: true, weight: 0.8 });
      }
      // Template usage detection (common issue template markers)
      if (/\b(describe the bug|feature request|is your feature request related to)\b/i.test(body)) {
        signals.push({ name: 'uses issue template', positive: true, weight: 0.6 });
      }
    }
    if (event.type === 'pull_request') {
      if (/\b(fix(es)?|clos(es|ing)|resolv(es|ing))\s+#\d+/i.test(body)) {
        signals.push({ name: 'links to issue', positive: true, weight: 1.3 });
      }
      if (/\b(test\s*(plan|strategy|coverage)|how\s+to\s+test|testing|tested\s+by)\b/i.test(body)) {
        signals.push({ name: 'has test plan', positive: true, weight: 1.2 });
      }
      // Migration or upgrade guide
      if (/\b(migration|upgrade|breaking).*(guide|instruction|step)/i.test(body)) {
        signals.push({ name: 'has migration guide', positive: true, weight: 1 });
      }
      // Before/after comparison: at least two mentions of before/after.
      // (The original tested the same regex twice before counting matches;
      // the single global match below carries the whole check.)
      const beforeAfter = body.match(/\b(before|after)\b/gi);
      if (beforeAfter && beforeAfter.length >= 2) {
        signals.push({ name: 'has before/after comparison', positive: true, weight: 0.7 });
      }
    }
    // Shared: references to other issues/PRs
    const refs = body.match(/#\d+/g);
    if (refs && refs.length > 0) {
      signals.push({ name: `references ${refs.length} issue(s)/PR(s)`, positive: true, weight: 0.6 });
    }
    // Screenshots or images (markdown image syntax or inline HTML <img>)
    if (/!\[.*\]\(.*\)/.test(body) || /<img\s/i.test(body)) {
      signals.push({ name: 'has images/screenshots', positive: true, weight: 0.8 });
    }

    // --- Weighted scoring ---
    if (signals.length === 0) {
      return { impact: 'neutral', confidence: 0.1, reasoning: 'No quality signals detected.' };
    }
    const positiveWeight = signals.filter((s) => s.positive).reduce((s, x) => s + x.weight, 0);
    const negativeWeight = signals.filter((s) => !s.positive).reduce((s, x) => s + x.weight, 0);
    // One side must outweigh the other by 20% to win; otherwise neutral.
    let impact: AnalysisResult['impact'];
    if (positiveWeight > negativeWeight * 1.2) {
      impact = 'positive';
    } else if (negativeWeight > positiveWeight * 1.2) {
      impact = 'negative';
    } else {
      impact = 'neutral';
    }
    const totalWeight = positiveWeight + negativeWeight;
    // Confidence grows with how lopsided the signals are, floored at 0.2.
    const confidence = Math.min(
      1,
      Math.abs(positiveWeight - negativeWeight) / Math.max(totalWeight, 1) * 0.5 + 0.2
    );
    const reasoning = `Quality: ${signals.map((s) => `${s.positive ? '+' : '-'} ${s.name}`).join(', ')}.`;
    return { impact, confidence, reasoning };
  }
}

40
src/events.ts Normal file
View file

@ -0,0 +1,40 @@
import type { WebhookEvent, AnalysisResult } from './types.js';
// A single processed webhook event kept in the in-memory history, together
// with the handler's outcome and (when the engine actually ran) its result.
export interface EventEntry {
  id: number; // monotonically increasing per-process id
  timestamp: string; // ISO-8601 time the event was recorded
  event: WebhookEvent;
  result: Record<string, unknown>; // handler outcome (processed/skipped + details)
  analysis?: AnalysisResult; // present only when analysis was performed
}
// Bounded in-memory history of recent webhook events; the oldest entry is
// evicted once the cap is reached.
const MAX_ENTRIES = 100;
const buffer: EventEntry[] = [];
let nextId = 1;

/** Appends an event to the history, evicting the oldest entry when full. */
export function recordEvent(
  event: WebhookEvent,
  result: Record<string, unknown>,
  analysis?: AnalysisResult
): void {
  buffer.push({
    id: nextId++,
    timestamp: new Date().toISOString(),
    event,
    result,
    analysis,
  });
  while (buffer.length > MAX_ENTRIES) {
    buffer.shift();
  }
}

/** Returns a copy of the history, newest entry first. */
export function getRecentEvents(): EventEntry[] {
  return buffer.slice().reverse();
}

/** Empties the history. */
export function clearEvents(): void {
  buffer.length = 0;
}

43
src/filters.ts Normal file
View file

@ -0,0 +1,43 @@
import type { FiltersConfig, WebhookEvent } from './types.js';
/**
 * Decides whether a webhook event should be analyzed, based on the
 * configured label, author, and branch filters. Returns the first failing
 * filter's reason; passes when no filter rejects the event.
 */
export function shouldProcess(
  event: WebhookEvent,
  filters: FiltersConfig
): { pass: boolean; reason?: string } {
  const reject = (reason: string) => ({ pass: false, reason });
  const { labels, authors, branches } = filters;
  // Labels: require at least one included label, and none of the excluded.
  if (labels.include.length > 0 && !event.labels.some((l) => labels.include.includes(l))) {
    return reject('Missing required label');
  }
  if (labels.exclude.length > 0 && event.labels.some((l) => labels.exclude.includes(l))) {
    return reject('Has excluded label');
  }
  // Authors: optional allow-list first, then the deny-list.
  if (authors.include && authors.include.length > 0 && !authors.include.includes(event.author)) {
    return reject('Author not in include list');
  }
  if (authors.exclude.length > 0 && authors.exclude.includes(event.author)) {
    return reject('Author is excluded');
  }
  // Branches: only meaningful for PRs (issues carry no branch).
  if (event.branch && branches.include.length > 0 && !branches.include.includes(event.branch)) {
    return reject('Branch not in include list');
  }
  return { pass: true };
}

187
src/github.ts Normal file
View file

@ -0,0 +1,187 @@
import { Octokit } from '@octokit/rest';
import { getLogger } from './logger.js';
import type { CheckRun, PRFile, ResponseConfig } from './types.js';
// Shared Octokit client; null means no token was provided (dry-run mode).
let octokit: Octokit | null = null;

/**
 * Initializes the shared GitHub client. Without a token the module stays in
 * dry-run mode: write operations only log what they would have done.
 */
export function initGitHub(token?: string): void {
  if (token) {
    octokit = new Octokit({ auth: token });
    return;
  }
  getLogger().warn('No GITHUB_TOKEN set - running in dry-run mode, comments will not be posted');
}

/** True when no token was configured and writes are only logged. */
export function isDryRun(): boolean {
  return octokit === null;
}
// --- Comment operations ---

/** Posts a new comment on an issue/PR, or logs it in dry-run mode. */
export async function postComment(
  owner: string,
  repo: string,
  issueNumber: number,
  body: string
): Promise<void> {
  if (octokit === null) {
    getLogger().info(`[dry-run] Would post comment on ${owner}/${repo}#${issueNumber}:\n${body}`);
    return;
  }
  await octokit.issues.createComment({ owner, repo, issue_number: issueNumber, body });
  getLogger().info(`Posted comment on ${owner}/${repo}#${issueNumber}`);
}
/**
 * Scans all comments on an issue/PR for one containing the bot's marker
 * string. In dry-run mode (no token) it reports no existing comment.
 */
export async function hasExistingComment(
  owner: string,
  repo: string,
  issueNumber: number,
  marker: string
): Promise<{ exists: boolean; commentId?: number }> {
  if (octokit === null) {
    return { exists: false };
  }
  // Paginate: busy threads can exceed a single 100-comment page.
  const comments = await octokit.paginate(octokit.issues.listComments, {
    owner,
    repo,
    issue_number: issueNumber,
    per_page: 100,
  });
  const match = comments.find((c) => c.body?.includes(marker));
  return match ? { exists: true, commentId: match.id } : { exists: false };
}
/** Edits an existing bot comment in place, or logs it in dry-run mode. */
export async function updateComment(
  owner: string,
  repo: string,
  commentId: number,
  body: string
): Promise<void> {
  if (octokit === null) {
    getLogger().info(`[dry-run] Would update comment ${commentId}:\n${body}`);
    return;
  }
  await octokit.issues.updateComment({ owner, repo, comment_id: commentId, body });
  getLogger().info(`Updated comment ${commentId} on ${owner}/${repo}`);
}
// --- Data fetching for engine backends ---

/**
 * Fetches all check runs for a git ref. Returns an empty list in dry-run
 * mode (no token), which callers treat as "no signal".
 */
export async function fetchCheckRuns(
  owner: string,
  repo: string,
  ref: string
): Promise<CheckRun[]> {
  if (!octokit) {
    getLogger().debug('[dry-run] Cannot fetch check runs without a token');
    return [];
  }
  // Paginate like the other fetchers in this module: a single listForRef
  // page caps at 100 check runs, so a busy ref would silently be truncated.
  const checkRuns = await octokit.paginate(octokit.checks.listForRef, {
    owner,
    repo,
    ref,
    per_page: 100,
  });
  return checkRuns.map((run) => ({
    name: run.name,
    status: run.status,
    conclusion: run.conclusion,
  }));
}
/**
 * Fetches the full (paginated) list of changed files for a pull request.
 * Returns an empty list in dry-run mode.
 */
export async function fetchPRFiles(
  owner: string,
  repo: string,
  prNumber: number
): Promise<PRFile[]> {
  if (octokit === null) {
    getLogger().debug('[dry-run] Cannot fetch PR files without a token');
    return [];
  }
  const changed = await octokit.paginate(octokit.pulls.listFiles, {
    owner,
    repo,
    pull_number: prNumber,
    per_page: 100,
  });
  return changed.map((file) => ({
    filename: file.filename,
    additions: file.additions,
    deletions: file.deletions,
    changes: file.changes,
  }));
}
/**
 * Fetches the PR metadata needed to build an event for (re-)analysis.
 * Returns null in dry-run mode (no token).
 */
export async function fetchPR(
  owner: string,
  repo: string,
  prNumber: number
): Promise<{
  title: string;
  body: string;
  author: string;
  labels: string[];
  branch: string;
  sha: string;
} | null> {
  if (octokit === null) {
    return null;
  }
  const { data } = await octokit.pulls.get({ owner, repo, pull_number: prNumber });
  // Labels may arrive as plain strings or as objects with a `name` field.
  const labels = (data.labels || []).map((label) =>
    typeof label === 'string' ? label : label.name || ''
  );
  return {
    title: data.title,
    body: data.body || '',
    author: data.user?.login || '',
    labels,
    branch: data.head.ref,
    sha: data.head.sha,
  };
}
// --- Comment formatting ---

/** Picks a uniformly random element; undefined for an empty list. */
function pickRandom(list: string[]): string {
  return list[Math.floor(Math.random() * list.length)];
}

/**
 * Renders the bot comment: the hidden marker line, a randomly chosen
 * message template for the impact (with {type}/{impact} placeholders
 * substituted), plus optional confidence and reasoning sections.
 */
export function formatComment(
  responseConfig: ResponseConfig,
  type: 'issue' | 'pull_request',
  impact: string,
  confidence: number,
  reasoning: string
): string {
  const typeLabel = type === 'pull_request' ? 'pull request' : 'issue';
  const { messages } = responseConfig;
  let messageList: string[];
  if (impact === 'positive') {
    messageList = messages.positive;
  } else if (impact === 'negative') {
    messageList = messages.negative;
  } else {
    messageList = messages.neutral;
  }
  // Guard against an empty (misconfigured) message list: fall back to a
  // plain statement instead of crashing on undefined.replace().
  const template =
    messageList.length > 0 ? pickRandom(messageList) : 'This {type} looks {impact}.';
  let body = responseConfig.commentMarker + '\n\n';
  body += template.replace(/\{type\}/g, typeLabel).replace(/\{impact\}/g, impact);
  if (responseConfig.includeConfidence) {
    body += `\n\n**Confidence:** ${(confidence * 100).toFixed(0)}%`;
  }
  if (responseConfig.includeReasoning) {
    body += `\n\n**Analysis:** ${reasoning}`;
  }
  return body;
}

167
src/index.ts Normal file
View file

@ -0,0 +1,167 @@
import { loadConfig } from './config.js';
import { initLogger, getLogger } from './logger.js';
import {
initGitHub,
fetchPR,
hasExistingComment,
postComment,
updateComment,
formatComment,
} from './github.js';
import { createApp } from './server.js';
import { createEngine } from './engine/index.js';
import type { WebhookEvent } from './types.js';
/**
 * CLI entry point: analyzes a single PR ("owner/repo#number"), logs the
 * verdict, and posts or updates the bot comment. Exits the process on
 * invalid input, a missing token, or a fetch failure.
 */
async function analyzeOne(target: string) {
  const parsed = target.match(/^([^/]+)\/([^#]+)#(\d+)$/);
  if (!parsed) {
    console.error('Usage: troutbot analyze <owner/repo#number>');
    process.exit(1);
  }
  const [, owner, repo, numStr] = parsed;
  const prNumber = parseInt(numStr, 10);
  const config = loadConfig();
  initLogger(config.logging);
  const logger = getLogger();
  initGitHub(process.env.GITHUB_TOKEN);
  if (!process.env.GITHUB_TOKEN) {
    logger.error('GITHUB_TOKEN is required for analyze mode');
    process.exit(1);
  }
  const prData = await fetchPR(owner, repo, prNumber);
  if (!prData) {
    logger.error(`Could not fetch PR ${owner}/${repo}#${prNumber}`);
    process.exit(1);
  }
  const event: WebhookEvent = {
    action: 'analyze',
    type: 'pull_request',
    number: prNumber,
    title: prData.title,
    body: prData.body,
    owner,
    repo,
    author: prData.author,
    labels: prData.labels,
    branch: prData.branch,
    sha: prData.sha,
  };
  const analysis = await createEngine(config.engine).analyze(event);
  logger.info(
    `Analyzed ${owner}/${repo}#${prNumber}: impact=${analysis.impact}, confidence=${analysis.confidence.toFixed(2)}`
  );
  logger.info(`Reasoning: ${analysis.reasoning}`);
  const { commentMarker, allowUpdates } = config.response;
  const existing = await hasExistingComment(owner, repo, prNumber, commentMarker);
  if (existing.exists && !allowUpdates) {
    logger.info(`Already commented on ${owner}/${repo}#${prNumber}, skipping`);
    return;
  }
  const commentBody = formatComment(
    config.response,
    event.type,
    analysis.impact,
    analysis.confidence,
    analysis.reasoning
  );
  if (existing.exists && allowUpdates && existing.commentId) {
    await updateComment(owner, repo, existing.commentId, commentBody);
  } else {
    await postComment(owner, repo, prNumber, commentBody);
  }
}
/**
 * Default entry point: loads configuration, initializes logging and the
 * GitHub client, then starts the Express webhook server with graceful
 * shutdown on SIGTERM/SIGINT.
 */
function serve() {
  const config = loadConfig();
  initLogger(config.logging);
  const logger = getLogger();
  // Without a token the GitHub module runs in dry-run mode (writes are
  // only logged; token-dependent backends get no data).
  initGitHub(process.env.GITHUB_TOKEN);
  if (!process.env.GITHUB_TOKEN) {
    logger.warn(
      'No GITHUB_TOKEN - running in dry-run mode (checks and diff backends will be inactive)'
    );
  }
  if (!process.env.WEBHOOK_SECRET) {
    logger.warn('No WEBHOOK_SECRET - webhook signature verification is disabled');
  }
  const app = createApp(config);
  const port = config.server.port;
  // Names of the enabled backends, for the startup banner below.
  const enabledBackends = Object.entries(config.engine.backends)
    .filter(([, v]) => v.enabled)
    .map(([k]) => k);
  const server = app.listen(port, () => {
    logger.info(`Troutbot listening on port ${port}`);
    logger.info(`Enabled backends: ${enabledBackends.join(', ')}`);
    // Watched repos (empty repository list means "accept all")
    if (config.repositories.length > 0) {
      const repos = config.repositories.map((r) => `${r.owner}/${r.repo}`).join(', ');
      logger.info(`Watched repos: ${repos}`);
    } else {
      logger.info('Watched repos: all (no repository filter)');
    }
    // Active filters (only log non-empty ones)
    const { filters } = config;
    if (filters.labels.include.length > 0)
      logger.info(`Label include filter: ${filters.labels.include.join(', ')}`);
    if (filters.labels.exclude.length > 0)
      logger.info(`Label exclude filter: ${filters.labels.exclude.join(', ')}`);
    if (filters.authors.exclude.length > 0)
      logger.info(`Excluded authors: ${filters.authors.exclude.join(', ')}`);
    if (filters.branches.include.length > 0)
      logger.info(`Branch filter: ${filters.branches.include.join(', ')}`);
    // Engine weights and confidence threshold
    const { weights, confidenceThreshold } = config.engine;
    logger.info(
      `Engine weights: checks=${weights.checks}, diff=${weights.diff}, quality=${weights.quality} | threshold=${confidenceThreshold}`
    );
    // Comment update mode
    logger.info(`Comment updates: ${config.response.allowUpdates ? 'enabled' : 'disabled'}`);
    logger.info(`Dashboard available at http://localhost:${port}/dashboard`);
  });
  // Stop accepting new connections, let in-flight requests finish, and
  // force-exit after 10s if close() never completes. The timer is unref'd
  // so it cannot itself keep the process alive.
  function shutdown(signal: string) {
    logger.info(`Received ${signal}, shutting down gracefully...`);
    server.close(() => {
      logger.info('Server closed');
      process.exit(0);
    });
    setTimeout(() => {
      logger.warn('Graceful shutdown timed out, forcing exit');
      process.exit(1);
    }, 10_000).unref();
  }
  process.on('SIGTERM', () => shutdown('SIGTERM'));
  process.on('SIGINT', () => shutdown('SIGINT'));
}
// CLI dispatch: `troutbot analyze <owner/repo#number>` runs a one-shot
// analysis; anything else starts the webhook server.
const [command, target] = process.argv.slice(2);
if (command === 'analyze' && target) {
  analyzeOne(target).catch((err) => {
    console.error(err);
    process.exit(1);
  });
} else {
  serve();
}

53
src/logger.ts Normal file
View file

@ -0,0 +1,53 @@
import winston from 'winston';
import type { LoggingConfig } from './types.js';
// Winston format that scrubs obvious secrets from log messages before any
// transport sees them: GitHub PATs (ghp_...) and values that follow a
// "secret=" / "secret:" key. NOTE(review): only `info.message` is scrubbed;
// structured metadata and error stacks pass through unredacted - confirm
// that is acceptable.
const redactSecrets = winston.format((info) => {
  const msg = info.message as string;
  if (typeof msg === 'string') {
    info.message = msg
      .replace(/ghp_[a-zA-Z0-9]{36,}/g, 'ghp_***REDACTED***')
      .replace(/(?<=secret[=: ]+)[^\s,}]+/gi, '***REDACTED***');
  }
  return info;
});
// Module-level singleton; initLogger replaces it, getLogger lazily creates
// a bare fallback if called before initialization.
let logger: winston.Logger;

/**
 * Builds the application logger: colorized console output, plus an optional
 * JSON file transport when `config.file` is set. Messages pass through the
 * secret-redaction format before any transport.
 */
export function initLogger(config: LoggingConfig): winston.Logger {
  const consoleTransport = new winston.transports.Console({
    format: winston.format.combine(winston.format.colorize(), winston.format.simple()),
  });
  const transports: winston.transport[] = [consoleTransport];
  if (config.file) {
    transports.push(
      new winston.transports.File({
        filename: config.file,
        format: winston.format.combine(winston.format.timestamp(), winston.format.json()),
      })
    );
  }
  logger = winston.createLogger({
    level: config.level || 'info',
    format: winston.format.combine(
      redactSecrets(),
      winston.format.timestamp(),
      winston.format.errors({ stack: true })
    ),
    transports,
  });
  return logger;
}

/**
 * Returns the configured logger, creating a minimal console-only fallback
 * when initLogger has not run yet. NOTE(review): the fallback skips the
 * secret-redaction format - confirm early log lines cannot carry secrets.
 */
export function getLogger(): winston.Logger {
  if (!logger) {
    logger = winston.createLogger({
      level: 'info',
      transports: [new winston.transports.Console()],
    });
  }
  return logger;
}

289
src/server.ts Normal file
View file

@ -0,0 +1,289 @@
import crypto from 'node:crypto';
import express from 'express';
import rateLimit from 'express-rate-limit';
import type { Config, WebhookEvent, AnalysisResult } from './types.js';
import { shouldProcess } from './filters.js';
import { createEngine } from './engine/index.js';
import {
fetchPR,
formatComment,
hasExistingComment,
postComment,
updateComment,
} from './github.js';
import { getLogger } from './logger.js';
import { recordEvent } from './events.js';
import { createDashboardRouter } from './dashboard.js';
// Process start time, used for the /health uptime counter.
const startTime = Date.now();

/**
 * Builds the Express app: JSON body parsing (with the raw bytes retained
 * for HMAC verification), a 30s response timeout, a rate-limited
 * POST /webhook endpoint, a GET /health endpoint, and the dashboard router.
 */
export function createApp(config: Config): express.Express {
  const app = express();
  const logger = getLogger();
  const engine = createEngine(config.engine);
  // Keep the raw request bytes: HMAC verification must run over the exact
  // payload GitHub signed, not a re-serialized JSON body.
  app.use(
    express.json({
      limit: '1mb',
      verify: (req, _res, buf) => {
        (req as unknown as Record<string, Buffer>).rawBody = buf;
      },
    })
  );
  app.use((_req, res, next) => {
    res.setTimeout(30_000, () => {
      logger.warn('Response timeout reached (30s)');
      if (!res.headersSent) {
        res.status(504).json({ error: 'Response timeout' });
      }
    });
    next();
  });
  const webhookLimiter = rateLimit({
    windowMs: 60_000,
    limit: config.server.rateLimit ?? 120,
    standardHeaders: 'draft-7',
    legacyHeaders: false,
    message: { error: 'Too many requests, please try again later' },
  });
  const enabledBackends = Object.entries(config.engine.backends)
    .filter(([, v]) => v.enabled)
    .map(([k]) => k);
  app.get('/health', (_req, res) => {
    res.json({
      status: 'ok',
      uptime: Math.floor((Date.now() - startTime) / 1000),
      version: process.env.npm_package_version ?? 'unknown',
      dryRun: !process.env.GITHUB_TOKEN,
      backends: enabledBackends,
    });
  });
  app.post('/webhook', webhookLimiter, async (req, res) => {
    try {
      // Signature verification (only when WEBHOOK_SECRET is configured).
      const secret = process.env.WEBHOOK_SECRET;
      if (secret) {
        const signature = req.headers['x-hub-signature-256'] as string | undefined;
        if (!signature) {
          logger.warn('Missing webhook signature');
          res.status(401).json({ error: 'Missing signature' });
          return;
        }
        const rawBody = (req as unknown as Record<string, Buffer>).rawBody;
        const expected =
          'sha256=' + crypto.createHmac('sha256', secret).update(rawBody).digest('hex');
        const signatureBuf = Buffer.from(signature);
        const expectedBuf = Buffer.from(expected);
        // timingSafeEqual throws a RangeError when buffer lengths differ, so
        // check the length first: a malformed header must yield a clean 401,
        // not a 500 from the outer catch.
        if (
          signatureBuf.length !== expectedBuf.length ||
          !crypto.timingSafeEqual(signatureBuf, expectedBuf)
        ) {
          logger.warn('Invalid webhook signature');
          res.status(401).json({ error: 'Invalid signature' });
          return;
        }
      }
      const eventType = req.headers['x-github-event'] as string;
      const payload = req.body;
      // check_suite completion re-analyzes the PRs attached to that suite.
      if (eventType === 'check_suite' && payload.action === 'completed') {
        await handleCheckSuiteCompleted(payload, config, engine);
        res.json({ processed: true, event: 'check_suite' });
        return;
      }
      if (eventType !== 'issues' && eventType !== 'pull_request') {
        res.json({ skipped: true, reason: `Unhandled event: ${eventType}` });
        return;
      }
      const action = payload.action as string;
      if (!['opened', 'edited', 'synchronize'].includes(action)) {
        res.json({ skipped: true, reason: `Unhandled action: ${action}` });
        return;
      }
      const event = parseEvent(eventType, payload);
      if (!event) {
        res.json({ skipped: true, reason: 'Could not parse event' });
        return;
      }
      const result = await analyzeAndComment(event, config, engine);
      res.json(result);
    } catch (err) {
      logger.error('Error processing webhook', err);
      res.status(500).json({ error: 'Internal server error' });
    }
  });
  app.use(createDashboardRouter(config));
  return app;
}
/**
 * Full handling pipeline for one parsed event: repository allow-list check,
 * configured filters, engine analysis, then posting (or updating) the bot
 * comment. Every path - skipped or processed - records its outcome in the
 * event history. Returns the JSON object the webhook endpoint sends back.
 */
async function analyzeAndComment(
  event: WebhookEvent,
  config: Config,
  engine: ReturnType<typeof createEngine>
): Promise<Record<string, unknown>> {
  const logger = getLogger();
  // Check if repo is configured; an empty list means "accept every repo".
  if (config.repositories.length > 0) {
    const repoMatch = config.repositories.some(
      (r) => r.owner === event.owner && r.repo === event.repo
    );
    if (!repoMatch) {
      logger.debug(`Ignoring event for unconfigured repo ${event.owner}/${event.repo}`);
      const result = { skipped: true, reason: 'Repository not configured' };
      recordEvent(event, result);
      return result;
    }
  }
  // Apply label/author/branch filters
  const filterResult = shouldProcess(event, config.filters);
  if (!filterResult.pass) {
    logger.debug(`Filtered out: ${filterResult.reason}`);
    const result = { skipped: true, reason: filterResult.reason };
    recordEvent(event, result);
    return result;
  }
  // Run analysis
  const analysis = await engine.analyze(event);
  logger.info(
    `Analyzed ${event.owner}/${event.repo}#${event.number}: impact=${analysis.impact}, confidence=${analysis.confidence.toFixed(2)}`
  );
  // Check for an existing bot comment (found via the embedded marker string)
  const { commentMarker, allowUpdates } = config.response;
  const existing = await hasExistingComment(event.owner, event.repo, event.number, commentMarker);
  if (existing.exists && !allowUpdates) {
    logger.info(`Already commented on ${event.owner}/${event.repo}#${event.number}, skipping`);
    const result = { skipped: true, reason: 'Already commented' };
    recordEvent(event, result, analysis);
    return result;
  }
  const body = formatComment(
    config.response,
    event.type,
    analysis.impact,
    analysis.confidence,
    analysis.reasoning
  );
  // Update in place when allowed and found; otherwise post a fresh comment.
  if (existing.exists && allowUpdates && existing.commentId) {
    await updateComment(event.owner, event.repo, existing.commentId, body);
  } else {
    await postComment(event.owner, event.repo, event.number, body);
  }
  const result = { processed: true, impact: analysis.impact, confidence: analysis.confidence };
  recordEvent(event, result, analysis);
  return result;
}
/**
 * Handles a completed check_suite by re-analyzing each PR attached to the
 * suite, so an existing bot comment can be refreshed with CI results.
 * No-op unless comment updates are enabled - without updates a re-analysis
 * could never be posted anyway.
 */
async function handleCheckSuiteCompleted(
  payload: Record<string, unknown>,
  config: Config,
  engine: ReturnType<typeof createEngine>
): Promise<void> {
  const logger = getLogger();
  if (!config.response.allowUpdates) {
    logger.debug('check_suite received but allowUpdates is false, skipping');
    return;
  }
  const checkSuite = payload.check_suite as Record<string, unknown>;
  const pullRequests = (checkSuite.pull_requests as Array<Record<string, unknown>>) || [];
  const repo = payload.repository as Record<string, unknown>;
  const owner = (repo.owner as Record<string, unknown>).login as string;
  const repoName = repo.name as string;
  // PRs are handled sequentially; a failure on one PR is logged and does
  // not stop the remaining PRs from being re-analyzed.
  for (const pr of pullRequests) {
    const prNumber = pr.number as number;
    logger.info(`Re-analyzing ${owner}/${repoName}#${prNumber} after check_suite completed`);
    try {
      const prData = await fetchPR(owner, repoName, prNumber);
      if (!prData) {
        logger.warn(`Could not fetch PR ${owner}/${repoName}#${prNumber}`);
        continue;
      }
      const event: WebhookEvent = {
        action: 'check_suite_completed',
        type: 'pull_request',
        number: prNumber,
        title: prData.title,
        body: prData.body,
        owner,
        repo: repoName,
        author: prData.author,
        labels: prData.labels,
        branch: prData.branch,
        sha: prData.sha,
      };
      await analyzeAndComment(event, config, engine);
    } catch (err) {
      logger.error(`Failed to re-analyze PR ${owner}/${repoName}#${prNumber}`, err);
    }
  }
}
/**
 * Converts a raw GitHub webhook payload into the internal WebhookEvent
 * shape. Returns null for payloads that don't match the expected structure
 * (any access failure is caught and treated as unparseable).
 */
function parseEvent(eventType: string, payload: Record<string, unknown>): WebhookEvent | null {
  try {
    const repo = payload.repository as Record<string, unknown>;
    const owner = (repo.owner as Record<string, unknown>).login as string;
    // Label payload entries are objects carrying a `name` field.
    const labelNames = (raw: unknown): string[] =>
      ((raw as Array<Record<string, unknown>>) || []).map((l) => l.name as string);
    if (eventType === 'issues') {
      const issue = payload.issue as Record<string, unknown>;
      return {
        action: payload.action as string,
        type: 'issue',
        number: issue.number as number,
        title: (issue.title as string) || '',
        body: (issue.body as string) || '',
        owner,
        repo: repo.name as string,
        author: (issue.user as Record<string, unknown>).login as string,
        labels: labelNames(issue.labels),
      };
    }
    if (eventType === 'pull_request') {
      const pr = payload.pull_request as Record<string, unknown>;
      const head = pr.head as Record<string, unknown>;
      return {
        action: payload.action as string,
        type: 'pull_request',
        number: pr.number as number,
        title: (pr.title as string) || '',
        body: (pr.body as string) || '',
        owner,
        repo: repo.name as string,
        author: (pr.user as Record<string, unknown>).login as string,
        labels: labelNames(pr.labels),
        branch: head.ref as string,
        sha: head.sha as string,
      };
    }
    return null;
  } catch {
    return null;
  }
}

122
src/types.ts Normal file
View file

@ -0,0 +1,122 @@
// Top-level application configuration (see config.example.ts for a
// commented example).
export interface Config {
  server: ServerConfig;
  repositories: RepoConfig[];
  filters: FiltersConfig;
  engine: EngineConfig;
  response: ResponseConfig;
  logging: LoggingConfig;
}
// HTTP server settings.
export interface ServerConfig {
  port: number;
  rateLimit?: number; // webhook requests per minute; defaults to 120 when unset
}
// A repository the bot acts on; an empty repositories list means "all repos".
export interface RepoConfig {
  owner: string;
  repo: string;
}
// Event filtering: labels, authors, and (for PRs) branches.
export interface FiltersConfig {
  labels: {
    include: string[]; // non-empty: event must carry at least one of these
    exclude: string[]; // event must carry none of these
  };
  authors: {
    include?: string[]; // optional allow-list
    exclude: string[];
  };
  branches: {
    include: string[]; // empty = all branches (PRs only)
  };
}
// Analysis engine configuration: which backends run and how they combine.
export interface EngineConfig {
  backends: BackendsConfig;
  weights: BackendWeights;
  confidenceThreshold: number; // below this combined confidence, classify neutral
}
export interface BackendsConfig {
  checks: ChecksBackendConfig;
  diff: DiffBackendConfig;
  quality: QualityBackendConfig;
}
// CI checks backend (GitHub Checks API; requires a token).
export interface ChecksBackendConfig {
  enabled: boolean;
}
// Diff-analysis backend (PR diffs only; requires a token).
export interface DiffBackendConfig {
  enabled: boolean;
  maxChanges: number; // PRs above this total line count are flagged as too large
  requireTests: boolean; // flag PRs that change no test files
}
// Text-quality backend (pure text analysis; works without a token).
export interface QualityBackendConfig {
  enabled: boolean;
  minBodyLength: number; // minimum characters for an "adequate" description
}
// Relative importance of each backend in the combined score.
export interface BackendWeights {
  checks: number;
  diff: number;
  quality: number;
}
// Comment rendering and posting behavior.
export interface ResponseConfig {
  includeConfidence: boolean;
  includeReasoning: boolean;
  messages: {
    positive: string[];
    negative: string[];
    neutral: string[];
  };
  commentMarker: string; // hidden marker used to find the bot's own comment
  allowUpdates: boolean; // edit the existing comment instead of skipping
}
export interface LoggingConfig {
  level: string;
  file: string;
}
// Overall verdict for an event.
export type Impact = 'positive' | 'negative' | 'neutral';
// What a backend (or the combined engine) reports for one event.
export interface AnalysisResult {
  impact: Impact;
  confidence: number; // in [0, 1]
  reasoning: string;
}
// Contract implemented by every analysis backend.
export interface EngineBackend {
  name: string;
  analyze(event: WebhookEvent): Promise<AnalysisResult>;
}
// Normalized issue/PR event, produced by the webhook parser or the CLI.
export interface WebhookEvent {
  action: string;
  type: 'issue' | 'pull_request';
  number: number;
  title: string;
  body: string;
  owner: string;
  repo: string;
  author: string;
  labels: string[];
  branch?: string; // PRs only
  sha?: string; // PRs only
}
// A single check run as returned by the GitHub Checks API.
export interface CheckRun {
  name: string;
  status: string;
  conclusion: string | null; // null until the run has completed
}
// One changed file in a PR diff.
export interface PRFile {
  filename: string;
  additions: number;
  deletions: number;
  changes: number;
}

17
tsconfig.json Normal file
View file

@ -0,0 +1,17 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "ES2022",
"moduleResolution": "bundler",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"outDir": "dist",
"rootDir": "src",
"declaration": true,
"resolveJsonModule": true
},
"include": ["src"],
"exclude": ["node_modules", "dist"]
}