initial commit

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Ic08e7c4b5b4f4072de9e2f9a701e977b6a6a6964
This commit is contained in:
raf 2026-01-30 16:46:39 +03:00
commit d95fa2c8e5
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
19 changed files with 5186 additions and 0 deletions

5
.env.example Normal file
View file

@ -0,0 +1,5 @@
GITHUB_TOKEN=ghp_your_personal_access_token
WEBHOOK_SECRET=your_webhook_secret
PORT=3000
CONFIG_PATH=config.ts
LOG_LEVEL=info

2
.gitignore vendored Normal file
View file

@ -0,0 +1,2 @@
node_modules/
dist/

94
config.example.ts Normal file
View file

@ -0,0 +1,94 @@
// Example Troutbot configuration. Copy to `config.ts` (or point CONFIG_PATH at
// your copy) and adjust. Missing keys fall back to the built-in defaults in
// src/config.ts, so a partial config is fine.
import type { Config } from './src/types';

const config: Config = {
  server: {
    port: 3000, // overridden by the PORT env var when set
  },
  repositories: [
    // Leave empty to accept webhooks from any repo.
    // { owner: "myorg", repo: "myrepo" },
  ],
  filters: {
    labels: {
      include: [],
      exclude: ['bot-ignore'],
    },
    authors: {
      exclude: ['dependabot', 'renovate[bot]'],
    },
    branches: {
      include: [], // empty = all branches
    },
  },
  engine: {
    backends: {
      // Queries GitHub Checks API for CI results (ESLint, Clippy, tests, builds).
      // Requires GITHUB_TOKEN.
      checks: { enabled: true },
      // Analyzes PR diff: size, file count, test coverage, net additions/deletions.
      // Requires GITHUB_TOKEN. Only applies to pull_request events.
      diff: {
        enabled: true,
        maxChanges: 1000, // PRs above this line count are flagged as too large
        requireTests: false, // set true to flag PRs with no test file changes
      },
      // Analyzes issue/PR body for structural quality: description length,
      // code blocks, reproduction steps, linked issues, test plans.
      // Works without a token (pure text analysis).
      quality: {
        enabled: true,
        minBodyLength: 50, // minimum characters for an "adequate" description
      },
    },
    // Relative importance of each backend when combining results.
    weights: {
      checks: 0.4,
      diff: 0.3,
      quality: 0.3,
    },
    // Below this combined confidence, classify as neutral.
    confidenceThreshold: 0.1,
  },
  response: {
    includeConfidence: true,
    includeReasoning: true,
    // One message is picked at random from the list matching the impact.
    // Placeholders: {type} (issue/pull request), {impact} (positive/negative/neutral)
    messages: {
      positive: [
        'This {type} looks great for the trout! All signals point upstream.',
        'The trout approve of this {type}. Swim on!',
        'Splashing good news — this {type} is looking healthy.',
      ],
      negative: [
        'This {type} is muddying the waters. The trout are concerned.',
        'Warning: the trout sense trouble in this {type}.',
        'Something smells fishy about this {type}. Please review.',
      ],
      neutral: [
        'The trout have no strong feelings about this {type}.',
        'This {type} is neither upstream nor downstream. Neutral waters.',
        'The trout are watching this {type} with mild interest.',
      ],
    },
    // Hidden HTML marker embedded in bot comments so existing ones are found.
    commentMarker: '<!-- troutbot -->',
    allowUpdates: false, // set true to update comments when CI finishes (requires check_suite webhook)
  },
  logging: {
    level: 'info',
    file: 'troutbot.log',
  },
};

export default config;

31
eslint.config.mjs Normal file
View file

@ -0,0 +1,31 @@
// Flat ESLint config: TypeScript-aware linting for all JS/TS sources,
// skipping build output and installed dependencies.
import tseslint from '@typescript-eslint/eslint-plugin';
import tsparser from '@typescript-eslint/parser';

export default [
  {
    files: ['**/*.ts', '**/*.tsx', '**/*.js', '**/*.jsx'],
    ignores: ['dist/**', 'node_modules/**'],
    languageOptions: {
      ecmaVersion: 2022,
      sourceType: 'module',
      parser: tsparser,
      // Node globals declared by hand rather than pulling in the `globals`
      // package.
      globals: {
        console: 'readonly',
        process: 'readonly',
        setTimeout: 'readonly',
        setInterval: 'readonly',
        clearTimeout: 'readonly',
        clearInterval: 'readonly',
      },
    },
    plugins: {
      '@typescript-eslint': tseslint,
    },
    rules: {
      // Start from the plugin's recommended set, then apply local tweaks.
      ...tseslint.configs.recommended.rules,
      '@typescript-eslint/no-explicit-any': 'warn',
      // `_`-prefixed parameters are conventionally "intentionally unused".
      '@typescript-eslint/no-unused-vars': ['error', { argsIgnorePattern: '^_' }],
      // Server-side app: console output is acceptable.
      'no-console': 'off',
    },
  },
];

32
package.json Normal file
View file

@ -0,0 +1,32 @@
{
"name": "troutbot",
"version": "1.0.0",
"description": "GitHub webhook bot that analyzes issues and PRs for impact on the trout population",
"main": "dist/index.js",
"scripts": {
"build": "tsup src/index.ts --format cjs --dts --clean",
"start": "node dist/index.js",
"dev": "tsx src/index.ts",
"lint": "eslint .",
"fmt": "prettier --write ."
},
"dependencies": {
"@octokit/rest": "^21.0.0",
"dotenv": "^16.4.0",
"express": "^4.21.0",
"express-rate-limit": "^8.2.1",
"jiti": "^2.4.0",
"winston": "^3.14.0"
},
"devDependencies": {
"@types/express": "^5.0.0",
"@types/node": "^22.0.0",
"@typescript-eslint/eslint-plugin": "^8.0.0",
"@typescript-eslint/parser": "^8.0.0",
"eslint": "^9.0.0",
"prettier": "^3.3.0",
"tsup": "^8.3.0",
"tsx": "^4.19.0",
"typescript": "^5.6.0"
}
}

3733
pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load diff

15
prettier.config.mjs Normal file
View file

@ -0,0 +1,15 @@
// Prettier settings — mostly defaults, spelled out explicitly so the
// formatting contract is visible at a glance.
export default {
  printWidth: 100,
  tabWidth: 2,
  useTabs: false,
  semi: true,
  singleQuote: true,
  quoteProps: 'as-needed',
  jsxSingleQuote: false,
  trailingComma: 'es5',
  bracketSpacing: true,
  bracketSameLine: false,
  arrowParens: 'always',
  endOfLine: 'lf',
  plugins: [],
};

145
src/config.ts Normal file
View file

@ -0,0 +1,145 @@
import fs from 'node:fs';
import path from 'node:path';
import { createJiti } from 'jiti';
import dotenv from 'dotenv';
import type { Config } from './types.js';
// Load .env into process.env before any configuration values are read.
dotenv.config();
// jiti lets us evaluate the user's TypeScript config file at runtime.
// NOTE(review): `__filename` assumes a CommonJS context (tsup emits CJS) —
// confirm this also holds under `tsx` dev mode, where an ESM module would
// not have `__filename` defined.
const jiti = createJiti(__filename, { interopDefault: true });
// Built-in defaults. The user's config file is deep-merged over these, so
// every field here must be safe to run with when no config file exists.
const defaults: Config = {
  server: { port: 3000 },
  repositories: [], // empty = accept webhooks from any repository
  filters: {
    labels: { include: [], exclude: [] },
    authors: { exclude: [] },
    branches: { include: [] },
  },
  engine: {
    backends: {
      checks: { enabled: true },
      diff: { enabled: true, maxChanges: 1000, requireTests: false },
      quality: { enabled: true, minBodyLength: 50 },
    },
    // Relative importance of each backend when combining results.
    weights: {
      checks: 0.4,
      diff: 0.3,
      quality: 0.3,
    },
    // Below this combined confidence, classify as neutral.
    confidenceThreshold: 0.1,
  },
  response: {
    includeConfidence: true,
    includeReasoning: true,
    // One message is picked at random from the list matching the impact.
    messages: {
      positive: [
        'This {type} looks great for the trout! All signals point upstream.',
        'The trout approve of this {type}. Swim on!',
        'Splashing good news — this {type} is looking healthy.',
      ],
      negative: [
        'This {type} is muddying the waters. The trout are concerned.',
        'Warning: the trout sense trouble in this {type}.',
        'Something smells fishy about this {type}. Please review.',
      ],
      neutral: [
        'The trout have no strong feelings about this {type}.',
        'This {type} is neither upstream nor downstream. Neutral waters.',
        'The trout are watching this {type} with mild interest.',
      ],
    },
    // Hidden HTML marker embedded in bot comments so existing ones are found.
    commentMarker: '<!-- troutbot -->',
    allowUpdates: false,
  },
  logging: {
    level: 'info',
    file: 'troutbot.log',
  },
};
/**
 * Recursively merges `source` into a shallow copy of `target`.
 *
 * Plain nested objects are merged key-by-key; arrays and primitives from
 * `source` replace the target value wholesale. Keys that are `undefined` in
 * `source` are skipped, so a partial config only overrides what it specifies.
 * Neither input is mutated, but nested objects not overridden by `source`
 * remain shared by reference with `target`.
 */
function deepMerge<T extends Record<string, unknown>>(target: T, source: Partial<T>): T {
  const merged = { ...target };
  for (const key of Object.keys(source) as (keyof T)[]) {
    const incoming = source[key];
    if (incoming === undefined) continue;
    const existing = target[key];
    const bothPlainObjects =
      incoming !== null &&
      typeof incoming === 'object' &&
      !Array.isArray(incoming) &&
      existing !== null &&
      typeof existing === 'object' &&
      !Array.isArray(existing);
    merged[key] = bothPlainObjects
      ? (deepMerge(
          existing as Record<string, unknown>,
          incoming as Record<string, unknown>
        ) as T[keyof T])
      : (incoming as T[keyof T]);
  }
  return merged;
}
/**
 * Builds the effective configuration: the file config (CONFIG_PATH, default
 * `config.ts`) is deep-merged over the built-in defaults, then env-var
 * overrides (PORT, LOG_LEVEL) are applied, then the result is validated.
 *
 * @throws if PORT is not a number, LOG_LEVEL is unknown, or validation fails.
 */
export function loadConfig(): Config {
  const configPath = process.env.CONFIG_PATH || 'config.ts';
  const resolvedPath = path.resolve(configPath);
  let fileConfig: Partial<Config> = {};
  if (fs.existsSync(resolvedPath)) {
    // jiti evaluates the TS config file; depending on module interop it may
    // export the object directly or under `default`.
    const loaded = jiti(resolvedPath) as Partial<Config> | { default: Partial<Config> };
    fileConfig = 'default' in loaded ? loaded.default : loaded;
  } else if (process.env.CONFIG_PATH) {
    console.warn(
      `Warning: CONFIG_PATH is set to "${process.env.CONFIG_PATH}" but file not found at ${resolvedPath}`
    );
  }
  // Clone the defaults before merging: deepMerge shares non-overridden nested
  // objects by reference, so the env-var mutations below would otherwise
  // write straight through into the module-level `defaults` object.
  const config = deepMerge(structuredClone(defaults), fileConfig);
  // Environment variable overrides
  if (process.env.PORT) {
    const parsed = parseInt(process.env.PORT, 10);
    if (Number.isNaN(parsed)) {
      throw new Error(`Invalid PORT value: "${process.env.PORT}" is not a number`);
    }
    config.server.port = parsed;
  }
  const validLogLevels = ['debug', 'info', 'warn', 'error'];
  if (process.env.LOG_LEVEL) {
    if (!validLogLevels.includes(process.env.LOG_LEVEL)) {
      throw new Error(
        `Invalid LOG_LEVEL: "${process.env.LOG_LEVEL}". Must be one of: ${validLogLevels.join(', ')}`
      );
    }
    config.logging.level = process.env.LOG_LEVEL;
  }
  validate(config);
  return config;
}
/**
 * Sanity-checks the merged configuration, throwing on values that would make
 * the bot misbehave at runtime: an unusable port, no enabled backends, a
 * negative backend weight, a zero total weight across enabled backends (the
 * engine divides by the total weight of active backends, which would yield
 * NaN), or a confidence threshold outside [0, 1].
 */
function validate(config: Config): void {
  if (!config.server.port || config.server.port < 1 || config.server.port > 65535) {
    throw new Error('Invalid server port');
  }
  const { backends } = config.engine;
  if (!backends.checks.enabled && !backends.diff.enabled && !backends.quality.enabled) {
    throw new Error('At least one engine backend must be enabled');
  }
  const { weights } = config.engine;
  for (const [key, value] of Object.entries(weights)) {
    if (value < 0) {
      throw new Error(`Backend weight "${key}" must be non-negative, got ${value}`);
    }
  }
  // Guard against an all-zero weighting of the enabled backends: the engine
  // averages by total active weight, so zero would produce NaN scores.
  const enabledWeight =
    (backends.checks.enabled ? weights.checks : 0) +
    (backends.diff.enabled ? weights.diff : 0) +
    (backends.quality.enabled ? weights.quality : 0);
  if (enabledWeight <= 0) {
    throw new Error('Enabled engine backends must have a positive total weight');
  }
  if (config.engine.confidenceThreshold < 0 || config.engine.confidenceThreshold > 1) {
    throw new Error('confidenceThreshold must be between 0 and 1');
  }
}

74
src/engine/checks.ts Normal file
View file

@ -0,0 +1,74 @@
import type { AnalysisResult, ChecksBackendConfig, EngineBackend, WebhookEvent } from '../types.js';
import { fetchCheckRuns } from '../github.js';
import { getLogger } from '../logger.js';
/**
 * Engine backend that scores a PR by its CI check-run results (GitHub
 * Checks API). Without an authenticated client, `fetchCheckRuns` returns no
 * runs and the result stays neutral.
 */
export class ChecksBackend implements EngineBackend {
  name = 'checks';

  constructor(private config: ChecksBackendConfig) {}

  async analyze(event: WebhookEvent): Promise<AnalysisResult> {
    // Check runs are attached to a commit SHA, so only PR events qualify.
    if (event.type !== 'pull_request' || !event.sha) {
      return { impact: 'neutral', confidence: 0, reasoning: 'Not a PR or no SHA available.' };
    }
    let runs;
    try {
      runs = await fetchCheckRuns(event.owner, event.repo, event.sha);
    } catch (err) {
      getLogger().warn(
        `Failed to fetch check runs for ${event.owner}/${event.repo}@${event.sha}`,
        err
      );
      return { impact: 'neutral', confidence: 0, reasoning: 'Could not fetch CI check results.' };
    }
    if (runs.length === 0) {
      return { impact: 'neutral', confidence: 0, reasoning: 'No CI checks found.' };
    }
    const completed = runs.filter((r) => r.status === 'completed');
    if (completed.length === 0) {
      return { impact: 'neutral', confidence: 0.1, reasoning: 'CI checks are still running.' };
    }
    const passed = completed.filter((r) => r.conclusion === 'success');
    const failed = completed.filter(
      (r) =>
        r.conclusion === 'failure' || r.conclusion === 'timed_out' || r.conclusion === 'cancelled'
    );
    const skipped = completed.filter(
      (r) => r.conclusion === 'neutral' || r.conclusion === 'skipped'
    );
    // Skipped/neutral runs carry no signal either way.
    const actionable = completed.length - skipped.length;
    if (actionable === 0) {
      return { impact: 'neutral', confidence: 0.2, reasoning: 'All CI checks were skipped.' };
    }
    const passRate = passed.length / actionable;
    // More checks = more confidence; saturates at 5 actionable runs.
    const confidence = Math.min(1, actionable / 5);
    // Any failure marks the PR negative regardless of pass rate. (The
    // previous code branched on passRate < 0.5, but both branches assigned
    // 'negative' — collapsed into a single condition.)
    const impact: AnalysisResult['impact'] = failed.length === 0 ? 'positive' : 'negative';
    const parts: string[] = [];
    if (passed.length > 0)
      parts.push(`${passed.length} passed (${passed.map((r) => r.name).join(', ')})`);
    if (failed.length > 0)
      parts.push(`${failed.length} failed (${failed.map((r) => r.name).join(', ')})`);
    if (skipped.length > 0) parts.push(`${skipped.length} skipped`);
    return {
      impact,
      confidence,
      reasoning: `CI: ${parts.join('; ')}. Pass rate: ${(passRate * 100).toFixed(0)}%.`,
    };
  }
}

89
src/engine/diff.ts Normal file
View file

@ -0,0 +1,89 @@
import type { AnalysisResult, DiffBackendConfig, EngineBackend, WebhookEvent } from '../types.js';
import { fetchPRFiles } from '../github.js';
import { getLogger } from '../logger.js';
const TEST_FILE_PATTERN = /\b(test|spec|__tests__|_test|_spec|\.test\.|\.spec\.)\b/i;
/**
 * Engine backend that judges a pull request by the shape of its diff:
 * overall size, number of files touched, presence of test changes, and
 * whether the change is a net code removal.
 */
export class DiffBackend implements EngineBackend {
  name = 'diff';

  constructor(private config: DiffBackendConfig) {}

  async analyze(event: WebhookEvent): Promise<AnalysisResult> {
    if (event.type !== 'pull_request') {
      return { impact: 'neutral', confidence: 0, reasoning: 'Not a PR.' };
    }
    let changedFiles;
    try {
      changedFiles = await fetchPRFiles(event.owner, event.repo, event.number);
    } catch (err) {
      getLogger().warn(
        `Failed to fetch PR files for ${event.owner}/${event.repo}#${event.number}`,
        err
      );
      return { impact: 'neutral', confidence: 0, reasoning: 'Could not fetch PR diff.' };
    }
    if (changedFiles.length === 0) {
      return { impact: 'neutral', confidence: 0.1, reasoning: 'Empty diff.' };
    }
    let additions = 0;
    let deletions = 0;
    for (const file of changedFiles) {
      additions += file.additions;
      deletions += file.deletions;
    }
    const churn = additions + deletions;
    const touchesTests = changedFiles.some((f) => TEST_FILE_PATTERN.test(f.filename));

    const signals: { name: string; positive: boolean }[] = [];
    const note = (name: string, positive: boolean) => signals.push({ name, positive });

    // Size: small diffs are a good sign; anything beyond maxChanges is flagged.
    if (churn <= 200) {
      note('small PR', true);
    } else if (churn > this.config.maxChanges) {
      note(`large PR (${churn} lines)`, false);
    }
    // File count: few files suggests a focused change.
    if (changedFiles.length <= 10) {
      note('focused changeset', true);
    } else if (changedFiles.length > 30) {
      note(`sprawling changeset (${changedFiles.length} files)`, false);
    }
    // Tests: reward presence; penalize absence only when configured, and only
    // for non-trivial diffs.
    if (touchesTests) {
      note('includes tests', true);
    } else if (this.config.requireTests && churn > 50) {
      note('no test changes', false);
    }
    // Net deletion is generally good (removing dead code).
    if (deletions > additions && deletions > 10) {
      note('net code removal', true);
    }

    const plus = signals.filter((s) => s.positive).length;
    const minus = signals.length - plus;
    const impact: AnalysisResult['impact'] =
      plus === minus ? 'neutral' : plus > minus ? 'positive' : 'negative';
    const confidence =
      signals.length === 0 ? 0 : Math.min(1, Math.abs(plus - minus) / signals.length + 0.2);
    const reasoning =
      signals.length === 0
        ? 'No diff signals.'
        : `Diff: ${signals.map((s) => `${s.positive ? '+' : '-'} ${s.name}`).join(', ')}. ${additions} additions, ${deletions} deletions across ${changedFiles.length} files.`;
    return { impact, confidence, reasoning };
  }
}

117
src/engine/index.ts Normal file
View file

@ -0,0 +1,117 @@
import type {
AnalysisResult,
EngineBackend,
EngineConfig,
Impact,
WebhookEvent,
} from '../types.js';
import { ChecksBackend } from './checks.js';
import { DiffBackend } from './diff.js';
import { QualityBackend } from './quality.js';
import { getLogger } from '../logger.js';
// Numeric projection of an impact label so weighted averaging works:
// positive = +1, neutral = 0, negative = -1.
const impactToNumeric: Record<Impact, number> = {
  positive: 1,
  neutral: 0,
  negative: -1,
};

// A backend paired with its configured relative weight.
interface WeightedBackend {
  backend: EngineBackend;
  weight: number;
}
/**
 * Combines the enabled analysis backends into one weighted verdict.
 *
 * Each backend reports an impact (positive/neutral/negative) and a
 * confidence in [0, 1]. Impacts are projected to -1/0/+1, scaled by
 * confidence and configured weight, and averaged over the backends that
 * produced a signal; the sign of that average picks the combined impact.
 * A backend that throws is treated as neutral with zero confidence rather
 * than failing the whole analysis.
 */
export class Engine {
  private backends: WeightedBackend[] = [];
  private confidenceThreshold: number;

  constructor(config: EngineConfig) {
    this.confidenceThreshold = config.confidenceThreshold;
    if (config.backends.checks.enabled) {
      this.backends.push({
        backend: new ChecksBackend(config.backends.checks),
        weight: config.weights.checks,
      });
    }
    if (config.backends.diff.enabled) {
      this.backends.push({
        backend: new DiffBackend(config.backends.diff),
        weight: config.weights.diff,
      });
    }
    if (config.backends.quality.enabled) {
      this.backends.push({
        backend: new QualityBackend(config.backends.quality),
        weight: config.weights.quality,
      });
    }
    if (this.backends.length === 0) {
      throw new Error('No engine backends enabled');
    }
  }

  async analyze(event: WebhookEvent): Promise<AnalysisResult> {
    const logger = getLogger();
    // Run every backend in parallel; isolate failures so one broken backend
    // cannot take down the combined analysis.
    const results = await Promise.all(
      this.backends.map(async ({ backend, weight }) => {
        try {
          const result = await backend.analyze(event);
          logger.debug(
            `Backend "${backend.name}": impact=${result.impact}, confidence=${result.confidence.toFixed(2)}`
          );
          return { result, weight };
        } catch (err) {
          logger.error(`Backend "${backend.name}" threw unexpectedly`, err);
          return {
            result: {
              impact: 'neutral' as Impact,
              confidence: 0,
              reasoning: `${backend.name}: error`,
            },
            weight,
          };
        }
      })
    );
    // Only backends that actually produced a signal (confidence > 0) take
    // part in the weighted combination.
    const active = results.filter((r) => r.result.confidence > 0);
    if (active.length === 0) {
      return { impact: 'neutral', confidence: 0, reasoning: 'No backends produced a signal.' };
    }
    const totalWeight = active.reduce((s, r) => s + r.weight, 0);
    const combinedScore =
      active.reduce(
        (s, r) => s + impactToNumeric[r.result.impact] * r.result.confidence * r.weight,
        0
      ) / totalWeight;
    const combinedConfidence =
      active.reduce((s, r) => s + r.result.confidence * r.weight, 0) / totalWeight;
    // A ±0.1 dead zone around zero keeps weak aggregate signals neutral.
    let impact: Impact;
    if (combinedScore > 0.1) {
      impact = 'positive';
    } else if (combinedScore < -0.1) {
      impact = 'negative';
    } else {
      impact = 'neutral';
    }
    // Below the configured confidence floor, never take a side.
    if (combinedConfidence < this.confidenceThreshold) {
      impact = 'neutral';
    }
    // Reuse `active` — the previous code re-filtered `results` with the
    // exact same confidence predicate.
    const reasoning = active.map((r) => r.result.reasoning).join(' ');
    return { impact, confidence: combinedConfidence, reasoning };
  }
}
/** Convenience factory: builds an Engine from the engine section of the config. */
export function createEngine(config: EngineConfig): Engine {
  return new Engine(config);
}

100
src/engine/quality.ts Normal file
View file

@ -0,0 +1,100 @@
import type {
AnalysisResult,
EngineBackend,
QualityBackendConfig,
WebhookEvent,
} from '../types.js';
/**
 * Engine backend that scores an issue/PR body on structural quality:
 * description length, code blocks, headers, reproduction steps, linked
 * issues, test plans, and images. Pure text analysis — needs no token.
 */
export class QualityBackend implements EngineBackend {
  name = 'quality';

  constructor(private config: QualityBackendConfig) {}

  async analyze(event: WebhookEvent): Promise<AnalysisResult> {
    const text = event.body.trim();
    const signals: { name: string; positive: boolean }[] = [];
    const note = (name: string, positive: boolean) => signals.push({ name, positive });

    // Negative signals first: missing or too-short description.
    if (text.length === 0) {
      note('empty body', false);
    } else if (text.length < this.config.minBodyLength) {
      note(`short body (${text.length} chars)`, false);
    }
    // Positive structural signals.
    if (text.length >= this.config.minBodyLength) {
      note('adequate description', true);
    }
    if (/```[\s\S]*?```/.test(text)) {
      note('has code blocks', true);
    }
    if (/^#{1,6}\s/m.test(text) || /\*\*[^*]+\*\*:?/m.test(text)) {
      note('has structure/headers', true);
    }
    // Issue-specific signals.
    if (event.type === 'issue') {
      if (/\b(steps?\s+to\s+reproduce|reproduction|repro\s+steps?)\b/i.test(text)) {
        note('has reproduction steps', true);
      }
      if (/\b(expected|actual)\s+(behavior|behaviour|result|output)\b/i.test(text)) {
        note('has expected/actual behavior', true);
      }
      if (/\b(version|environment|os|platform|browser)\b/i.test(text)) {
        note('has environment info', true);
      }
    }
    // PR-specific signals.
    if (event.type === 'pull_request') {
      if (/\b(fix(es)?|clos(es|ing)|resolv(es|ing))\s+#\d+/i.test(text)) {
        note('links to issue', true);
      }
      if (/\b(test\s*(plan|strategy|coverage)|how\s+to\s+test|testing)\b/i.test(text)) {
        note('has test plan', true);
      }
    }
    // Shared: references to other issues/PRs.
    if (/#\d+/.test(text)) {
      note('references issues/PRs', true);
    }
    // Screenshots or images (markdown or inline HTML).
    if (/!\[.*\]\(.*\)/.test(text) || /<img\s/i.test(text)) {
      note('has images/screenshots', true);
    }

    // Scoring: majority of signals decides the impact; confidence grows with
    // the signal count (saturating at 1), floored at 0.15.
    if (signals.length === 0) {
      return { impact: 'neutral', confidence: 0.1, reasoning: 'No quality signals detected.' };
    }
    const plus = signals.filter((s) => s.positive).length;
    const minus = signals.length - plus;
    const impact: AnalysisResult['impact'] =
      plus === minus ? 'neutral' : plus > minus ? 'positive' : 'negative';
    const confidence = Math.min(1, (plus + minus) / 6 + 0.15);
    const reasoning = `Quality: ${signals.map((s) => `${s.positive ? '+' : '-'} ${s.name}`).join(', ')}.`;
    return { impact, confidence, reasoning };
  }
}

43
src/filters.ts Normal file
View file

@ -0,0 +1,43 @@
import type { FiltersConfig, WebhookEvent } from './types.js';
/**
 * Applies the configured label/author/branch filters to an event.
 *
 * Label and branch comparisons are exact (GitHub preserves their casing);
 * author logins are compared case-insensitively because GitHub usernames are
 * case-insensitive, so a config entry like "BadActor" still matches the
 * canonical login "badactor".
 *
 * @returns pass=true when the event should be analyzed; otherwise a reason.
 */
export function shouldProcess(
  event: WebhookEvent,
  filters: FiltersConfig
): { pass: boolean; reason?: string } {
  // Label filters
  if (filters.labels.include.length > 0) {
    const hasRequired = event.labels.some((l) => filters.labels.include.includes(l));
    if (!hasRequired) {
      return { pass: false, reason: 'Missing required label' };
    }
  }
  if (filters.labels.exclude.length > 0) {
    const hasExcluded = event.labels.some((l) => filters.labels.exclude.includes(l));
    if (hasExcluded) {
      return { pass: false, reason: 'Has excluded label' };
    }
  }
  // Author filters (case-insensitive: GitHub logins are case-insensitive)
  const author = event.author.toLowerCase();
  if (filters.authors.include && filters.authors.include.length > 0) {
    if (!filters.authors.include.some((a) => a.toLowerCase() === author)) {
      return { pass: false, reason: 'Author not in include list' };
    }
  }
  if (filters.authors.exclude.length > 0) {
    if (filters.authors.exclude.some((a) => a.toLowerCase() === author)) {
      return { pass: false, reason: 'Author is excluded' };
    }
  }
  // Branch filters (PRs only — issues carry no branch)
  if (event.branch && filters.branches.include.length > 0) {
    if (!filters.branches.include.includes(event.branch)) {
      return { pass: false, reason: 'Branch not in include list' };
    }
  }
  return { pass: true };
}

187
src/github.ts Normal file
View file

@ -0,0 +1,187 @@
import { Octokit } from '@octokit/rest';
import { getLogger } from './logger.js';
import type { CheckRun, PRFile, ResponseConfig } from './types.js';
// Module-level Octokit client; null means dry-run mode (no token).
let octokit: Octokit | null = null;

/**
 * Initializes the shared Octokit client. Without a token the module stays in
 * dry-run mode: read helpers return empty results and write helpers only log
 * what they would have done.
 */
export function initGitHub(token?: string): void {
  if (token) {
    octokit = new Octokit({ auth: token });
    return;
  }
  getLogger().warn('No GITHUB_TOKEN set — running in dry-run mode, comments will not be posted');
}

/** True when no authenticated client is configured (see initGitHub). */
export function isDryRun(): boolean {
  return !octokit;
}
// --- Comment operations ---
/**
 * Posts a new comment on an issue or PR. In dry-run mode the comment body is
 * logged instead of sent.
 */
export async function postComment(
  owner: string,
  repo: string,
  issueNumber: number,
  body: string
): Promise<void> {
  if (octokit) {
    await octokit.issues.createComment({ owner, repo, issue_number: issueNumber, body });
    getLogger().info(`Posted comment on ${owner}/${repo}#${issueNumber}`);
    return;
  }
  getLogger().info(`[dry-run] Would post comment on ${owner}/${repo}#${issueNumber}:\n${body}`);
}
/**
 * Scans every comment on an issue/PR for one containing `marker` (the bot's
 * hidden HTML marker), so the bot can avoid double-posting. Always reports
 * "no comment" in dry-run mode.
 */
export async function hasExistingComment(
  owner: string,
  repo: string,
  issueNumber: number,
  marker: string
): Promise<{ exists: boolean; commentId?: number }> {
  if (!octokit) {
    return { exists: false };
  }
  // paginate() walks every page, so the marker is found even on busy threads.
  const allComments = await octokit.paginate(octokit.issues.listComments, {
    owner,
    repo,
    issue_number: issueNumber,
    per_page: 100,
  });
  const match = allComments.find((c) => c.body?.includes(marker));
  return match ? { exists: true, commentId: match.id } : { exists: false };
}
/**
 * Replaces the body of an existing comment. In dry-run mode the new body is
 * logged instead of sent.
 */
export async function updateComment(
  owner: string,
  repo: string,
  commentId: number,
  body: string
): Promise<void> {
  if (octokit) {
    await octokit.issues.updateComment({ owner, repo, comment_id: commentId, body });
    getLogger().info(`Updated comment ${commentId} on ${owner}/${repo}`);
    return;
  }
  getLogger().info(`[dry-run] Would update comment ${commentId}:\n${body}`);
}
// --- Data fetching for engine backends ---
/**
 * Lists the check runs for a commit ref, reduced to the fields the checks
 * backend needs. Returns an empty list in dry-run mode (no token).
 */
export async function fetchCheckRuns(
  owner: string,
  repo: string,
  ref: string
): Promise<CheckRun[]> {
  if (!octokit) {
    getLogger().debug('[dry-run] Cannot fetch check runs without a token');
    return [];
  }
  // NOTE(review): only the first page (100 runs) is fetched — no pagination.
  // Confirm repositories here never exceed 100 check runs per commit.
  const { data } = await octokit.checks.listForRef({ owner, repo, ref, per_page: 100 });
  return data.check_runs.map(({ name, status, conclusion }) => ({ name, status, conclusion }));
}
/**
 * Lists every changed file in a PR (paginated), reduced to the fields the
 * diff backend needs. Returns an empty list in dry-run mode (no token).
 */
export async function fetchPRFiles(
  owner: string,
  repo: string,
  prNumber: number
): Promise<PRFile[]> {
  if (!octokit) {
    getLogger().debug('[dry-run] Cannot fetch PR files without a token');
    return [];
  }
  const changedFiles = await octokit.paginate(octokit.pulls.listFiles, {
    owner,
    repo,
    pull_number: prNumber,
    per_page: 100,
  });
  return changedFiles.map(({ filename, additions, deletions, changes }) => ({
    filename,
    additions,
    deletions,
    changes,
  }));
}
/**
 * Fetches the PR fields needed to rebuild an analysis event outside a normal
 * webhook delivery (used when re-analyzing after a check_suite completes).
 * Returns null in dry-run mode (no token).
 */
export async function fetchPR(
  owner: string,
  repo: string,
  prNumber: number
): Promise<{
  title: string;
  body: string;
  author: string;
  labels: string[];
  branch: string;
  sha: string;
} | null> {
  if (!octokit) return null;
  const { data: pr } = await octokit.pulls.get({ owner, repo, pull_number: prNumber });
  // Labels may arrive as plain strings or as label objects.
  const labelNames = (pr.labels || []).map((label) =>
    typeof label === 'string' ? label : label.name || ''
  );
  return {
    title: pr.title,
    body: pr.body || '',
    author: pr.user?.login || '',
    labels: labelNames,
    branch: pr.head.ref,
    sha: pr.head.sha,
  };
}
// --- Comment formatting ---
/** Uniformly picks one entry from a non-empty list. */
function pickRandom(list: string[]): string {
  const index = Math.floor(Math.random() * list.length);
  return list[index];
}

/**
 * Renders the bot's comment: the hidden marker, a randomly chosen message
 * template for the impact (with {type}/{impact} placeholders filled in),
 * then optional confidence and reasoning sections — all joined by blank
 * lines.
 */
export function formatComment(
  responseConfig: ResponseConfig,
  type: 'issue' | 'pull_request',
  impact: string,
  confidence: number,
  reasoning: string
): string {
  const typeLabel = type === 'pull_request' ? 'pull request' : 'issue';
  const { messages } = responseConfig;
  const pool =
    impact === 'positive'
      ? messages.positive
      : impact === 'negative'
        ? messages.negative
        : messages.neutral;
  const filled = pickRandom(pool)
    .replace(/\{type\}/g, typeLabel)
    .replace(/\{impact\}/g, impact);
  const sections = [responseConfig.commentMarker, filled];
  if (responseConfig.includeConfidence) {
    sections.push(`**Confidence:** ${(confidence * 100).toFixed(0)}%`);
  }
  if (responseConfig.includeReasoning) {
    sections.push(`**Analysis:** ${reasoning}`);
  }
  return sections.join('\n\n');
}

50
src/index.ts Normal file
View file

@ -0,0 +1,50 @@
import { loadConfig } from './config.js';
import { initLogger, getLogger } from './logger.js';
import { initGitHub } from './github.js';
import { createApp } from './server.js';
/**
 * Process entry point: load config, wire up logging and the GitHub client,
 * start the HTTP server, and install graceful-shutdown handlers.
 * Order matters: initLogger must run before initGitHub, which logs a warning
 * when no token is present.
 */
function main() {
  const config = loadConfig();
  initLogger(config.logging);
  const logger = getLogger();
  initGitHub(process.env.GITHUB_TOKEN);
  if (!process.env.GITHUB_TOKEN) {
    logger.warn(
      'No GITHUB_TOKEN — running in dry-run mode (checks and diff backends will be inactive)'
    );
  }
  if (!process.env.WEBHOOK_SECRET) {
    logger.warn('No WEBHOOK_SECRET - webhook signature verification is disabled');
  }
  const app = createApp(config);
  const port = config.server.port;
  const enabledBackends = Object.entries(config.engine.backends)
    .filter(([, v]) => v.enabled)
    .map(([k]) => k);
  const server = app.listen(port, () => {
    logger.info(`Troutbot listening on port ${port}`);
    logger.info(`Enabled backends: ${enabledBackends.join(', ')}`);
  });
  // Graceful shutdown: stop accepting connections, let in-flight requests
  // finish, and force-exit after 10s so a stuck connection cannot block
  // termination. unref() keeps the timer itself from holding the process open.
  function shutdown(signal: string) {
    logger.info(`Received ${signal}, shutting down gracefully...`);
    server.close(() => {
      logger.info('Server closed');
      process.exit(0);
    });
    setTimeout(() => {
      logger.warn('Graceful shutdown timed out, forcing exit');
      process.exit(1);
    }, 10_000).unref();
  }
  process.on('SIGTERM', () => shutdown('SIGTERM'));
  process.on('SIGINT', () => shutdown('SIGINT'));
}

main();

53
src/logger.ts Normal file
View file

@ -0,0 +1,53 @@
import winston from 'winston';
import type { LoggingConfig } from './types.js';
// Winston format that scrubs likely secrets from log messages before any
// transport sees them: GitHub personal access tokens (ghp_...) and values
// following "secret=" / "secret:" assignments.
// NOTE(review): only `info.message` is scrubbed — secrets inside attached
// error objects or structured metadata would still be logged verbatim.
const redactSecrets = winston.format((info) => {
  const msg = info.message as string;
  if (typeof msg === 'string') {
    info.message = msg
      .replace(/ghp_[a-zA-Z0-9]{36,}/g, 'ghp_***REDACTED***')
      .replace(/(?<=secret[=: ]+)[^\s,}]+/gi, '***REDACTED***');
  }
  return info;
});
// Module-level singleton; set by initLogger, lazily defaulted by getLogger.
let logger: winston.Logger;

/**
 * Creates the application logger: colorized console output always, plus a
 * JSON file transport when config.file is set. Secret redaction is applied
 * at the logger level, so it runs before any transport-specific formatting.
 */
export function initLogger(config: LoggingConfig): winston.Logger {
  const consoleTransport = new winston.transports.Console({
    format: winston.format.combine(winston.format.colorize(), winston.format.simple()),
  });
  const transports: winston.transport[] = [consoleTransport];
  if (config.file) {
    transports.push(
      new winston.transports.File({
        filename: config.file,
        format: winston.format.combine(winston.format.timestamp(), winston.format.json()),
      })
    );
  }
  logger = winston.createLogger({
    level: config.level || 'info',
    format: winston.format.combine(
      redactSecrets(),
      winston.format.timestamp(),
      winston.format.errors({ stack: true })
    ),
    transports,
  });
  return logger;
}
/**
 * Returns the shared logger. If initLogger has not run yet (e.g. during very
 * early startup), falls back to a plain console logger WITHOUT secret
 * redaction — avoid logging sensitive values before initialization.
 */
export function getLogger(): winston.Logger {
  logger ??= winston.createLogger({
    level: 'info',
    transports: [new winston.transports.Console()],
  });
  return logger;
}

277
src/server.ts Normal file
View file

@ -0,0 +1,277 @@
import crypto from 'node:crypto';
import express from 'express';
import rateLimit from 'express-rate-limit';
import type { Config, WebhookEvent } from './types.js';
import { shouldProcess } from './filters.js';
import { createEngine } from './engine/index.js';
import {
fetchPR,
formatComment,
hasExistingComment,
postComment,
updateComment,
} from './github.js';
import { getLogger } from './logger.js';
// Process start time, reported as uptime by /health.
const startTime = Date.now();

/**
 * Builds the Express app: JSON body parsing (with the raw bytes captured for
 * HMAC verification), a 30s response timeout, a per-minute rate limit on the
 * webhook route, a /health endpoint, and the /webhook handler itself.
 */
export function createApp(config: Config): express.Express {
  const app = express();
  const logger = getLogger();
  const engine = createEngine(config.engine);
  app.use(
    express.json({
      limit: '1mb',
      // Keep the exact request bytes: the HMAC signature is computed over the
      // raw body, not over re-serialized JSON.
      verify: (req, _res, buf) => {
        (req as unknown as Record<string, Buffer>).rawBody = buf;
      },
    })
  );
  app.use((_req, res, next) => {
    res.setTimeout(30_000, () => {
      logger.warn('Response timeout reached (30s)');
      if (!res.headersSent) {
        res.status(504).json({ error: 'Response timeout' });
      }
    });
    next();
  });
  const webhookLimiter = rateLimit({
    windowMs: 60_000,
    limit: config.server.rateLimit ?? 120,
    standardHeaders: 'draft-7',
    legacyHeaders: false,
    message: { error: 'Too many requests, please try again later' },
  });
  const enabledBackends = Object.entries(config.engine.backends)
    .filter(([, v]) => v.enabled)
    .map(([k]) => k);
  app.get('/health', (_req, res) => {
    res.json({
      status: 'ok',
      uptime: Math.floor((Date.now() - startTime) / 1000),
      version: process.env.npm_package_version ?? 'unknown',
      dryRun: !process.env.GITHUB_TOKEN,
      backends: enabledBackends,
    });
  });
  app.post('/webhook', webhookLimiter, async (req, res) => {
    try {
      // Signature verification (skipped entirely when WEBHOOK_SECRET is unset).
      const secret = process.env.WEBHOOK_SECRET;
      if (secret) {
        const signature = req.headers['x-hub-signature-256'] as string | undefined;
        if (!signature) {
          logger.warn('Missing webhook signature');
          res.status(401).json({ error: 'Missing signature' });
          return;
        }
        const rawBody = (req as unknown as Record<string, Buffer>).rawBody;
        if (!rawBody) {
          // The body never went through the JSON parser (e.g. wrong content
          // type), so there is nothing to verify the signature against —
          // previously this crashed createHmac().update(undefined) into a 500.
          logger.warn('Missing raw body for signature verification');
          res.status(401).json({ error: 'Invalid signature' });
          return;
        }
        const expected =
          'sha256=' + crypto.createHmac('sha256', secret).update(rawBody).digest('hex');
        const sigBuf = Buffer.from(signature);
        const expBuf = Buffer.from(expected);
        // timingSafeEqual throws a RangeError on length mismatch, so compare
        // lengths first (length is not secret) instead of letting a malformed
        // header turn into a 500.
        if (sigBuf.length !== expBuf.length || !crypto.timingSafeEqual(sigBuf, expBuf)) {
          logger.warn('Invalid webhook signature');
          res.status(401).json({ error: 'Invalid signature' });
          return;
        }
      }
      const eventType = req.headers['x-github-event'] as string;
      const payload = req.body;
      // check_suite completion → re-analyze the PRs attached to it.
      if (eventType === 'check_suite' && payload.action === 'completed') {
        await handleCheckSuiteCompleted(payload, config, engine);
        res.json({ processed: true, event: 'check_suite' });
        return;
      }
      if (eventType !== 'issues' && eventType !== 'pull_request') {
        res.json({ skipped: true, reason: `Unhandled event: ${eventType}` });
        return;
      }
      const action = payload.action as string;
      if (!['opened', 'edited', 'synchronize'].includes(action)) {
        res.json({ skipped: true, reason: `Unhandled action: ${action}` });
        return;
      }
      const event = parseEvent(eventType, payload);
      if (!event) {
        res.json({ skipped: true, reason: 'Could not parse event' });
        return;
      }
      const result = await analyzeAndComment(event, config, engine);
      res.json(result);
    } catch (err) {
      logger.error('Error processing webhook', err);
      res.status(500).json({ error: 'Internal server error' });
    }
  });
  return app;
}
/**
 * Full pipeline for one issue/PR event: repo allow-list check, configured
 * filters, engine analysis, then post (or update) the bot comment.
 *
 * @returns a summary object that the webhook handler echoes back as JSON.
 */
async function analyzeAndComment(
  event: WebhookEvent,
  config: Config,
  engine: ReturnType<typeof createEngine>
): Promise<Record<string, unknown>> {
  const logger = getLogger();
  // Repo allow-list: an empty list means "accept every repository".
  const { repositories } = config;
  if (repositories.length > 0) {
    const configured = repositories.some((r) => r.owner === event.owner && r.repo === event.repo);
    if (!configured) {
      logger.debug(`Ignoring event for unconfigured repo ${event.owner}/${event.repo}`);
      return { skipped: true, reason: 'Repository not configured' };
    }
  }
  // Label/author/branch filters.
  const verdict = shouldProcess(event, config.filters);
  if (!verdict.pass) {
    logger.debug(`Filtered out: ${verdict.reason}`);
    return { skipped: true, reason: verdict.reason };
  }
  // Run the analysis engine.
  const result = await engine.analyze(event);
  logger.info(
    `Analyzed ${event.owner}/${event.repo}#${event.number}: impact=${result.impact}, confidence=${result.confidence.toFixed(2)}`
  );
  // Avoid double-posting: look for a previous bot comment by its marker.
  const { commentMarker, allowUpdates } = config.response;
  const existing = await hasExistingComment(event.owner, event.repo, event.number, commentMarker);
  if (existing.exists && !allowUpdates) {
    logger.info(`Already commented on ${event.owner}/${event.repo}#${event.number}, skipping`);
    return { skipped: true, reason: 'Already commented' };
  }
  const body = formatComment(
    config.response,
    event.type,
    result.impact,
    result.confidence,
    result.reasoning
  );
  if (existing.exists && allowUpdates && existing.commentId) {
    await updateComment(event.owner, event.repo, existing.commentId, body);
  } else {
    await postComment(event.owner, event.repo, event.number, body);
  }
  return { processed: true, impact: result.impact, confidence: result.confidence };
}
/**
 * Re-analyzes every pull request attached to a completed check suite so the
 * bot comment reflects the final CI outcome. Only active when comment updates
 * are enabled; a failure on one PR is logged and does not stop the others.
 *
 * @param payload - Raw check_suite webhook payload.
 * @param config - Loaded application configuration.
 * @param engine - Analysis engine created via createEngine().
 */
async function handleCheckSuiteCompleted(
  payload: Record<string, unknown>,
  config: Config,
  engine: ReturnType<typeof createEngine>
): Promise<void> {
  const logger = getLogger();

  // Without allowUpdates a second comment would never be posted, so there is
  // nothing to refresh.
  if (!config.response.allowUpdates) {
    logger.debug('check_suite received but allowUpdates is false, skipping');
    return;
  }

  const suite = payload.check_suite as Record<string, unknown>;
  const prs = (suite.pull_requests as Array<Record<string, unknown>>) || [];
  const repository = payload.repository as Record<string, unknown>;
  const repoName = repository.name as string;
  const owner = (repository.owner as Record<string, unknown>).login as string;

  // Sequential on purpose: one GitHub fetch + comment round-trip per PR.
  for (const pr of prs) {
    const prNumber = pr.number as number;
    logger.info(`Re-analyzing ${owner}/${repoName}#${prNumber} after check_suite completed`);
    try {
      const prData = await fetchPR(owner, repoName, prNumber);
      if (!prData) {
        logger.warn(`Could not fetch PR ${owner}/${repoName}#${prNumber}`);
        continue;
      }
      // Synthesize a pull_request event so the normal pipeline runs unchanged.
      const event: WebhookEvent = {
        action: 'check_suite_completed',
        type: 'pull_request',
        number: prNumber,
        title: prData.title,
        body: prData.body,
        owner,
        repo: repoName,
        author: prData.author,
        labels: prData.labels,
        branch: prData.branch,
        sha: prData.sha,
      };
      await analyzeAndComment(event, config, engine);
    } catch (err) {
      logger.error(`Failed to re-analyze PR ${owner}/${repoName}#${prNumber}`, err);
    }
  }
}
/**
 * Normalizes a raw GitHub webhook payload into the internal WebhookEvent
 * shape.
 *
 * @param eventType - Value of the `x-github-event` header.
 * @param payload - Raw JSON body of the webhook delivery.
 * @returns The normalized event for 'issues' / 'pull_request' deliveries, or
 *          null for any other event type or a payload missing the expected
 *          structure (extraction errors are swallowed and reported as null).
 */
function parseEvent(eventType: string, payload: Record<string, unknown>): WebhookEvent | null {
  try {
    if (eventType !== 'issues' && eventType !== 'pull_request') {
      return null;
    }

    // Fields shared by both event shapes.
    const repository = payload.repository as Record<string, unknown>;
    const repoName = repository.name as string;
    const ownerLogin = (repository.owner as Record<string, unknown>).login as string;
    const action = payload.action as string;
    const labelNames = (raw: unknown): string[] =>
      ((raw as Array<Record<string, unknown>>) || []).map((l) => l.name as string);

    if (eventType === 'issues') {
      const issue = payload.issue as Record<string, unknown>;
      return {
        action,
        type: 'issue',
        number: issue.number as number,
        title: (issue.title as string) || '',
        body: (issue.body as string) || '',
        owner: ownerLogin,
        repo: repoName,
        author: (issue.user as Record<string, unknown>).login as string,
        labels: labelNames(issue.labels),
      };
    }

    const pr = payload.pull_request as Record<string, unknown>;
    const head = pr.head as Record<string, unknown>;
    return {
      action,
      type: 'pull_request',
      number: pr.number as number,
      title: (pr.title as string) || '',
      body: (pr.body as string) || '',
      owner: ownerLogin,
      repo: repoName,
      author: (pr.user as Record<string, unknown>).login as string,
      labels: labelNames(pr.labels),
      branch: head.ref as string,
      sha: head.sha as string,
    };
  } catch {
    return null;
  }
}

122
src/types.ts Normal file
View file

@ -0,0 +1,122 @@
/**
 * Top-level application configuration, supplied by the user's config module
 * (see config.example.ts).
 */
export interface Config {
  server: ServerConfig;
  repositories: RepoConfig[];
  filters: FiltersConfig;
  engine: EngineConfig;
  response: ResponseConfig;
  logging: LoggingConfig;
}
/** HTTP server settings. */
export interface ServerConfig {
  /** Port the webhook server listens on. */
  port: number;
  // NOTE(review): units/window for rateLimit (req/sec? req/min?) are not
  // evident from this file — confirm against the middleware that consumes it.
  rateLimit?: number;
}

/**
 * One repository the bot acts on. An empty Config.repositories list accepts
 * webhooks from any repo (per config.example.ts).
 */
export interface RepoConfig {
  owner: string;
  repo: string;
}

/** Event filters applied before analysis (see shouldProcess). */
export interface FiltersConfig {
  labels: {
    // presumably: empty include = no label restriction; confirm in shouldProcess
    include: string[];
    /** Events carrying any of these labels are skipped (e.g. 'bot-ignore'). */
    exclude: string[];
  };
  authors: {
    include?: string[];
    /** Authors to ignore, e.g. 'dependabot', 'renovate[bot]'. */
    exclude: string[];
  };
  branches: {
    /** Branch allow-list; empty = all branches (per config.example.ts). */
    include: string[];
  };
}
/** Analysis engine configuration: which backends run and how they combine. */
export interface EngineConfig {
  backends: BackendsConfig;
  /** Relative importance of each backend when combining results. */
  weights: BackendWeights;
  /** Below this combined confidence, the result is classified as neutral. */
  confidenceThreshold: number;
}

/** Per-backend toggles and tuning. */
export interface BackendsConfig {
  checks: ChecksBackendConfig;
  diff: DiffBackendConfig;
  quality: QualityBackendConfig;
}

/**
 * Backend that queries the GitHub Checks API for CI results.
 * Requires GITHUB_TOKEN (per config.example.ts).
 */
export interface ChecksBackendConfig {
  enabled: boolean;
}

/**
 * Backend that analyzes the PR diff (size, file count, test coverage).
 * Only applies to pull_request events.
 */
export interface DiffBackendConfig {
  enabled: boolean;
  /** PRs above this changed-line count are flagged as too large. */
  maxChanges: number;
  /** When true, PRs with no test file changes are flagged. */
  requireTests: boolean;
}

/** Backend that scores issue/PR body structure; works without a token. */
export interface QualityBackendConfig {
  enabled: boolean;
  /** Minimum characters for an "adequate" description. */
  minBodyLength: number;
}

/** Weight of each backend's result in the combined score. */
export interface BackendWeights {
  checks: number;
  diff: number;
  quality: number;
}
/** Controls the comment the bot posts back on the issue/PR. */
export interface ResponseConfig {
  /** Include the numeric confidence in the comment body. */
  includeConfidence: boolean;
  /** Include the engine's reasoning text in the comment body. */
  includeReasoning: boolean;
  /** One message is picked at random from the list matching the impact. */
  messages: {
    positive: string[];
    negative: string[];
    neutral: string[];
  };
  /** Marker string used to recognize the bot's own prior comment. */
  commentMarker: string;
  /**
   * When true, an existing marker comment is updated in place (and completed
   * check suites trigger re-analysis); when false, an already-commented
   * issue/PR is skipped.
   */
  allowUpdates: boolean;
}

/** Logging settings. */
export interface LoggingConfig {
  /** Log level name, e.g. 'info' (see LOG_LEVEL in .env.example). */
  level: string;
  // presumably a log file path; confirm against the logger setup
  file: string;
}
/** Overall classification of an issue/PR produced by the engine. */
export type Impact = 'positive' | 'negative' | 'neutral';

/** Outcome of one analysis run (single backend or combined). */
export interface AnalysisResult {
  impact: Impact;
  /** Confidence in the impact classification; formatted with toFixed(2) in logs. */
  confidence: number;
  /** Human-readable explanation, optionally included in the posted comment. */
  reasoning: string;
}

/** Contract every analysis backend implements. */
export interface EngineBackend {
  name: string;
  analyze(event: WebhookEvent): Promise<AnalysisResult>;
}
/**
 * Normalized GitHub webhook event, produced by parseEvent (or synthesized for
 * check_suite re-analysis).
 */
export interface WebhookEvent {
  /** Webhook action (e.g. 'opened'), or the synthetic 'check_suite_completed'. */
  action: string;
  type: 'issue' | 'pull_request';
  /** Issue or PR number. */
  number: number;
  title: string;
  body: string;
  owner: string;
  repo: string;
  author: string;
  labels: string[];
  /** Head branch ref — only present for pull_request events. */
  branch?: string;
  /** Head commit SHA — only present for pull_request events. */
  sha?: string;
}

/** A single check run as reported by the GitHub Checks API. */
export interface CheckRun {
  name: string;
  status: string;
  /** null while the run has not completed. */
  conclusion: string | null;
}

/** Per-file change statistics for a pull request diff. */
export interface PRFile {
  filename: string;
  additions: number;
  deletions: number;
  changes: number;
}

17
tsconfig.json Normal file
View file

@ -0,0 +1,17 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "ES2022",
"moduleResolution": "bundler",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"outDir": "dist",
"rootDir": "src",
"declaration": true,
"resolveJsonModule": true
},
"include": ["src"],
"exclude": ["node_modules", "dist"]
}