diff --git a/crates/common/migrations/012_build_metrics.sql b/crates/common/migrations/012_build_metrics.sql
new file mode 100644
index 0000000..b9ca6a3
--- /dev/null
+++ b/crates/common/migrations/012_build_metrics.sql
@@ -0,0 +1,46 @@
+-- Migration: Add build metrics collection
+-- Stores timing, size, and performance metrics for builds
+
+-- Create build_metrics table
+CREATE TABLE build_metrics (
+    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+    build_id UUID NOT NULL REFERENCES builds(id) ON DELETE CASCADE,
+    metric_name VARCHAR(100) NOT NULL,
+    metric_value DOUBLE PRECISION NOT NULL,
+    unit VARCHAR(50) NOT NULL,
+    collected_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
+);
+
+-- Index for efficient lookups by build
+CREATE INDEX idx_build_metrics_build_id ON build_metrics(build_id);
+
+-- Index for time-based queries (alerting)
+CREATE INDEX idx_build_metrics_collected_at ON build_metrics(collected_at);
+
+-- Index for metric name filtering
+CREATE INDEX idx_build_metrics_name ON build_metrics(metric_name);
+
+-- Prevent duplicate metrics for same build+name
+ALTER TABLE build_metrics ADD CONSTRAINT unique_build_metric_name UNIQUE (build_id, metric_name);
+
+-- Create view for aggregate build statistics
+CREATE VIEW build_metrics_summary AS
+SELECT
+    b.id as build_id,
+    b.job_name,
+    b.status,
+    b.system,
+    e.jobset_id,
+    j.project_id,
+    b.started_at,
+    b.completed_at,
+    EXTRACT(EPOCH FROM (b.completed_at - b.started_at)) as duration_seconds,
+    MAX(CASE WHEN bm.metric_name = 'output_size_bytes' THEN bm.metric_value END) as output_size_bytes,
+    MAX(CASE WHEN bm.metric_name = 'peak_memory_bytes' THEN bm.metric_value END) as peak_memory_bytes,
+    MAX(CASE WHEN bm.metric_name = 'nar_size_bytes' THEN bm.metric_value END) as nar_size_bytes
+FROM builds b
+JOIN evaluations e ON b.evaluation_id = e.id
+JOIN jobsets j ON e.jobset_id = j.id
+LEFT JOIN build_metrics bm ON b.id = bm.build_id
+GROUP BY b.id, b.job_name, b.status, b.system, e.jobset_id, j.project_id,
+         b.started_at, b.completed_at;
diff --git a/crates/common/src/models.rs b/crates/common/src/models.rs
index 45e73ed..bb62ff8 100644
--- a/crates/common/src/models.rs
+++ b/crates/common/src/models.rs
@@ -164,6 +164,31 @@ pub struct BuildDependency {
     pub dependency_build_id: Uuid,
 }
 
+/// One named measurement recorded for a build (see migration 012).
+#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
+pub struct BuildMetric {
+    pub id: Uuid,
+    pub build_id: Uuid,
+    pub metric_name: String,
+    pub metric_value: f64,
+    pub unit: String,
+    pub collected_at: DateTime<Utc>,
+}
+
+/// Well-known metric names; keep in sync with the `build_metrics_summary` view.
+pub mod metric_names {
+    pub const BUILD_DURATION_SECONDS: &str = "build_duration_seconds";
+    pub const OUTPUT_SIZE_BYTES: &str = "output_size_bytes";
+    pub const PEAK_MEMORY_BYTES: &str = "peak_memory_bytes";
+    pub const NAR_SIZE_BYTES: &str = "nar_size_bytes";
+}
+
+/// Units for the well-known metrics.
+pub mod metric_units {
+    pub const SECONDS: &str = "seconds";
+    pub const BYTES: &str = "bytes";
+}
+
 /// Active jobset view — enabled jobsets joined with project info.
 #[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
 pub struct ActiveJobset {
diff --git a/crates/common/src/repo/build_metrics.rs b/crates/common/src/repo/build_metrics.rs
new file mode 100644
index 0000000..3f0d5b3
--- /dev/null
+++ b/crates/common/src/repo/build_metrics.rs
@@ -0,0 +1,72 @@
+use sqlx::PgPool;
+use uuid::Uuid;
+
+use crate::{
+    error::{CiError, Result},
+    models::BuildMetric,
+};
+
+/// Insert a metric value for a build, or refresh the stored value (and
+/// unit) if one already exists for `(build_id, metric_name)`.
+/// `collected_at` is reset to NOW() on every write.
+pub async fn upsert(
+    pool: &PgPool,
+    build_id: Uuid,
+    metric_name: &str,
+    metric_value: f64,
+    unit: &str,
+) -> Result<BuildMetric> {
+    sqlx::query_as::<_, BuildMetric>(
+        "INSERT INTO build_metrics (build_id, metric_name, metric_value, unit) \
+         VALUES ($1, $2, $3, $4) \
+         ON CONFLICT (build_id, metric_name) DO UPDATE \
+         SET metric_value = EXCLUDED.metric_value, \
+             unit = EXCLUDED.unit, \
+             collected_at = NOW() \
+         RETURNING *",
+    )
+    .bind(build_id)
+    .bind(metric_name)
+    .bind(metric_value)
+    .bind(unit)
+    .fetch_one(pool)
+    .await
+    .map_err(CiError::Database)
+}
+
+/// Percentage (0.0..=100.0) of builds whose status is 'Failed' among
+/// builds completed within the last `window_minutes`, optionally scoped
+/// to a project and/or a jobset. Returns 0.0 when no builds match.
+pub async fn calculate_failure_rate(
+    pool: &PgPool,
+    project_id: Option<Uuid>,
+    jobset_id: Option<Uuid>,
+    window_minutes: i64,
+) -> Result<f64> {
+    let rows: Vec<(Uuid, String)> = sqlx::query_as(
+        "SELECT b.id, b.status::text \
+         FROM builds b \
+         JOIN evaluations e ON b.evaluation_id = e.id \
+         JOIN jobsets j ON e.jobset_id = j.id \
+         WHERE ($1::uuid IS NULL OR j.project_id = $1) \
+           AND ($2::uuid IS NULL OR j.id = $2) \
+           AND b.completed_at > NOW() - (INTERVAL '1 minute' * $3) \
+         ORDER BY b.completed_at DESC",
+    )
+    .bind(project_id)
+    .bind(jobset_id)
+    .bind(window_minutes)
+    .fetch_all(pool)
+    .await
+    .map_err(CiError::Database)?;
+
+    if rows.is_empty() {
+        return Ok(0.0);
+    }
+
+    let failed = rows
+        .iter()
+        .filter(|(_, status)| *status == "Failed")
+        .count();
+    Ok((failed as f64) / (rows.len() as f64) * 100.0)
+}
diff --git a/crates/common/src/repo/mod.rs b/crates/common/src/repo/mod.rs
index 7fc1109..cb41557 100644
--- a/crates/common/src/repo/mod.rs
+++ b/crates/common/src/repo/mod.rs
@@ -1,5 +1,6 @@
 pub mod api_keys;
 pub mod build_dependencies;
+pub mod build_metrics;
 pub mod build_products;
 pub mod build_steps;
 pub mod builds;