From a2b638d4db6837176786de5b13b66ebab9bbf37b Mon Sep 17 00:00:00 2001 From: NotAShelf Date: Sat, 14 Feb 2026 13:55:07 +0300 Subject: [PATCH] nix: attempt to fix VM tests; general cleanup Signed-off-by: NotAShelf Change-Id: I65f6909ef02ab4599f5b0bbc0930367e6a6a6964 --- crates/common/src/models.rs | 2 + crates/common/src/repo/evaluations.rs | 17 + crates/evaluator/src/eval_loop.rs | 358 +++- crates/evaluator/src/git.rs | 46 +- crates/evaluator/src/nix.rs | 61 +- crates/evaluator/tests/eval_tests.rs | 25 + crates/queue-runner/src/worker.rs | 9 +- crates/server/src/auth_middleware.rs | 28 +- crates/server/src/routes/cache.rs | 123 +- crates/server/src/routes/channels.rs | 20 + crates/server/src/routes/dashboard.rs | 15 +- crates/server/src/routes/metrics.rs | 15 +- crates/server/src/routes/mod.rs | 3 +- crates/server/static/style.css | 706 ++++---- flake.nix | 35 +- nix/demo-vm.nix | 99 +- nix/modules/nixos.nix | 411 ++++- nix/tests/api-crud.nix | 4 +- nix/tests/auth-rbac.nix | 10 +- nix/tests/basic-api.nix | 14 +- nix/tests/declarative.nix | 471 ++++++ nix/tests/e2e.nix | 263 +-- nix/tests/features.nix | 1 + nix/tests/s3-cache.nix | 206 +++ nix/vm-common.nix | 111 +- nix/vm-test.nix | 2216 ------------------------- 26 files changed, 2325 insertions(+), 2944 deletions(-) create mode 100644 nix/tests/declarative.nix create mode 100644 nix/tests/s3-cache.nix delete mode 100644 nix/vm-test.nix diff --git a/crates/common/src/models.rs b/crates/common/src/models.rs index bb62ff8..da352c7 100644 --- a/crates/common/src/models.rs +++ b/crates/common/src/models.rs @@ -48,6 +48,7 @@ pub struct Evaluation { } #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, sqlx::Type)] +#[serde(rename_all = "lowercase")] #[sqlx(type_name = "text", rename_all = "lowercase")] pub enum EvaluationStatus { Pending, @@ -121,6 +122,7 @@ pub struct Build { } #[derive(Debug, Clone, Serialize, Deserialize, sqlx::Type, PartialEq, Eq)] +#[serde(rename_all = "lowercase")] #[sqlx(type_name = 
"text", rename_all = "lowercase")] pub enum BuildStatus { Pending, diff --git a/crates/common/src/repo/evaluations.rs b/crates/common/src/repo/evaluations.rs index b339f87..a090696 100644 --- a/crates/common/src/repo/evaluations.rs +++ b/crates/common/src/repo/evaluations.rs @@ -170,3 +170,20 @@ pub async fn count(pool: &PgPool) -> Result { .map_err(CiError::Database)?; Ok(row.0) } + +/// Get an evaluation by jobset_id and commit_hash. +pub async fn get_by_jobset_and_commit( + pool: &PgPool, + jobset_id: Uuid, + commit_hash: &str, +) -> Result> { + sqlx::query_as::<_, Evaluation>( + "SELECT * FROM evaluations WHERE jobset_id = $1 AND commit_hash = $2 \ + ORDER BY evaluation_time DESC LIMIT 1", + ) + .bind(jobset_id) + .bind(commit_hash) + .fetch_optional(pool) + .await + .map_err(CiError::Database) +} diff --git a/crates/evaluator/src/eval_loop.rs b/crates/evaluator/src/eval_loop.rs index dd36548..b261aef 100644 --- a/crates/evaluator/src/eval_loop.rs +++ b/crates/evaluator/src/eval_loop.rs @@ -1,9 +1,10 @@ use std::{collections::HashMap, time::Duration}; +use anyhow::Context; use chrono::Utc; use fc_common::{ config::EvaluatorConfig, - error::check_disk_space, + error::{CiError, check_disk_space}, models::{ CreateBuild, CreateEvaluation, @@ -15,6 +16,7 @@ use fc_common::{ }; use futures::stream::{self, StreamExt}; use sqlx::PgPool; +use tracing::info; use uuid::Uuid; pub async fn run(pool: PgPool, config: EvaluatorConfig) -> anyhow::Result<()> { @@ -172,7 +174,33 @@ async fn evaluate_jobset( "Inputs unchanged (hash: {}), skipping evaluation", &inputs_hash[..16], ); - return Ok(()); + // Create evaluation record even when skipped so system tracks this check + // Handle duplicate key conflict gracefully (another evaluator may have + // created it) - fall through to process existing evaluation instead of + // skipping + if let Err(e) = repo::evaluations::create(pool, CreateEvaluation { + jobset_id: jobset.id, + commit_hash: commit_hash.clone(), + pr_number: None, + 
pr_head_branch: None, + pr_base_branch: None, + pr_action: None, + }) + .await + { + if !matches!(e, CiError::Conflict(_)) { + return Err(e.into()); + } + tracing::info!( + jobset = %jobset.name, + commit = %commit_hash, + "Evaluation already exists (concurrent creation in inputs_hash path), will process" + ); + } else { + // Successfully created new evaluation, can skip + repo::jobsets::update_last_checked(pool, jobset.id).await?; + return Ok(()); + } } // Also skip if commit hasn't changed (backward compat) @@ -183,9 +211,114 @@ async fn evaluate_jobset( tracing::debug!( jobset = %jobset.name, commit = %commit_hash, - "Already evaluated, skipping" + "Inputs unchanged (hash: {}), skipping evaluation", + &inputs_hash[..16], ); - return Ok(()); + // Create evaluation record even when skipped so system tracks this check + // Handle duplicate key conflict gracefully (another evaluator may have + // created it) - fall through to process existing evaluation instead of + // skipping + if let Err(e) = repo::evaluations::create(pool, CreateEvaluation { + jobset_id: jobset.id, + commit_hash: commit_hash.clone(), + pr_number: None, + pr_head_branch: None, + pr_base_branch: None, + pr_action: None, + }) + .await + { + if !matches!(e, CiError::Conflict(_)) { + return Err(e.into()); + } + tracing::info!( + jobset = %jobset.name, + commit = %commit_hash, + "Evaluation already exists (concurrent creation in commit path), will process" + ); + let existing = repo::evaluations::get_by_jobset_and_commit( + pool, + jobset.id, + &commit_hash, + ) + .await? 
+ .ok_or_else(|| { + anyhow::anyhow!( + "Evaluation conflict but not found: {}/{}", + jobset.id, + commit_hash + ) + })?; + + if existing.status == EvaluationStatus::Completed { + // Check if we need to re-evaluate due to no builds + let builds = + repo::builds::list_for_evaluation(pool, existing.id).await?; + if builds.is_empty() { + info!( + "Evaluation completed with 0 builds, re-running nix evaluation \ + jobset={} commit={}", + jobset.name, commit_hash + ); + // Update existing evaluation status to Running + repo::evaluations::update_status( + pool, + existing.id, + EvaluationStatus::Running, + None, + ) + .await?; + // Use existing evaluation instead of creating new one + let eval = existing; + // Run nix evaluation and create builds from the result + let eval_result = crate::nix::evaluate( + &repo_path, + &jobset.nix_expression, + jobset.flake_mode, + nix_timeout, + config, + &inputs, + ) + .await?; + + create_builds_from_eval(pool, eval.id, &eval_result).await?; + + repo::evaluations::update_status( + pool, + eval.id, + EvaluationStatus::Completed, + None, + ) + .await?; + + repo::jobsets::update_last_checked(pool, jobset.id).await?; + return Ok(()); + } else { + info!( + "Evaluation already completed with {} builds, skipping nix \ + evaluation jobset={} commit={}", + builds.len(), + jobset.name, + commit_hash + ); + repo::jobsets::update_last_checked(pool, jobset.id).await?; + return Ok(()); + } + } + + // Existing evaluation is pending or running, update status and continue + repo::evaluations::update_status( + pool, + existing.id, + EvaluationStatus::Running, + None, + ) + .await?; + } else { + // Successfully created new evaluation, can skip + repo::jobsets::update_last_checked(pool, jobset.id).await?; + return Ok(()); + } } tracing::info!( @@ -194,8 +327,9 @@ async fn evaluate_jobset( "Starting evaluation" ); - // Create evaluation record - let eval = repo::evaluations::create(pool, CreateEvaluation { + // Create evaluation record. 
If it already exists (race condition), fetch the + // existing one and continue. Only update status if it's still pending. + let eval = match repo::evaluations::create(pool, CreateEvaluation { jobset_id: jobset.id, commit_hash: commit_hash.clone(), pr_number: None, @@ -203,16 +337,72 @@ async fn evaluate_jobset( pr_base_branch: None, pr_action: None, }) - .await?; + .await + { + Ok(eval) => eval, + Err(CiError::Conflict(_)) => { + tracing::info!( + jobset = %jobset.name, + commit = %commit_hash, + "Evaluation already exists (conflict), fetching existing record" + ); + let existing = repo::evaluations::get_by_jobset_and_commit( + pool, + jobset.id, + &commit_hash, + ) + .await? + .ok_or_else(|| { + anyhow::anyhow!( + "Evaluation conflict but not found: {}/{}", + jobset.id, + commit_hash + ) + })?; - // Mark as running and set inputs hash - repo::evaluations::update_status( - pool, - eval.id, - EvaluationStatus::Running, - None, - ) - .await?; + if existing.status == EvaluationStatus::Pending { + repo::evaluations::update_status( + pool, + existing.id, + EvaluationStatus::Running, + None, + ) + .await?; + } else if existing.status == EvaluationStatus::Completed { + let build_count = repo::builds::count_filtered( + pool, + Some(existing.id), + None, + None, + None, + ) + .await?; + + if build_count > 0 { + info!( + "Evaluation already completed with {} builds, skipping nix \ + evaluation jobset={} commit={}", + build_count, jobset.name, commit_hash + ); + return Ok(()); + } else { + info!( + "Evaluation completed but has 0 builds, re-running nix evaluation \ + jobset={} commit={}", + jobset.name, commit_hash + ); + } + } + existing + }, + Err(e) => { + return Err(anyhow::anyhow!(e)).with_context(|| { + format!("failed to create evaluation for jobset {}", jobset.name) + }); + }, + }; + + // Set inputs hash (only needed for new evaluations, not existing ones) let _ = repo::evaluations::set_inputs_hash(pool, eval.id, &inputs_hash).await; // Check for declarative config 
in repo @@ -230,6 +420,7 @@ async fn evaluate_jobset( .await { Ok(eval_result) => { + tracing::debug!(jobset = %jobset.name, job_count = eval_result.jobs.len(), "Nix evaluation returned"); tracing::info!( jobset = %jobset.name, count = eval_result.jobs.len(), @@ -237,70 +428,7 @@ async fn evaluate_jobset( "Evaluation discovered jobs" ); - // Create build records, tracking drv_path -> build_id for dependency - // resolution - let mut drv_to_build: HashMap = HashMap::new(); - let mut name_to_build: HashMap = HashMap::new(); - - for job in &eval_result.jobs { - let outputs_json = job - .outputs - .as_ref() - .map(|o| serde_json::to_value(o).unwrap_or_default()); - let constituents_json = job - .constituents - .as_ref() - .map(|c| serde_json::to_value(c).unwrap_or_default()); - let is_aggregate = job.constituents.is_some(); - - let build = repo::builds::create(pool, CreateBuild { - evaluation_id: eval.id, - job_name: job.name.clone(), - drv_path: job.drv_path.clone(), - system: job.system.clone(), - outputs: outputs_json, - is_aggregate: Some(is_aggregate), - constituents: constituents_json, - }) - .await?; - - drv_to_build.insert(job.drv_path.clone(), build.id); - name_to_build.insert(job.name.clone(), build.id); - } - - // Resolve dependencies - for job in &eval_result.jobs { - let build_id = match drv_to_build.get(&job.drv_path) { - Some(id) => *id, - None => continue, - }; - - // Input derivation dependencies - if let Some(ref input_drvs) = job.input_drvs { - for dep_drv in input_drvs.keys() { - if let Some(&dep_build_id) = drv_to_build.get(dep_drv) - && dep_build_id != build_id - { - let _ = - repo::build_dependencies::create(pool, build_id, dep_build_id) - .await; - } - } - } - - // Aggregate constituent dependencies - if let Some(ref constituents) = job.constituents { - for constituent_name in constituents { - if let Some(&dep_build_id) = name_to_build.get(constituent_name) - && dep_build_id != build_id - { - let _ = - repo::build_dependencies::create(pool, 
build_id, dep_build_id) - .await; - } - } - } - } + create_builds_from_eval(pool, eval.id, &eval_result).await?; repo::evaluations::update_status( pool, @@ -349,6 +477,78 @@ async fn evaluate_jobset( Ok(()) } +/// Create build records from evaluation results, resolving dependencies. +async fn create_builds_from_eval( + pool: &PgPool, + eval_id: Uuid, + eval_result: &crate::nix::EvalResult, +) -> anyhow::Result<()> { + let mut drv_to_build: HashMap = HashMap::new(); + let mut name_to_build: HashMap = HashMap::new(); + + for job in &eval_result.jobs { + let outputs_json = job + .outputs + .as_ref() + .map(|o| serde_json::to_value(o).unwrap_or_default()); + let constituents_json = job + .constituents + .as_ref() + .map(|c| serde_json::to_value(c).unwrap_or_default()); + let is_aggregate = job.constituents.is_some(); + + let build = repo::builds::create(pool, CreateBuild { + evaluation_id: eval_id, + job_name: job.name.clone(), + drv_path: job.drv_path.clone(), + system: job.system.clone(), + outputs: outputs_json, + is_aggregate: Some(is_aggregate), + constituents: constituents_json, + }) + .await?; + + drv_to_build.insert(job.drv_path.clone(), build.id); + name_to_build.insert(job.name.clone(), build.id); + } + + // Resolve dependencies + for job in &eval_result.jobs { + let build_id = match drv_to_build.get(&job.drv_path) { + Some(id) => *id, + None => continue, + }; + + // Input derivation dependencies + if let Some(ref input_drvs) = job.input_drvs { + for dep_drv in input_drvs.keys() { + if let Some(&dep_build_id) = drv_to_build.get(dep_drv) + && dep_build_id != build_id + { + let _ = + repo::build_dependencies::create(pool, build_id, dep_build_id) + .await; + } + } + } + + // Aggregate constituent dependencies + if let Some(ref constituents) = job.constituents { + for constituent_name in constituents { + if let Some(&dep_build_id) = name_to_build.get(constituent_name) + && dep_build_id != build_id + { + let _ = + repo::build_dependencies::create(pool, build_id, 
dep_build_id) + .await; + } + } + } + } + + Ok(()) +} + /// Compute a deterministic hash over the commit and all jobset inputs. /// Used for evaluation caching — skip re-eval when inputs haven't changed. fn compute_inputs_hash(commit_hash: &str, inputs: &[JobsetInput]) -> String { diff --git a/crates/evaluator/src/git.rs b/crates/evaluator/src/git.rs index 1c0fc79..6521c44 100644 --- a/crates/evaluator/src/git.rs +++ b/crates/evaluator/src/git.rs @@ -16,7 +16,9 @@ pub fn clone_or_fetch( ) -> Result<(PathBuf, String)> { let repo_path = work_dir.join(project_name); - let repo = if repo_path.exists() { + let is_fetch = repo_path.exists(); + + let repo = if is_fetch { let repo = Repository::open(&repo_path)?; // Fetch origin — scope the borrow so `remote` is dropped before we move // `repo` @@ -29,21 +31,35 @@ pub fn clone_or_fetch( Repository::clone(url, &repo_path)? }; - // Resolve commit: use specific branch ref or fall back to HEAD - let hash = if let Some(branch_name) = branch { - let refname = format!("refs/remotes/origin/{branch_name}"); - let reference = repo.find_reference(&refname).map_err(|e| { - fc_common::error::CiError::NotFound(format!( - "Branch '{branch_name}' not found ({refname}): {e}" - )) - })?; - let commit = reference.peel_to_commit()?; - commit.id().to_string() - } else { - let head = repo.head()?; - let commit = head.peel_to_commit()?; - commit.id().to_string() + // Resolve commit from remote refs (which are always up-to-date after fetch). + // When no branch is specified, detect the default branch from local HEAD's + // tracking target. 
+ let branch_name = match branch { + Some(b) => b.to_string(), + None => { + let head = repo.head()?; + head.shorthand().unwrap_or("master").to_string() + }, }; + let remote_ref = format!("refs/remotes/origin/{branch_name}"); + let reference = repo.find_reference(&remote_ref).map_err(|e| { + fc_common::error::CiError::NotFound(format!( + "Branch '{branch_name}' not found ({remote_ref}): {e}" + )) + })?; + let commit = reference.peel_to_commit()?; + let hash = commit.id().to_string(); + + // After fetch, update the working tree so nix evaluation sees the latest + // files. Skip on fresh clone since the checkout is already current. + if is_fetch { + repo.checkout_tree( + commit.as_object(), + Some(git2::build::CheckoutBuilder::new().force()), + )?; + repo.set_head_detached(commit.id())?; + } + Ok((repo_path, hash)) } diff --git a/crates/evaluator/src/nix.rs b/crates/evaluator/src/nix.rs index 2d2e79a..6b73dc9 100644 --- a/crates/evaluator/src/nix.rs +++ b/crates/evaluator/src/nix.rs @@ -8,22 +8,37 @@ use fc_common::{ }; use serde::Deserialize; -#[derive(Debug, Clone, Deserialize)] +#[derive(Debug, Clone)] pub struct NixJob { pub name: String, - #[serde(alias = "drvPath")] pub drv_path: String, pub system: Option, pub outputs: Option>, - #[serde(alias = "inputDrvs")] pub input_drvs: Option>, pub constituents: Option>, } +/// Raw deserialization target for nix-eval-jobs output. +/// nix-eval-jobs emits both `attr` (attribute path) and `name` (derivation +/// name) in the same JSON object. We deserialize them separately and prefer +/// `attr` as the job identifier. +#[derive(Deserialize)] +struct RawNixJob { + name: Option, + attr: Option, + #[serde(alias = "drvPath")] + drv_path: Option, + system: Option, + outputs: Option>, + #[serde(alias = "inputDrvs")] + input_drvs: Option>, + constituents: Option>, +} + /// An error reported by nix-eval-jobs for a single job. 
#[derive(Debug, Clone, Deserialize)] struct NixEvalError { - #[serde(alias = "attr")] + attr: Option, name: Option, error: String, } @@ -49,7 +64,11 @@ pub fn parse_eval_output(stdout: &str) -> EvalResult { && parsed.get("error").is_some() { if let Ok(eval_err) = serde_json::from_str::(line) { - let name = eval_err.name.as_deref().unwrap_or(""); + let name = eval_err + .attr + .as_deref() + .or(eval_err.name.as_deref()) + .unwrap_or(""); tracing::warn!( job = name, "nix-eval-jobs reported error: {}", @@ -60,8 +79,20 @@ pub fn parse_eval_output(stdout: &str) -> EvalResult { continue; } - match serde_json::from_str::(line) { - Ok(job) => jobs.push(job), + match serde_json::from_str::(line) { + Ok(raw) => { + // drv_path is required for a valid job + if let Some(drv_path) = raw.drv_path { + jobs.push(NixJob { + name: raw.attr.or(raw.name).unwrap_or_default(), + drv_path, + system: raw.system, + outputs: raw.outputs, + input_drvs: raw.input_drvs, + constituents: raw.constituents, + }); + } + }, Err(e) => { tracing::warn!("Failed to parse nix-eval-jobs line: {e}"); }, @@ -100,9 +131,11 @@ async fn evaluate_flake( ) -> Result { let flake_ref = format!("{}#{}", repo_path.display(), nix_expression); + tracing::debug!(flake_ref = %flake_ref, "Running nix-eval-jobs"); + tokio::time::timeout(timeout, async { let mut cmd = tokio::process::Command::new("nix-eval-jobs"); - cmd.arg("--flake").arg(&flake_ref); + cmd.arg("--flake").arg(&flake_ref).arg("--force-recurse"); if config.restrict_eval { cmd.args(["--option", "restrict-eval", "true"]); @@ -130,6 +163,16 @@ async fn evaluate_flake( ); } + if result.jobs.is_empty() && result.error_count == 0 { + let stderr = String::from_utf8_lossy(&out.stderr); + if !stderr.trim().is_empty() { + tracing::warn!( + stderr = %stderr, + "nix-eval-jobs returned no jobs, stderr output present" + ); + } + } + Ok(result) }, _ => { @@ -163,7 +206,7 @@ async fn evaluate_legacy( tokio::time::timeout(timeout, async { // Try nix-eval-jobs without 
--flake for legacy expressions let mut cmd = tokio::process::Command::new("nix-eval-jobs"); - cmd.arg(&expr_path); + cmd.arg(&expr_path).arg("--force-recurse"); if config.restrict_eval { cmd.args(["--option", "restrict-eval", "true"]); diff --git a/crates/evaluator/tests/eval_tests.rs b/crates/evaluator/tests/eval_tests.rs index ccf0848..7c80d7d 100644 --- a/crates/evaluator/tests/eval_tests.rs +++ b/crates/evaluator/tests/eval_tests.rs @@ -87,6 +87,31 @@ fn test_parse_error_without_name() { assert_eq!(result.error_count, 1); } +#[test] +fn test_parse_nix_eval_jobs_attr_field() { + // nix-eval-jobs uses "attr" instead of "name" for the job identifier + let line = r#"{"attr":"x86_64-linux.hello","drvPath":"/nix/store/abc123-hello.drv","system":"x86_64-linux"}"#; + let result = fc_evaluator::nix::parse_eval_output(line); + assert_eq!(result.jobs.len(), 1); + assert_eq!(result.jobs[0].name, "x86_64-linux.hello"); + assert_eq!(result.jobs[0].drv_path, "/nix/store/abc123-hello.drv"); +} + +#[test] +fn test_parse_nix_eval_jobs_both_attr_and_name() { + // nix-eval-jobs with --force-recurse outputs both "attr" and "name" fields. + // "attr" is the attribute path, "name" is the derivation name. We prefer + // "attr" as the job identifier. 
+ let line = r#"{"attr":"x86_64-linux.hello","attrPath":["x86_64-linux","hello"],"drvPath":"/nix/store/abc123-hello.drv","name":"fc-test-hello","outputs":{"out":"/nix/store/abc123-hello"},"system":"x86_64-linux"}"#; + let result = fc_evaluator::nix::parse_eval_output(line); + assert_eq!(result.jobs.len(), 1); + assert_eq!(result.jobs[0].name, "x86_64-linux.hello"); + assert_eq!(result.jobs[0].drv_path, "/nix/store/abc123-hello.drv"); + assert_eq!(result.jobs[0].system.as_deref(), Some("x86_64-linux")); + let outputs = result.jobs[0].outputs.as_ref().unwrap(); + assert_eq!(outputs.get("out").unwrap(), "/nix/store/abc123-hello"); +} + // --- Inputs hash computation --- #[test] diff --git a/crates/queue-runner/src/worker.rs b/crates/queue-runner/src/worker.rs index 099ec9c..b77d51f 100644 --- a/crates/queue-runner/src/worker.rs +++ b/crates/queue-runner/src/worker.rs @@ -27,6 +27,7 @@ use tokio::sync::Semaphore; pub struct WorkerPool { semaphore: Arc, + worker_count: usize, pool: PgPool, work_dir: Arc, build_timeout: Duration, @@ -57,6 +58,7 @@ impl WorkerPool { let alert_manager = alert_config.map(AlertManager::new); Self { semaphore: Arc::new(Semaphore::new(workers)), + worker_count: workers, pool: db_pool, work_dir: Arc::new(work_dir), build_timeout, @@ -79,7 +81,7 @@ impl WorkerPool { /// Wait until all in-flight builds complete (semaphore fully available). 
pub async fn wait_for_drain(&self) { // Acquire all permits = all workers idle - let workers = self.semaphore.available_permits() + 1; // at least 1 + let workers = self.worker_count; let _ = tokio::time::timeout( Duration::from_secs(self.build_timeout.as_secs() + 60), async { @@ -645,6 +647,11 @@ async fn run_build( max = build.max_retries, "Build failed, scheduling retry" ); + // Clean up old build steps before retry + sqlx::query("DELETE FROM build_steps WHERE build_id = $1") + .bind(build.id) + .execute(pool) + .await?; sqlx::query( "UPDATE builds SET status = 'pending', started_at = NULL, \ retry_count = retry_count + 1, completed_at = NULL WHERE id = $1", diff --git a/crates/server/src/auth_middleware.rs b/crates/server/src/auth_middleware.rs index 1f9774c..bf5e383 100644 --- a/crates/server/src/auth_middleware.rs +++ b/crates/server/src/auth_middleware.rs @@ -198,14 +198,36 @@ impl RequireRoles { } /// Session extraction middleware for dashboard routes. -/// Reads `fc_user_session` or `fc_session` cookie and inserts User/ApiKey into -/// extensions if valid. +/// Reads `fc_user_session` or `fc_session` cookie, or Bearer token (API key), +/// and inserts User/ApiKey into extensions if valid. 
pub async fn extract_session( State(state): State, mut request: Request, next: Next, ) -> Response { - // Extract cookie header first, then clone to end the borrow + // Try Bearer token first (API key auth) + let auth_header = request + .headers() + .get("authorization") + .and_then(|v| v.to_str().ok()) + .map(String::from); + + if let Some(ref auth_header) = auth_header { + if let Some(token) = auth_header.strip_prefix("Bearer ") { + use sha2::{Digest, Sha256}; + let mut hasher = Sha256::new(); + hasher.update(token.as_bytes()); + let key_hash = hex::encode(hasher.finalize()); + + if let Ok(Some(api_key)) = + fc_common::repo::api_keys::get_by_hash(&state.pool, &key_hash).await + { + request.extensions_mut().insert(api_key.clone()); + } + } + } + + // Extract cookie header next let cookie_header = request .headers() .get("cookie") diff --git a/crates/server/src/routes/cache.rs b/crates/server/src/routes/cache.rs index 9d7292f..ce63418 100644 --- a/crates/server/src/routes/cache.rs +++ b/crates/server/src/routes/cache.rs @@ -10,6 +10,54 @@ use tokio::process::Command; use crate::{error::ApiError, state::AppState}; +/// Extract the first path info entry from `nix path-info --json` output, +/// handling both the old array format (`[{"path":...}]`) and the new +/// object-keyed format (`{"/nix/store/...": {...}}`). +fn first_path_info_entry( + parsed: &serde_json::Value, +) -> Option<(&serde_json::Value, Option<&str>)> { + if let Some(arr) = parsed.as_array() { + let entry = arr.first()?; + let path = entry.get("path").and_then(|v| v.as_str()); + Some((entry, path)) + } else if let Some(obj) = parsed.as_object() { + let (key, val) = obj.iter().next()?; + Some((val, Some(key.as_str()))) + } else { + None + } +} + +/// Look up a store path by its nix hash, checking both build_products and +/// builds tables. 
+async fn find_store_path( + pool: &sqlx::PgPool, + hash: &str, +) -> std::result::Result, ApiError> { + let like_pattern = format!("/nix/store/{hash}-%"); + + let path: Option = sqlx::query_scalar( + "SELECT path FROM build_products WHERE path LIKE $1 LIMIT 1", + ) + .bind(&like_pattern) + .fetch_optional(pool) + .await + .map_err(|e| ApiError(fc_common::CiError::Database(e)))?; + + if path.is_some() { + return Ok(path); + } + + sqlx::query_scalar( + "SELECT build_output_path FROM builds WHERE build_output_path LIKE $1 \ + LIMIT 1", + ) + .bind(&like_pattern) + .fetch_optional(pool) + .await + .map_err(|e| ApiError(fc_common::CiError::Database(e))) +} + /// Serve `NARInfo` for a store path hash. /// GET /nix-cache/{hash}.narinfo async fn narinfo( @@ -27,27 +75,14 @@ async fn narinfo( return Ok(StatusCode::NOT_FOUND.into_response()); } - // Look up the store path from build_products by matching the hash prefix - let product = sqlx::query_as::<_, fc_common::models::BuildProduct>( - "SELECT * FROM build_products WHERE path LIKE $1 LIMIT 1", - ) - .bind(format!("/nix/store/{hash}-%")) - .fetch_optional(&state.pool) - .await - .map_err(|e| ApiError(fc_common::CiError::Database(e)))?; - - let product = match product { - Some(p) => p, - None => return Ok(StatusCode::NOT_FOUND.into_response()), + let store_path = match find_store_path(&state.pool, hash).await? 
{ + Some(p) if fc_common::validate::is_valid_store_path(&p) => p, + _ => return Ok(StatusCode::NOT_FOUND.into_response()), }; - if !fc_common::validate::is_valid_store_path(&product.path) { - return Ok(StatusCode::NOT_FOUND.into_response()); - } - // Get narinfo from nix path-info let output = Command::new("nix") - .args(["path-info", "--json", &product.path]) + .args(["path-info", "--json", &store_path]) .output() .await; @@ -62,7 +97,7 @@ async fn narinfo( Err(_) => return Ok(StatusCode::NOT_FOUND.into_response()), }; - let entry = match parsed.as_array().and_then(|a| a.first()) { + let (entry, path_from_info) = match first_path_info_entry(&parsed) { Some(e) => e, None => return Ok(StatusCode::NOT_FOUND.into_response()), }; @@ -72,10 +107,7 @@ async fn narinfo( .get("narSize") .and_then(serde_json::Value::as_u64) .unwrap_or(0); - let store_path = entry - .get("path") - .and_then(|v| v.as_str()) - .unwrap_or(&product.path); + let store_path = path_from_info.unwrap_or(&store_path); let refs: Vec<&str> = entry .get("references") @@ -174,11 +206,8 @@ async fn sign_narinfo(narinfo: &str, key_file: &std::path::Path) -> String { if let Ok(o) = re_output && let Ok(parsed) = serde_json::from_slice::(&o.stdout) - && let Some(sigs) = parsed - .as_array() - .and_then(|a| a.first()) - .and_then(|e| e.get("signatures")) - .and_then(|v| v.as_array()) + && let Some((entry, _)) = first_path_info_entry(&parsed) + && let Some(sigs) = entry.get("signatures").and_then(|v| v.as_array()) { let sig_lines: Vec = sigs .iter() @@ -214,26 +243,14 @@ async fn serve_nar_zst( return Ok(StatusCode::NOT_FOUND.into_response()); } - let product = sqlx::query_as::<_, fc_common::models::BuildProduct>( - "SELECT * FROM build_products WHERE path LIKE $1 LIMIT 1", - ) - .bind(format!("/nix/store/{hash}-%")) - .fetch_optional(&state.pool) - .await - .map_err(|e| ApiError(fc_common::CiError::Database(e)))?; - - let product = match product { - Some(p) => p, - None => return 
Ok(StatusCode::NOT_FOUND.into_response()), + let store_path = match find_store_path(&state.pool, hash).await? { + Some(p) if fc_common::validate::is_valid_store_path(&p) => p, + _ => return Ok(StatusCode::NOT_FOUND.into_response()), }; - if !fc_common::validate::is_valid_store_path(&product.path) { - return Ok(StatusCode::NOT_FOUND.into_response()); - } - // Use two piped processes instead of sh -c to prevent command injection let mut nix_child = std::process::Command::new("nix") - .args(["store", "dump-path", &product.path]) + .args(["store", "dump-path", &store_path]) .stdout(std::process::Stdio::piped()) .stderr(std::process::Stdio::null()) .spawn() @@ -290,25 +307,13 @@ async fn serve_nar( return Ok(StatusCode::NOT_FOUND.into_response()); } - let product = sqlx::query_as::<_, fc_common::models::BuildProduct>( - "SELECT * FROM build_products WHERE path LIKE $1 LIMIT 1", - ) - .bind(format!("/nix/store/{hash}-%")) - .fetch_optional(&state.pool) - .await - .map_err(|e| ApiError(fc_common::CiError::Database(e)))?; - - let product = match product { - Some(p) => p, - None => return Ok(StatusCode::NOT_FOUND.into_response()), + let store_path = match find_store_path(&state.pool, hash).await? 
{ + Some(p) if fc_common::validate::is_valid_store_path(&p) => p, + _ => return Ok(StatusCode::NOT_FOUND.into_response()), }; - if !fc_common::validate::is_valid_store_path(&product.path) { - return Ok(StatusCode::NOT_FOUND.into_response()); - } - let child = Command::new("nix") - .args(["store", "dump-path", &product.path]) + .args(["store", "dump-path", &store_path]) .stdout(std::process::Stdio::piped()) .stderr(std::process::Stdio::null()) .spawn(); diff --git a/crates/server/src/routes/channels.rs b/crates/server/src/routes/channels.rs index e8e7f76..2cc9582 100644 --- a/crates/server/src/routes/channels.rs +++ b/crates/server/src/routes/channels.rs @@ -50,9 +50,29 @@ async fn create_channel( input .validate() .map_err(|msg| ApiError(fc_common::CiError::Validation(msg)))?; + let jobset_id = input.jobset_id; let channel = fc_common::repo::channels::create(&state.pool, input) .await .map_err(ApiError)?; + + // Catch-up: if the jobset already has a completed evaluation, promote now + if let Ok(Some(eval)) = + fc_common::repo::evaluations::get_latest(&state.pool, jobset_id).await + { + if eval.status == fc_common::models::EvaluationStatus::Completed { + let _ = fc_common::repo::channels::auto_promote_if_complete( + &state.pool, + jobset_id, + eval.id, + ) + .await; + } + } + + // Re-fetch to include any promotion + let channel = fc_common::repo::channels::get(&state.pool, channel.id) + .await + .map_err(ApiError)?; Ok(Json(channel)) } diff --git a/crates/server/src/routes/dashboard.rs b/crates/server/src/routes/dashboard.rs index bbbbf3b..d834a6b 100644 --- a/crates/server/src/routes/dashboard.rs +++ b/crates/server/src/routes/dashboard.rs @@ -3,7 +3,7 @@ use axum::{ Form, Router, extract::{Path, Query, State}, - http::Extensions, + http::{Extensions, StatusCode}, response::{Html, IntoResponse, Redirect, Response}, routing::get, }; @@ -1271,12 +1271,15 @@ async fn login_action( let tmpl = LoginTemplate { error: Some("Invalid username or password".to_string()), }; - 
return Html( - tmpl - .render() - .unwrap_or_else(|e| format!("Template error: {e}")), + return ( + StatusCode::UNAUTHORIZED, + Html( + tmpl + .render() + .unwrap_or_else(|e| format!("Template error: {e}")), + ), ) - .into_response(); + .into_response(); } } diff --git a/crates/server/src/routes/metrics.rs b/crates/server/src/routes/metrics.rs index e3000c3..2407a13 100644 --- a/crates/server/src/routes/metrics.rs +++ b/crates/server/src/routes/metrics.rs @@ -54,6 +54,15 @@ struct SystemDistributionResponse { counts: Vec, } +/// Escape a string for use as a Prometheus label value. +/// Per the exposition format, backslash, double-quote, and newline must be +/// escaped. +fn escape_prometheus_label(s: &str) -> String { + s.replace('\\', "\\\\") + .replace('"', "\\\"") + .replace('\n', "\\n") +} + async fn prometheus_metrics(State(state): State) -> Response { let stats = match fc_common::repo::builds::get_stats(&state.pool).await { Ok(s) => s, @@ -216,8 +225,9 @@ async fn prometheus_metrics(State(state): State) -> Response { ); output.push_str("# TYPE fc_project_builds_completed gauge\n"); for (name, completed, _) in &per_project { + let escaped = escape_prometheus_label(name); output.push_str(&format!( - "fc_project_builds_completed{{project=\"{name}\"}} {completed}\n" + "fc_project_builds_completed{{project=\"{escaped}\"}} {completed}\n" )); } output.push_str( @@ -225,8 +235,9 @@ async fn prometheus_metrics(State(state): State) -> Response { ); output.push_str("# TYPE fc_project_builds_failed gauge\n"); for (name, _, failed) in &per_project { + let escaped = escape_prometheus_label(name); output.push_str(&format!( - "fc_project_builds_failed{{project=\"{name}\"}} {failed}\n" + "fc_project_builds_failed{{project=\"{escaped}\"}} {failed}\n" )); } } diff --git a/crates/server/src/routes/mod.rs b/crates/server/src/routes/mod.rs index 191a373..2e06562 100644 --- a/crates/server/src/routes/mod.rs +++ b/crates/server/src/routes/mod.rs @@ -45,7 +45,6 @@ static STYLE_CSS: 
&str = include_str!("../../static/style.css"); struct RateLimitState { requests: DashMap>, - _rps: u64, burst: u32, last_cleanup: std::sync::atomic::AtomicU64, } @@ -180,9 +179,9 @@ pub fn router(state: AppState, config: &ServerConfig) -> Router { if let (Some(rps), Some(burst)) = (config.rate_limit_rps, config.rate_limit_burst) { + let _ = rps; // rate_limit_rps reserved for future use let rl_state = Arc::new(RateLimitState { requests: DashMap::new(), - _rps: rps, burst, last_cleanup: std::sync::atomic::AtomicU64::new(0), }); diff --git a/crates/server/static/style.css b/crates/server/static/style.css index cb763da..9e8f75c 100644 --- a/crates/server/static/style.css +++ b/crates/server/static/style.css @@ -1,11 +1,7 @@ -/* FC CI — Design System v2 */ - -/* ================================================================ - Color Tokens - ================================================================ */ +/* Color Tokens */ :root { /* Neutral scale (warm gray) */ - --gray-50: #fafafa; + --gray-50: #fafafa; --gray-100: #f4f4f5; --gray-200: #e4e4e7; --gray-300: #d4d4d8; @@ -17,8 +13,8 @@ --gray-900: #18181b; --gray-950: #09090b; - /* Accent — indigo */ - --accent-50: #eef2ff; + /* Accent, indigo */ + --accent-50: #eef2ff; --accent-100: #e0e7ff; --accent-200: #c7d2fe; --accent-400: #818cf8; @@ -29,113 +25,116 @@ /* Semantic */ --green-500: #22c55e; --green-600: #16a34a; - --green-50: #f0fdf4; - --red-500: #ef4444; - --red-600: #dc2626; - --red-50: #fef2f2; + --green-50: #f0fdf4; + --red-500: #ef4444; + --red-600: #dc2626; + --red-50: #fef2f2; --amber-500: #f59e0b; --amber-600: #d97706; - --amber-50: #fffbeb; - --sky-500: #0ea5e9; - --sky-50: #f0f9ff; + --amber-50: #fffbeb; + --sky-500: #0ea5e9; + --sky-50: #f0f9ff; /* Light theme surfaces */ - --bg: #ffffff; - --bg-subtle: var(--gray-50); - --surface: #ffffff; - --surface-hover: var(--gray-50); + --bg: #ffffff; + --bg-subtle: var(--gray-50); + --surface: #ffffff; + --surface-hover: var(--gray-50); --surface-raised: 
#ffffff; - --border: var(--gray-200); - --border-subtle: var(--gray-100); + --border: var(--gray-200); + --border-subtle: var(--gray-100); /* Light theme text */ - --fg: var(--gray-900); + --fg: var(--gray-900); --fg-secondary: var(--gray-600); --fg-muted: var(--gray-500); --fg-faint: var(--gray-400); /* Light theme accent */ - --accent: var(--accent-600); + --accent: var(--accent-600); --accent-hover: var(--accent-700); - --accent-bg: var(--accent-50); - --accent-fg: var(--accent-600); + --accent-bg: var(--accent-50); + --accent-fg: var(--accent-600); /* Light theme semantic backgrounds */ - --green-bg: var(--green-50); - --green-fg: var(--green-600); - --red-bg: var(--red-50); - --red-fg: var(--red-600); - --amber-bg: var(--amber-50); - --amber-fg: var(--amber-600); - --sky-bg: var(--sky-50); - --sky-fg: var(--sky-500); + --green-bg: var(--green-50); + --green-fg: var(--green-600); + --red-bg: var(--red-50); + --red-fg: var(--red-600); + --amber-bg: var(--amber-50); + --amber-fg: var(--amber-600); + --sky-bg: var(--sky-50); + --sky-fg: var(--sky-500); /* Shadows */ - --shadow-xs: 0 1px 2px rgb(0 0 0 / .04); - --shadow-sm: 0 1px 3px rgb(0 0 0 / .06), 0 1px 2px rgb(0 0 0 / .04); - --shadow-md: 0 4px 6px -1px rgb(0 0 0 / .06), 0 2px 4px -2px rgb(0 0 0 / .04); + --shadow-xs: 0 1px 2px rgb(0 0 0 / 0.04); + --shadow-sm: 0 1px 3px rgb(0 0 0 / 0.06), 0 1px 2px rgb(0 0 0 / 0.04); + --shadow-md: + 0 4px 6px -1px rgb(0 0 0 / 0.06), 0 2px 4px -2px rgb(0 0 0 / 0.04); /* Shape */ --radius-sm: 6px; - --radius: 8px; + --radius: 8px; --radius-lg: 12px; --radius-xl: 16px; } -/* ================================================================ - Dark Theme - ================================================================ */ +/* Dark */ @media (prefers-color-scheme: dark) { :root { - --bg: var(--gray-950); - --bg-subtle: var(--gray-900); - --surface: var(--gray-900); - --surface-hover: var(--gray-800); + --bg: var(--gray-950); + --bg-subtle: var(--gray-900); + --surface: 
var(--gray-900); + --surface-hover: var(--gray-800); --surface-raised: var(--gray-800); - --border: var(--gray-800); - --border-subtle: var(--gray-900); + --border: var(--gray-800); + --border-subtle: var(--gray-900); - --fg: var(--gray-50); + --fg: var(--gray-50); --fg-secondary: var(--gray-400); - --fg-muted: var(--gray-500); - --fg-faint: var(--gray-600); + --fg-muted: var(--gray-500); + --fg-faint: var(--gray-600); - --accent: var(--accent-400); + --accent: var(--accent-400); --accent-hover: var(--accent-200); - --accent-bg: rgb(99 102 241 / .12); - --accent-fg: var(--accent-400); + --accent-bg: rgb(99 102 241 / 0.12); + --accent-fg: var(--accent-400); - --green-bg: rgb(34 197 94 / .12); + --green-bg: rgb(34 197 94 / 0.12); --green-fg: var(--green-500); - --red-bg: rgb(239 68 68 / .12); - --red-fg: var(--red-500); - --amber-bg: rgb(245 158 11 / .12); + --red-bg: rgb(239 68 68 / 0.12); + --red-fg: var(--red-500); + --amber-bg: rgb(245 158 11 / 0.12); --amber-fg: var(--amber-500); - --sky-bg: rgb(14 165 233 / .12); - --sky-fg: var(--sky-500); + --sky-bg: rgb(14 165 233 / 0.12); + --sky-fg: var(--sky-500); - --shadow-xs: 0 1px 2px rgb(0 0 0 / .2); - --shadow-sm: 0 1px 3px rgb(0 0 0 / .3), 0 1px 2px rgb(0 0 0 / .2); - --shadow-md: 0 4px 6px -1px rgb(0 0 0 / .3), 0 2px 4px -2px rgb(0 0 0 / .2); + --shadow-xs: 0 1px 2px rgb(0 0 0 / 0.2); + --shadow-sm: 0 1px 3px rgb(0 0 0 / 0.3), 0 1px 2px rgb(0 0 0 / 0.2); + --shadow-md: + 0 4px 6px -1px rgb(0 0 0 / 0.3), 0 2px 4px -2px rgb(0 0 0 / 0.2); } } -/* ================================================================ - Reset & Base - ================================================================ */ -*, *::before, *::after { +/* Reset & Base */ +*, +*::before, +*::after { margin: 0; padding: 0; box-sizing: border-box; } -html { height: 100%; } +html { + height: 100%; +} body { min-height: 100vh; display: flex; flex-direction: column; - font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Inter, Roboto, sans-serif; + 
font-family: + -apple-system, BlinkMacSystemFont, "Segoe UI", Inter, Roboto, sans-serif; font-size: 14px; line-height: 1.6; color: var(--fg); @@ -146,31 +145,33 @@ body { a { color: var(--accent); text-decoration: none; - transition: color .1s; + transition: color 0.1s; +} +a:hover { + color: var(--accent-hover); } -a:hover { color: var(--accent-hover); } -code, pre { - font-family: "SF Mono", "Cascadia Code", "JetBrains Mono", Menlo, Consolas, monospace; +code, +pre { + font-family: + "SF Mono", "Cascadia Code", "JetBrains Mono", Menlo, Consolas, monospace; font-size: 0.8125em; } code { background: var(--bg-subtle); border: 1px solid var(--border); - padding: .1em .35em; + padding: 0.1em 0.35em; border-radius: 4px; } pre { background: var(--bg-subtle); border: 1px solid var(--border); border-radius: var(--radius); - padding: .75rem 1rem; + padding: 0.75rem 1rem; overflow-x: auto; } -/* ================================================================ - Layout - ================================================================ */ +/* Layout */ .page-main { flex: 1; width: 100%; @@ -183,9 +184,7 @@ pre { padding: 1.5rem 1.5rem 0; } -/* ================================================================ - Navigation - ================================================================ */ +/* Navigation */ .navbar { position: sticky; top: 0; @@ -204,10 +203,12 @@ pre { font-weight: 700; font-size: 0.9375rem; color: var(--fg); - letter-spacing: -.02em; + letter-spacing: -0.02em; margin-right: 0.5rem; } -.nav-brand a:hover { color: var(--fg); } +.nav-brand a:hover { + color: var(--fg); +} .nav-links { display: flex; @@ -218,12 +219,14 @@ pre { display: inline-flex; align-items: center; height: 2rem; - padding: 0 .625rem; + padding: 0 0.625rem; border-radius: var(--radius-sm); - font-size: .8125rem; + font-size: 0.8125rem; font-weight: 500; color: var(--fg-secondary); - transition: color .1s, background .1s; + transition: + color 0.1s, + background 0.1s; } .nav-links a:hover { 
color: var(--fg); @@ -236,25 +239,29 @@ pre { .nav-auth { display: flex; - gap: .5rem; + gap: 0.5rem; align-items: center; - font-size: .8125rem; + font-size: 0.8125rem; +} +.nav-auth .auth-user { + color: var(--fg-muted); +} +.nav-auth form { + display: inline; } -.nav-auth .auth-user { color: var(--fg-muted); } -.nav-auth form { display: inline; } .nav-auth button { background: none; border: none; color: var(--accent); cursor: pointer; - font-size: .8125rem; + font-size: 0.8125rem; font-family: inherit; } -.nav-auth button:hover { color: var(--accent-hover); } +.nav-auth button:hover { + color: var(--accent-hover); +} -/* ================================================================ - Footer - ================================================================ */ +/* Footer */ .footer { display: flex; align-items: center; @@ -262,38 +269,36 @@ pre { height: 2.75rem; border-top: 1px solid var(--border); color: var(--fg-faint); - font-size: .75rem; + font-size: 0.75rem; margin-top: auto; } -/* ================================================================ - Typography - ================================================================ */ +/* Typography */ h1 { font-size: 1.5rem; font-weight: 700; - letter-spacing: -.03em; + letter-spacing: -0.03em; margin-bottom: 1.25rem; color: var(--fg); } h2 { font-size: 1rem; font-weight: 600; - letter-spacing: -.01em; - margin: 1.75rem 0 .75rem; + letter-spacing: -0.01em; + margin: 1.75rem 0 0.75rem; color: var(--fg); } -h2:first-child { margin-top: 0; } +h2:first-child { + margin-top: 0; +} h3 { - font-size: .875rem; + font-size: 0.875rem; font-weight: 600; - margin: 1rem 0 .5rem; + margin: 1rem 0 0.5rem; color: var(--fg); } -/* ================================================================ - Cards - ================================================================ */ +/* Cards */ .card { background: var(--surface); border: 1px solid var(--border); @@ -302,76 +307,85 @@ h3 { overflow: hidden; } .card-header { - padding: 
.75rem 1rem; + padding: 0.75rem 1rem; border-bottom: 1px solid var(--border); font-weight: 600; - font-size: .8125rem; + font-size: 0.8125rem; color: var(--fg-secondary); } .card-body { padding: 1rem; } -/* ================================================================ - Dashboard Grid - ================================================================ */ +/* Dashboard Grid */ .dashboard-grid { display: grid; grid-template-columns: 1fr 1fr; gap: 1.5rem; } -/* ================================================================ - Stats - ================================================================ */ +/* Stats */ .stats-grid { display: grid; grid-template-columns: repeat(auto-fill, minmax(140px, 1fr)); - gap: .5rem; + gap: 0.5rem; margin-bottom: 1.5rem; } .stat-card { background: var(--surface); border: 1px solid var(--border); border-radius: var(--radius); - padding: .875rem 1rem; + padding: 0.875rem 1rem; box-shadow: var(--shadow-xs); } .stat-value { font-size: 1.625rem; font-weight: 700; - letter-spacing: -.03em; + letter-spacing: -0.03em; line-height: 1.2; color: var(--fg); } .stat-label { - font-size: .6875rem; + font-size: 0.6875rem; font-weight: 500; color: var(--fg-muted); text-transform: uppercase; - letter-spacing: .05em; - margin-top: .25rem; + letter-spacing: 0.05em; + margin-top: 0.25rem; } -.stat-value-green { color: var(--green-fg); } -.stat-value-red { color: var(--red-fg); } -.stat-value-yellow { color: var(--amber-fg); } +.stat-value-green { + color: var(--green-fg); +} +.stat-value-red { + color: var(--red-fg); +} +.stat-value-yellow { + color: var(--amber-fg); +} .success-rate { display: inline-flex; align-items: center; - padding: .125em .5em; + padding: 0.125em 0.5em; border-radius: 999px; - font-size: .8125rem; + font-size: 0.8125rem; font-weight: 700; } -.success-rate-high { background: var(--green-bg); color: var(--green-fg); } -.success-rate-mid { background: var(--amber-bg); color: var(--amber-fg); } -.success-rate-low { background: 
var(--red-bg); color: var(--red-fg); } +.success-rate-high { + background: var(--green-bg); + color: var(--green-fg); +} +.success-rate-mid { + background: var(--amber-bg); + color: var(--amber-fg); +} +.success-rate-low { + background: var(--red-bg); + color: var(--red-fg); +} -/* ================================================================ - Tables - ================================================================ */ +/* Tables */ .table-wrap { background: var(--surface); border: 1px solid var(--border); @@ -383,66 +397,100 @@ h3 { table { width: 100%; border-collapse: collapse; - font-size: .8125rem; + font-size: 0.8125rem; } -th, td { - padding: .5625rem .75rem; +th, +td { + padding: 0.5625rem 0.75rem; text-align: left; border-bottom: 1px solid var(--border-subtle); } th { background: var(--bg-subtle); font-weight: 600; - font-size: .6875rem; + font-size: 0.6875rem; text-transform: uppercase; - letter-spacing: .04em; + letter-spacing: 0.04em; color: var(--fg-muted); border-bottom-color: var(--border); } -tbody tr:last-child td { border-bottom: none; } -tbody tr:hover { background: var(--surface-hover); } +tbody tr:last-child td { + border-bottom: none; +} +tbody tr:hover { + background: var(--surface-hover); +} -.table-responsive { overflow-x: auto; } +.table-responsive { + overflow-x: auto; +} -/* ================================================================ - Badges / Status - ================================================================ */ +/* Badges / Status */ .badge { display: inline-flex; align-items: center; height: 1.375rem; - padding: 0 .5rem; + padding: 0 0.5rem; border-radius: 999px; - font-size: .6875rem; + font-size: 0.6875rem; font-weight: 600; text-transform: capitalize; white-space: nowrap; } -.badge-completed { background: var(--green-bg); color: var(--green-fg); } -.badge-failed { background: var(--red-bg); color: var(--red-fg); } -.badge-running { background: var(--amber-bg); color: var(--amber-fg); } -.badge-pending { background: 
var(--sky-bg); color: var(--sky-fg); } -.badge-cancelled { background: var(--bg-subtle); color: var(--fg-faint); } +.badge-completed { + background: var(--green-bg); + color: var(--green-fg); +} +.badge-failed { + background: var(--red-bg); + color: var(--red-fg); +} +.badge-running { + background: var(--amber-bg); + color: var(--amber-fg); +} +.badge-pending { + background: var(--sky-bg); + color: var(--sky-fg); +} +.badge-cancelled { + background: var(--bg-subtle); + color: var(--fg-faint); +} .status-dot { display: inline-block; width: 8px; height: 8px; border-radius: 50%; - margin-right: .25rem; + margin-right: 0.25rem; +} +.status-dot-green { + background: var(--green-fg); +} +.status-dot-red { + background: var(--red-fg); +} +.status-dot-yellow { + background: var(--amber-fg); +} +.status-dot-gray { + background: var(--fg-faint); } -.status-dot-green { background: var(--green-fg); } -.status-dot-red { background: var(--red-fg); } -.status-dot-yellow { background: var(--amber-fg); } -.status-dot-gray { background: var(--fg-faint); } -.step-success { color: var(--green-fg); font-weight: 600; } -.step-failure { color: var(--red-fg); font-weight: 600; } +.step-success { + color: var(--green-fg); + font-weight: 600; +} +.step-failure { + color: var(--red-fg); + font-weight: 600; +} .empty { color: var(--fg-muted); - font-size: .8125rem; + font-size: 0.8125rem; padding: 2.5rem 1.5rem; text-align: center; background: var(--surface); @@ -450,23 +498,21 @@ tbody tr:hover { background: var(--surface-hover); } border-radius: var(--radius-lg); } .empty-title { - font-size: .875rem; + font-size: 0.875rem; font-weight: 600; color: var(--fg-secondary); - margin-bottom: .25rem; + margin-bottom: 0.25rem; } .empty-hint { color: var(--fg-faint); - font-size: .75rem; - margin-top: .375rem; + font-size: 0.75rem; + margin-top: 0.375rem; } -/* ================================================================ - Quick Actions - 
================================================================ */ +/* Quick Actions */ .quick-actions { display: flex; - gap: .5rem; + gap: 0.5rem; margin-bottom: 1.5rem; flex-wrap: wrap; } @@ -474,68 +520,77 @@ tbody tr:hover { background: var(--surface-hover); } display: inline-flex; align-items: center; height: 2rem; - padding: 0 .75rem; + padding: 0 0.75rem; background: var(--surface); border: 1px solid var(--border); border-radius: var(--radius-sm); - font-size: .8125rem; + font-size: 0.8125rem; font-weight: 500; color: var(--fg-secondary); - transition: border-color .1s, color .1s; + transition: + border-color 0.1s, + color 0.1s; } .quick-actions a:hover { border-color: var(--accent); color: var(--accent); } -/* ================================================================ - Queue Summary - ================================================================ */ +/* Queue Summary */ .queue-summary { display: inline-flex; align-items: center; - gap: .75rem; + gap: 0.75rem; height: 2rem; - padding: 0 .75rem; + padding: 0 0.75rem; background: var(--surface); border: 1px solid var(--border); border-radius: var(--radius-sm); - font-size: .8125rem; + font-size: 0.8125rem; margin-bottom: 1rem; } -/* ================================================================ - Breadcrumbs - ================================================================ */ +/* Breadcrumbs */ .breadcrumbs { display: flex; align-items: center; - gap: .3rem; + gap: 0.3rem; margin-bottom: 1rem; - font-size: .8125rem; + font-size: 0.8125rem; color: var(--fg-muted); } -.breadcrumbs a { color: var(--fg-muted); } -.breadcrumbs a:hover { color: var(--accent); } -.breadcrumbs .sep { color: var(--fg-faint); } -.breadcrumbs .current { color: var(--fg); font-weight: 600; } +.breadcrumbs a { + color: var(--fg-muted); +} +.breadcrumbs a:hover { + color: var(--accent); +} +.breadcrumbs .sep { + color: var(--fg-faint); +} +.breadcrumbs .current { + color: var(--fg); + font-weight: 600; +} -/* 
================================================================ - Detail Grid (key-value) - ================================================================ */ +/* Detail Grid (key-value) */ .detail-grid { display: grid; grid-template-columns: auto 1fr; - gap: .375rem 1rem; + gap: 0.375rem 1rem; margin-bottom: 1.25rem; - font-size: .8125rem; + font-size: 0.8125rem; +} +.detail-grid dt { + font-weight: 500; + color: var(--fg-muted); +} +.detail-grid dd { + color: var(--fg); + word-break: break-all; } -.detail-grid dt { font-weight: 500; color: var(--fg-muted); } -.detail-grid dd { color: var(--fg); word-break: break-all; } -/* ================================================================ - Tabs - ================================================================ */ +/* Tabs */ .tab-nav { display: flex; gap: 0; @@ -546,27 +601,29 @@ tbody tr:hover { background: var(--surface-hover); } display: inline-flex; align-items: center; height: 2.25rem; - padding: 0 .875rem; - font-size: .8125rem; + padding: 0 0.875rem; + font-size: 0.8125rem; font-weight: 500; color: var(--fg-muted); border-bottom: 2px solid transparent; margin-bottom: -1px; - transition: color .1s, border-color .1s; + transition: + color 0.1s, + border-color 0.1s; +} +.tab-nav a:hover { + color: var(--fg); } -.tab-nav a:hover { color: var(--fg); } .tab-nav a.active { color: var(--accent-fg); border-bottom-color: var(--accent); font-weight: 600; } -/* ================================================================ - Filter Form - ================================================================ */ +/* Filter Form */ .filter-form { display: flex; - gap: .625rem; + gap: 0.625rem; align-items: flex-end; margin-bottom: 1rem; flex-wrap: wrap; @@ -574,18 +631,18 @@ tbody tr:hover { background: var(--surface-hover); } .filter-form label { display: flex; flex-direction: column; - gap: .1875rem; - font-size: .75rem; + gap: 0.1875rem; + font-size: 0.75rem; font-weight: 500; color: var(--fg-muted); } .filter-form 
select, .filter-form input[type="text"] { height: 2rem; - padding: 0 .5rem; + padding: 0 0.5rem; border: 1px solid var(--border); border-radius: var(--radius-sm); - font-size: .8125rem; + font-size: 0.8125rem; font-family: inherit; background: var(--surface); color: var(--fg); @@ -598,49 +655,49 @@ tbody tr:hover { background: var(--surface-hover); } } .filter-form button { height: 2rem; - padding: 0 .875rem; + padding: 0 0.875rem; background: var(--accent); color: #fff; border: none; border-radius: var(--radius-sm); cursor: pointer; - font-size: .8125rem; + font-size: 0.8125rem; font-weight: 500; font-family: inherit; - transition: opacity .1s; + transition: opacity 0.1s; +} +.filter-form button:hover { + opacity: 0.9; } -.filter-form button:hover { opacity: 0.9; } -/* ================================================================ - Pagination - ================================================================ */ +/* Pagination */ .pagination { display: flex; align-items: center; - gap: .75rem; + gap: 0.75rem; margin-top: 1rem; - font-size: .8125rem; + font-size: 0.8125rem; color: var(--fg-muted); } .pagination a { display: inline-flex; align-items: center; height: 1.75rem; - padding: 0 .625rem; + padding: 0 0.625rem; border: 1px solid var(--border); border-radius: var(--radius-sm); - font-size: .8125rem; + font-size: 0.8125rem; color: var(--fg-secondary); - transition: border-color .1s, color .1s; + transition: + border-color 0.1s, + color 0.1s; } .pagination a:hover { border-color: var(--accent); color: var(--accent); } -/* ================================================================ - Forms - ================================================================ */ +/* Forms */ .form-card { background: var(--surface); border: 1px solid var(--border); @@ -651,26 +708,30 @@ tbody tr:hover { background: var(--surface-hover); } box-shadow: var(--shadow-xs); } -.form-group { margin-bottom: .75rem; } +.form-group { + margin-bottom: 0.75rem; +} .form-group label { 
display: block; - font-size: .75rem; + font-size: 0.75rem; font-weight: 600; color: var(--fg-secondary); - margin-bottom: .25rem; + margin-bottom: 0.25rem; } .form-group input, .form-group select, .form-group textarea { width: 100%; - padding: .4375rem .625rem; + padding: 0.4375rem 0.625rem; border: 1px solid var(--border); border-radius: var(--radius-sm); - font-size: .8125rem; + font-size: 0.8125rem; font-family: inherit; background: var(--bg); color: var(--fg); - transition: border-color .1s, box-shadow .1s; + transition: + border-color 0.1s, + box-shadow 0.1s; } .form-group input:focus, .form-group select:focus, @@ -679,17 +740,18 @@ tbody tr:hover { background: var(--surface-hover); } border-color: var(--accent); box-shadow: 0 0 0 3px var(--accent-bg); } -.form-group textarea { min-height: 56px; resize: vertical; } +.form-group textarea { + min-height: 56px; + resize: vertical; +} .form-group input[type="checkbox"] { width: auto; - margin-right: .375rem; + margin-right: 0.375rem; accent-color: var(--accent); } -/* ================================================================ - Buttons - ================================================================ */ +/* Buttons */ .btn { display: inline-flex; align-items: center; @@ -701,20 +763,25 @@ tbody tr:hover { background: var(--surface-hover); } border: none; border-radius: var(--radius-sm); cursor: pointer; - font-size: .8125rem; + font-size: 0.8125rem; font-weight: 500; font-family: inherit; - transition: opacity .1s; + transition: opacity 0.1s; white-space: nowrap; text-decoration: none; } -.btn:hover { opacity: 0.9; color: #fff; } +.btn:hover { + opacity: 0.9; + color: #fff; +} .btn:disabled { opacity: 0.4; cursor: not-allowed; } -.btn-danger { background: var(--red-fg); } +.btn-danger { + background: var(--red-fg); +} .btn-outline { background: transparent; color: var(--fg-secondary); @@ -726,28 +793,32 @@ tbody tr:hover { background: var(--surface-hover); } } .btn-small { height: 1.625rem; - padding: 0 
.5rem; - font-size: .75rem; + padding: 0 0.5rem; + font-size: 0.75rem; } .btn-ghost { background: transparent; color: var(--fg-muted); border: none; } -.btn-ghost:hover { color: var(--fg); } +.btn-ghost:hover { + color: var(--fg); +} -.btn-full { width: 100%; } -.btn + .btn { margin-left: .375rem; } +.btn-full { + width: 100%; +} +.btn + .btn { + margin-left: 0.375rem; +} -/* ================================================================ - Flash Messages - ================================================================ */ +/* Flash Messages */ .flash-message { - padding: .625rem .875rem; + padding: 0.625rem 0.875rem; border: 1px solid; border-radius: var(--radius); - margin-bottom: .75rem; - font-size: .8125rem; + margin-bottom: 0.75rem; + font-size: 0.8125rem; line-height: 1.5; } .flash-error { @@ -761,23 +832,23 @@ tbody tr:hover { background: var(--surface-hover); } color: var(--green-fg); } -/* ================================================================ - Details / Accordion - ================================================================ */ -details { margin-bottom: .75rem; } +/* Details / Accordion */ +details { + margin-bottom: 0.75rem; +} details summary { cursor: pointer; font-weight: 600; - font-size: .8125rem; + font-size: 0.8125rem; color: var(--accent-fg); - padding: .375rem 0; + padding: 0.375rem 0; user-select: none; } -details summary:hover { color: var(--accent-hover); } +details summary:hover { + color: var(--accent-hover); +} -/* ================================================================ - Wizard Steps - ================================================================ */ +/* Wizard Steps */ #wizard { max-width: 720px; margin: 0 auto; @@ -785,47 +856,51 @@ details summary:hover { color: var(--accent-hover); } .wizard-step h2 { display: flex; align-items: center; - gap: .5rem; + gap: 0.5rem; } .wizard-step h2::before { display: none; } .wizard-hint { color: var(--fg-muted); - font-size: .875rem; + font-size: 0.875rem; margin-bottom: 
1rem; } .wizard-actions { display: flex; - gap: .375rem; + gap: 0.375rem; margin-top: 1rem; } -.form-status { margin-top: .75rem; } +.form-status { + margin-top: 0.75rem; +} .form-card-wide { max-width: 640px; margin-left: auto; margin-right: auto; } -/* ================================================================ - Action Bars - ================================================================ */ +/* Action Bars */ .action-bar { margin-bottom: 1rem; } -/* ================================================================ - Utility Classes - ================================================================ */ -.text-muted { color: var(--fg-muted); } -.text-sm { font-size: .8125rem; } -.text-center { text-align: center; } +/* Utility Classes */ +.text-muted { + color: var(--fg-muted); +} +.text-sm { + font-size: 0.8125rem; +} +.text-center { + text-align: center; +} .inline-input { width: 100%; - padding: .25rem .4rem; + padding: 0.25rem 0.4rem; border: 1px solid var(--border); border-radius: 4px; - font-size: .85rem; + font-size: 0.85rem; background: var(--bg); color: var(--fg); font-family: inherit; @@ -835,16 +910,16 @@ details summary:hover { color: var(--accent-hover); } border-color: var(--accent); box-shadow: 0 0 0 2px var(--accent-bg); } -.outputs-detail { margin-top: .75rem; } +.outputs-detail { + margin-top: 0.75rem; +} .outputs-list { - margin: .5rem 0 0 1.5rem; - font-size: .85rem; + margin: 0.5rem 0 0 1.5rem; + font-size: 0.85rem; line-height: 1.6; } -/* ================================================================ - Loading Spinner - ================================================================ */ +/* Loading Spinner */ .spinner { display: inline-block; width: 1em; @@ -852,17 +927,17 @@ details summary:hover { color: var(--accent-hover); } border: 2px solid var(--border); border-top-color: var(--accent); border-radius: 50%; - animation: spin .6s linear infinite; + animation: spin 0.6s linear infinite; vertical-align: middle; - margin-right: 
.375rem; + margin-right: 0.375rem; } @keyframes spin { - to { transform: rotate(360deg); } + to { + transform: rotate(360deg); + } } -/* ================================================================ - Login Page - ================================================================ */ +/* Login Page */ .login-container { display: flex; align-items: center; @@ -876,50 +951,71 @@ details summary:hover { color: var(--accent-hover); } } .login-card h1 { text-align: center; - margin-bottom: .25rem; + margin-bottom: 0.25rem; } .login-subtitle { text-align: center; color: var(--fg-muted); - font-size: .875rem; + font-size: 0.875rem; margin-bottom: 1.5rem; } -.login-form { max-width: 100%; } +.login-form { + max-width: 100%; +} -/* ================================================================ - Section Headers - ================================================================ */ +/* Section Headers */ .section-header { display: flex; align-items: center; justify-content: space-between; - margin: 1.5rem 0 .75rem; + margin: 1.5rem 0 0.75rem; } .section-header h2 { margin: 0; } -/* ================================================================ - Responsive - ================================================================ */ +/* Responsive */ @media (max-width: 768px) { .navbar { - gap: .5rem; + gap: 0.5rem; flex-wrap: wrap; height: auto; - padding: .5rem 1rem; + padding: 0.5rem 1rem; + } + .nav-links { + gap: 0; + overflow-x: auto; + } + .container { + padding: 1rem; + } + .stats-grid { + grid-template-columns: repeat(2, 1fr); + } + .dashboard-grid { + grid-template-columns: 1fr; + } + .filter-form { + flex-direction: column; + align-items: stretch; + } + th, + td { + padding: 0.375rem 0.5rem; + } + .form-card { + max-width: 100%; } - .nav-links { gap: 0; overflow-x: auto; } - .container { padding: 1rem; } - .stats-grid { grid-template-columns: repeat(2, 1fr); } - .dashboard-grid { grid-template-columns: 1fr; } - .filter-form { flex-direction: column; align-items: 
stretch; } - th, td { padding: .375rem .5rem; } - .form-card { max-width: 100%; } } @media (max-width: 480px) { - .stats-grid { grid-template-columns: 1fr 1fr; } - .quick-actions { flex-direction: column; } - .quick-actions a { justify-content: center; } + .stats-grid { + grid-template-columns: 1fr 1fr; + } + .quick-actions { + flex-direction: column; + } + .quick-actions a { + justify-content: center; + } } diff --git a/flake.nix b/flake.nix index ccdcca7..e4a80c6 100644 --- a/flake.nix +++ b/flake.nix @@ -85,16 +85,23 @@ checks = forAllSystems (system: let pkgs = nixpkgs.legacyPackages.${system}; + vmTests = { + # Split VM integration tests + service-startup = pkgs.callPackage ./nix/tests/startup.nix {inherit self;}; + basic-api = pkgs.callPackage ./nix/tests/basic-api.nix {inherit self;}; + auth-rbac = pkgs.callPackage ./nix/tests/auth-rbac.nix {inherit self;}; + api-crud = pkgs.callPackage ./nix/tests/api-crud.nix {inherit self;}; + features = pkgs.callPackage ./nix/tests/features.nix {inherit self;}; + webhooks = pkgs.callPackage ./nix/tests/webhooks.nix {inherit self;}; + e2e = pkgs.callPackage ./nix/tests/e2e.nix {inherit self;}; + declarative = pkgs.callPackage ./nix/tests/declarative.nix {inherit self;}; + }; in { - # Split VM integration tests - service-startup = pkgs.callPackage ./nix/tests/startup.nix {inherit self;}; - basic-api = pkgs.callPackage ./nix/tests/basic-api.nix {inherit self;}; - auth-rbac = pkgs.callPackage ./nix/tests/auth-rbac.nix {inherit self;}; - api-crud = pkgs.callPackage ./nix/tests/api-crud.nix {inherit self;}; - features = pkgs.callPackage ./nix/tests/features.nix {inherit self;}; - webhooks = pkgs.callPackage ./nix/tests/webhooks.nix {inherit self;}; - e2e = pkgs.callPackage ./nix/tests/e2e.nix {inherit self;}; - declarative = pkgs.callPackage ./nix/tests/declarative.nix {inherit self;}; + inherit (vmTests) service-startup basic-api auth-rbac api-crud features webhooks e2e declarative; + full = pkgs.symlinkJoin { + name = 
"vm-tests-full"; + paths = builtins.attrValues vmTests; + }; }); devShells = forAllSystems (system: let @@ -125,10 +132,20 @@ runtimeInputs = [ pkgs.alejandra pkgs.fd + pkgs.prettier + pkgs.deno + pkgs.taplo ]; text = '' + # Format Nix with Alejandra fd "$@" -t f -e nix -x alejandra -q '{}' + + # Format TOML with Taplo + fd "$@" -t f -e toml -x taplo fmt '{}' + + # Format CSS with Prettier + fd "$@" -t f -e css -x prettier --write '{}' ''; }); }; diff --git a/nix/demo-vm.nix b/nix/demo-vm.nix index 2c1f034..0296791 100644 --- a/nix/demo-vm.nix +++ b/nix/demo-vm.nix @@ -1,8 +1,14 @@ { - pkgs, self, + pkgs, + lib, }: let fc-packages = self.packages.${pkgs.stdenv.hostPlatform.system}; + + # Demo password file to demonstrate passwordFile option + # Password must be at least 12 characters with at least one uppercase letter + demoPasswordFile = pkgs.writeText "demo-password" "DemoPassword123!"; + nixos = pkgs.nixos ({ modulesPath, pkgs, @@ -11,26 +17,12 @@ imports = [ self.nixosModules.fc-ci (modulesPath + "/virtualisation/qemu-vm.nix") + ./vm-common.nix + + {config._module.args = {inherit self;};} ]; - ## VM hardware - virtualisation = { - memorySize = 2048; - cores = 2; - diskSize = 4096; - graphics = false; - - # Forward guest:3000 -> host:3000 so the dashboard is reachable - forwardPorts = [ - { - from = "host"; - host.port = 3000; - guest.port = 3000; - } - ]; - }; - - services.fc = { + services.fc-ci = { enable = true; package = fc-packages.fc-server; evaluatorPackage = fc-packages.fc-evaluator; @@ -49,9 +41,22 @@ signing.enabled = false; server = { # Bind to all interfaces so port forwarding works - host = "0.0.0.0"; + host = lib.mkForce "0.0.0.0"; port = 3000; - cors_permissive = true; + cors_permissive = lib.mkForce true; + }; + }; + + declarative.users = { + admin = { + email = "admin@localhost"; + password = "AdminPassword123!"; + role = "admin"; + }; + demo = { + email = "demo@localhost"; + role = "read-only"; + passwordFile = toString demoPasswordFile; }; }; 
}; @@ -89,18 +94,22 @@ psql -U fc -d fc -c "INSERT INTO api_keys (name, key_hash, role) VALUES ('demo-readonly', '$RO_HASH', 'read-only') ON CONFLICT DO NOTHING" 2>/dev/null || true echo "" - echo "===========================================" + echo "=====================================================" echo "" - echo " Dashboard: http://localhost:3000" - echo " Health: http://localhost:3000/health" - echo " API base: http://localhost:3000/api/v1" + echo " Dashboard: http://localhost:3000" + echo " Health: http://localhost:3000/health" + echo " API base: http://localhost:3000/api/v1" echo "" - echo " Admin key: fc_demo_admin_key" + echo " Web login: admin / AdminPassword123! (admin)" + echo " demo / DemoPassword123! (read-only)" + echo "" + echo " Admin API key: fc_demo_admin_key" echo " Read-only key: fc_demo_readonly_key" echo "" - echo " Login at http://localhost:3000/login" - echo " using the admin key above." - echo "===========================================" + echo " Login at http://localhost:3000/login using" + echo " the credentials or the API key provided above." + echo "" + echo "=====================================================" ''; }; @@ -122,21 +131,23 @@ # Show a helpful MOTD environment.etc."motd".text = '' - ┌──────────────────────────────────────────────┐ - │ Dashboard: http://localhost:3000 │ - │ API: http://localhost:3000/api/v1 │ - │ │ - │ Admin API key: fc_demo_admin_key │ - │ Read-only API key: fc_demo_readonly_key │ - │ │ - │ Useful commands: │ - │ $ systemctl status fc-server │ - │ $ journalctl -u fc-server -f │ - │ $ curl -sf localhost:3000/health | jq │ - │ $ curl -sf localhost:3000/metrics │ - │ │ - │ Press Ctrl-a x to quit QEMU. │ - └──────────────────────────────────────────────┘ + ┌─────────────────────────────────────────────────────────────┐ + │ Dashboard: http://localhost:3000 │ + │ API: http://localhost:3000/api/v1 │ + │ │ + │ Web login: admin / AdminPassword123! (admin) │ + │ demo / DemoPassword123! 
(read-only) │ + │ Admin API key: fc_demo_admin_key │ + │ Read-only API key: fc_demo_readonly_key │ + │ │ + │ Useful commands: │ + │ $ systemctl status fc-server │ + │ $ journalctl -u fc-server -f │ + │ $ curl -sf localhost:3000/health | jq │ + │ $ curl -sf localhost:3000/metrics │ + │ │ + │ Press Ctrl-a x to quit QEMU. │ + └─────────────────────────────────────────────────────────────┘ ''; system.stateVersion = "26.11"; diff --git a/nix/modules/nixos.nix b/nix/modules/nixos.nix index c5dcf63..7b22020 100644 --- a/nix/modules/nixos.nix +++ b/nix/modules/nixos.nix @@ -5,35 +5,72 @@ ... }: let inherit (lib.modules) mkIf mkDefault; - inherit (lib.options) mkOption mkEnableOption; - inherit (lib.types) bool str int package listOf submodule nullOr; - inherit (lib.attrsets) recursiveUpdate optionalAttrs; + inherit (lib.options) mkOption mkEnableOption literalExpression; + inherit (lib.types) bool str int package listOf submodule nullOr enum attrsOf; + inherit (lib.attrsets) recursiveUpdate optionalAttrs mapAttrsToList filterAttrs; inherit (lib.lists) optional map; - cfg = config.services.fc; + cfg = config.services.fc-ci; settingsFormat = pkgs.formats.toml {}; settingsType = settingsFormat.type; # Build the final settings by merging declarative config into settings - finalSettings = recursiveUpdate cfg.settings (optionalAttrs (cfg.declarative.projects != [] || cfg.declarative.apiKeys != []) { + finalSettings = recursiveUpdate cfg.settings (optionalAttrs (cfg.declarative.projects != [] || cfg.declarative.apiKeys != [] || cfg.declarative.users != {} || cfg.declarative.remoteBuilders != []) { declarative = { - projects = - map (p: { + projects = map (p: + filterAttrs (_: v: v != null) { name = p.name; repository_url = p.repositoryUrl; - description = p.description or null; - jobsets = - map (j: { + description = p.description; + jobsets = map (j: + filterAttrs (_: v: v != null) { name = j.name; nix_expression = j.nixExpression; enabled = j.enabled; flake_mode = j.flakeMode; 
check_interval = j.checkInterval; + state = j.state; + branch = j.branch; + scheduling_shares = j.schedulingShares; + inputs = map (i: + filterAttrs (_: v: v != null) { + name = i.name; + input_type = i.inputType; + value = i.value; + revision = i.revision; + }) + j.inputs; }) - p.jobsets; + p.jobsets; + notifications = + map (n: { + notification_type = n.notificationType; + config = n.config; + enabled = n.enabled; + }) + p.notifications; + webhooks = map (w: + filterAttrs (_: v: v != null) { + forge_type = w.forgeType; + secret_file = w.secretFile; + enabled = w.enabled; + }) + p.webhooks; + channels = + map (c: { + name = c.name; + jobset_name = c.jobsetName; + }) + p.channels; + members = + map (m: { + username = m.username; + role = m.role; + }) + p.members; }) - cfg.declarative.projects; + cfg.declarative.projects; api_keys = map (k: { @@ -42,6 +79,38 @@ role = k.role; }) cfg.declarative.apiKeys; + + users = mapAttrsToList (username: u: let + hasInlinePassword = u.password != null; + _ = + if hasInlinePassword + then builtins.throw "User '${username}' has inline password set. Use passwordFile instead to avoid plaintext passwords in the Nix store." + else null; + in + filterAttrs (_: v: v != null) { + inherit username; + email = u.email; + full_name = u.fullName; + password_file = u.passwordFile; + role = u.role; + enabled = u.enabled; + }) + cfg.declarative.users; + + remote_builders = map (b: + filterAttrs (_: v: v != null) { + name = b.name; + ssh_uri = b.sshUri; + systems = b.systems; + max_jobs = b.maxJobs; + speed_factor = b.speedFactor; + supported_features = b.supportedFeatures; + mandatory_features = b.mandatoryFeatures; + ssh_key_file = b.sshKeyFile; + public_host_key = b.publicHostKey; + enabled = b.enabled; + }) + cfg.declarative.remoteBuilders; }; }); @@ -52,7 +121,7 @@ enabled = mkOption { type = bool; default = true; - description = "Whether this jobset is enabled for evaluation."; + description = "Whether this jobset is enabled for evaluation. 
Deprecated: use `state` instead."; }; name = mkOption { @@ -62,6 +131,8 @@ nixExpression = mkOption { type = str; + default = "hydraJobs"; + example = literalExpression "packages // checks"; description = "Nix expression to evaluate (e.g. 'packages', 'checks', 'hydraJobs')."; }; @@ -76,6 +147,58 @@ default = 60; description = "Seconds between evaluation checks."; }; + + state = mkOption { + type = enum ["disabled" "enabled" "one_shot" "one_at_a_time"]; + default = "enabled"; + description = '' + Jobset scheduling state: + + * `disabled`: Jobset will not be evaluated + * `enabled`: Normal operation, evaluated according to checkInterval + * `one_shot`: Evaluated once, then automatically set to disabled + * `one_at_a_time`: Only one build can run at a time for this jobset + ''; + }; + + branch = mkOption { + type = nullOr str; + default = null; + description = "Git branch to track. Defaults to repository default branch."; + }; + + schedulingShares = mkOption { + type = int; + default = 100; + description = "Scheduling priority shares. 
Higher values = more priority."; + }; + + inputs = mkOption { + type = listOf (submodule { + options = { + name = mkOption { + type = str; + description = "Input name."; + }; + inputType = mkOption { + type = str; + default = "git"; + description = "Input type: git, string, boolean, path, or build."; + }; + value = mkOption { + type = str; + description = "Input value."; + }; + revision = mkOption { + type = nullOr str; + default = null; + description = "Git revision (for git inputs)."; + }; + }; + }); + default = []; + description = "Jobset inputs for parameterized evaluations."; + }; }; }; @@ -102,6 +225,87 @@ default = []; description = "Jobsets to create for this project."; }; + + notifications = mkOption { + type = listOf (submodule { + options = { + notificationType = mkOption { + type = str; + description = "Notification type: github_status, email, gitlab_status, gitea_status, run_command."; + }; + config = mkOption { + type = settingsType; + default = {}; + description = "Type-specific configuration."; + }; + enabled = mkOption { + type = bool; + default = true; + description = "Whether this notification is enabled."; + }; + }; + }); + default = []; + description = "Notification configurations for this project."; + }; + + webhooks = mkOption { + type = listOf (submodule { + options = { + forgeType = mkOption { + type = enum ["github" "gitea" "gitlab"]; + description = "Forge type for webhook."; + }; + secretFile = mkOption { + type = nullOr str; + default = null; + description = "Path to file containing webhook secret."; + }; + enabled = mkOption { + type = bool; + default = true; + description = "Whether this webhook is enabled."; + }; + }; + }); + default = []; + description = "Webhook configurations for this project."; + }; + + channels = mkOption { + type = listOf (submodule { + options = { + name = mkOption { + type = str; + description = "Channel name."; + }; + jobsetName = mkOption { + type = str; + description = "Name of the jobset this channel 
tracks."; + }; + }; + }); + default = []; + description = "Release channels for this project."; + }; + + members = mkOption { + type = listOf (submodule { + options = { + username = mkOption { + type = str; + description = "Username of the member."; + }; + role = mkOption { + type = enum ["member" "maintainer" "admin"]; + default = "member"; + description = "Project role for the member."; + }; + }; + }); + default = []; + description = "Project members with their roles."; + }; }; }; @@ -120,11 +324,13 @@ ''; }; + # FIXME: should be a list, ideally role = mkOption { type = str; default = "admin"; + example = "eval-jobset"; description = '' - Role: + Role, one of: * admin, * read-only, @@ -137,10 +343,131 @@ }; }; }; + + userOpts = { + options = { + enabled = mkOption { + type = bool; + default = true; + description = "Whether this user is enabled."; + }; + + email = mkOption { + type = str; + description = "User's email address."; + }; + + fullName = mkOption { + type = nullOr str; + default = null; + description = "Optional full name for the user."; + }; + + password = mkOption { + type = nullOr str; + default = null; + description = '' + Password provided inline (for dev/testing only). + For production, use {option}`passwordFile` instead. + ''; + }; + + passwordFile = mkOption { + type = nullOr str; + default = null; + description = '' + Path to a file containing the user's password. + Preferred for production deployments. + ''; + }; + + role = mkOption { + type = str; + default = "read-only"; + example = "eval-jobset"; + description = '' + Role, one of: + + * admin, + * read-only, + * create-projects, + * eval-jobset, + * cancel-build, + * restart-jobs, + * bump-to-front. 
+ ''; + }; + }; + }; + + remoteBuilderOpts = { + options = { + name = mkOption { + type = str; + description = "Unique name for this builder."; + }; + + sshUri = mkOption { + type = str; + example = "ssh://builder@builder.example.com"; + description = "SSH URI for connecting to the builder."; + }; + + systems = mkOption { + type = listOf str; + default = ["x86_64-linux"]; + description = "List of systems this builder supports."; + }; + + maxJobs = mkOption { + type = int; + default = 1; + description = "Maximum number of parallel jobs."; + }; + + speedFactor = mkOption { + type = int; + default = 1; + description = "Speed factor for scheduling (higher = faster builder)."; + }; + + supportedFeatures = mkOption { + type = listOf str; + default = []; + description = "List of supported features."; + }; + + mandatoryFeatures = mkOption { + type = listOf str; + default = []; + description = "List of mandatory features."; + }; + + sshKeyFile = mkOption { + type = nullOr str; + default = null; + description = "Path to SSH private key file."; + }; + + publicHostKey = mkOption { + type = nullOr str; + default = null; + description = "SSH public host key for verification."; + }; + + enabled = mkOption { + type = bool; + default = true; + description = "Whether this builder is enabled."; + }; + }; + }; in { - options.services.fc = { + options.services.fc-ci = { enable = mkEnableOption "FC CI system"; + # TODO: could we use `mkPackageOption` here? + # Also for the options below package = mkOption { type = package; description = "The FC server package."; @@ -221,6 +548,47 @@ in { } ]; }; + + users = mkOption { + type = attrsOf (submodule userOpts); + default = {}; + description = '' + Declarative user definitions. The attribute name is the username. + Users are upserted on every server startup. + + Use {option}`passwordFile` with a secrets manager for production deployments. 
+ ''; + example = { + admin = { + email = "admin@example.com"; + passwordFile = "/run/secrets/fc-admin-password"; + role = "admin"; + }; + readonly = { + email = "readonly@example.com"; + passwordFile = "/run/secrets/fc-readonly-password"; + role = "read-only"; + }; + }; + }; + + remoteBuilders = mkOption { + type = listOf (submodule remoteBuilderOpts); + default = []; + description = '' + Declarative remote builder definitions. Builders are upserted on every + server startup for distributed builds. + ''; + example = [ + { + name = "builder1"; + sshUri = "ssh://builder@builder.example.com"; + systems = ["x86_64-linux" "aarch64-linux"]; + maxJobs = 4; + speedFactor = 2; + } + ]; + }; }; database = { @@ -236,7 +604,7 @@ in { }; evaluator = { - enable = mkEnableOption "FC evaluator (git polling and nix evaluation)"; + enable = mkEnableOption "FC evaluator (Git polling and nix evaluation)"; }; queueRunner = { @@ -245,6 +613,15 @@ in { }; config = mkIf cfg.enable { + assertions = + mapAttrsToList ( + username: user: { + assertion = user.password != null || user.passwordFile != null; + message = "User '${username}' must have either 'password' or 'passwordFile' set."; + } + ) + cfg.declarative.users; + users.users.fc = { isSystemUser = true; group = "fc"; @@ -265,7 +642,7 @@ in { ]; }; - services.fc.settings = mkDefault { + services.fc-ci.settings = mkDefault { database.url = "postgresql:///fc?host=/run/postgresql"; server.host = "127.0.0.1"; server.port = 3000; diff --git a/nix/tests/api-crud.nix b/nix/tests/api-crud.nix index 8af1685..704656f 100644 --- a/nix/tests/api-crud.nix +++ b/nix/tests/api-crud.nix @@ -10,7 +10,8 @@ pkgs.testers.nixosTest { self.nixosModules.fc-ci ../vm-common.nix ]; - _module.args.self = self; + + config._module.args = {inherit self;}; }; # API CRUD tests: dashboard content, project/jobset/evaluation/build/channel/builder @@ -18,6 +19,7 @@ pkgs.testers.nixosTest { testScript = '' import hashlib import json + import re machine.start() 
machine.wait_for_unit("postgresql.service") diff --git a/nix/tests/auth-rbac.nix b/nix/tests/auth-rbac.nix index 5b2e49f..ac2ef48 100644 --- a/nix/tests/auth-rbac.nix +++ b/nix/tests/auth-rbac.nix @@ -133,7 +133,7 @@ pkgs.testers.nixosTest { ) assert code.strip() == "200", f"Expected 200, got {code.strip()}" - ## 3C: API key lifecycle test + # API key lifecycle test with subtest("API key lifecycle: create, use, delete, verify 401"): # Create a new key via admin API result = machine.succeed( @@ -173,7 +173,7 @@ pkgs.testers.nixosTest { ) assert code.strip() == "401", f"Expected 401 after key deletion, got {code.strip()}" - # ---- 3D: CRUD lifecycle test ---- + # CRUD lifecycle test with subtest("CRUD lifecycle: project -> jobset -> list -> delete -> 404"): # Create project result = machine.succeed( @@ -215,7 +215,7 @@ pkgs.testers.nixosTest { ) assert code.strip() == "404", f"Expected 404 after deletion, got {code.strip()}" - # ---- 3E: Edge case tests ---- + # Edge case tests with subtest("Duplicate project name returns 409"): machine.succeed( "curl -sf -X POST http://127.0.0.1:3000/api/v1/projects " @@ -249,7 +249,7 @@ pkgs.testers.nixosTest { ) assert code.strip() == "400", f"Expected 400 for XSS name, got {code.strip()}" - # ---- 3F: Security fuzzing ---- + # Security fuzzing with subtest("SQL injection in search query returns 0 results"): result = machine.succeed( "curl -sf 'http://127.0.0.1:3000/api/v1/search?q=test%27%20OR%201%3D1%20--' | jq '.projects | length'" @@ -291,7 +291,7 @@ pkgs.testers.nixosTest { ) assert code.strip() == "400", f"Expected 400 for null bytes, got {code.strip()}" - # ---- 3G: Dashboard page smoke tests ---- + # Dashboard page smoke tests with subtest("All dashboard pages return 200"): pages = ["/", "/projects", "/evaluations", "/builds", "/queue", "/channels", "/admin", "/login"] for page in pages: diff --git a/nix/tests/basic-api.nix b/nix/tests/basic-api.nix index 1b31d36..bebe266 100644 --- a/nix/tests/basic-api.nix +++ 
b/nix/tests/basic-api.nix @@ -138,12 +138,14 @@ pkgs.testers.nixosTest { with subtest("Builds list with combined filters returns 200"): machine.succeed("curl -sf 'http://127.0.0.1:3000/api/v1/builds?system=x86_64-linux&status=pending&job_name=test' | jq '.items'") - # Metrics endpoint - with subtest("Metrics endpoint returns prometheus format"): - result = machine.succeed("curl -sf http://127.0.0.1:3000/metrics") - assert "fc_builds_total" in result, "Missing fc_builds_total in metrics" - assert "fc_projects_total" in result, "Missing fc_projects_total in metrics" - assert "fc_evaluations_total" in result, "Missing fc_evaluations_total in metrics" + # Prometheus endpoint + with subtest("Prometheus endpoint returns prometheus format"): + result = machine.succeed("curl -sf http://127.0.0.1:3000/prometheus") + machine.succeed(f"echo '{result[:1000]}' > /tmp/metrics.txt") + machine.succeed("echo 'PROMETHEUS OUTPUT:' && cat /tmp/metrics.txt") + assert "fc_builds_total" in result, f"Missing fc_builds_total. 
Got: {result[:300]}" + assert "fc_projects_total" in result, "Missing fc_projects_total in prometheus metrics" + assert "fc_evaluations_total" in result, "Missing fc_evaluations_total in prometheus metrics" # CORS: default restrictive (no Access-Control-Allow-Origin for cross-origin) with subtest("Default CORS does not allow arbitrary origins"): diff --git a/nix/tests/declarative.nix b/nix/tests/declarative.nix new file mode 100644 index 0000000..0a51072 --- /dev/null +++ b/nix/tests/declarative.nix @@ -0,0 +1,471 @@ +{ + pkgs, + self, +}: let + fc-packages = self.packages.${pkgs.stdenv.hostPlatform.system}; + + # Password files for testing passwordFile option + # Passwords must be at least 12 characters with at least one uppercase letter + adminPasswordFile = pkgs.writeText "admin-password" "SecretAdmin123!"; + userPasswordFile = pkgs.writeText "user-password" "SecretUser123!"; + disabledPasswordFile = pkgs.writeText "disabled-password" "DisabledPass123!"; +in + pkgs.testers.nixosTest { + name = "fc-declarative"; + + nodes.machine = { + imports = [self.nixosModules.fc-ci]; + _module.args.self = self; + + programs.git.enable = true; + security.sudo.enable = true; + environment.systemPackages = with pkgs; [nix nix-eval-jobs zstd curl jq openssl]; + + services.fc-ci = { + enable = true; + package = fc-packages.fc-server; + evaluatorPackage = fc-packages.fc-evaluator; + queueRunnerPackage = fc-packages.fc-queue-runner; + migratePackage = fc-packages.fc-migrate-cli; + + server.enable = true; + evaluator.enable = true; + queueRunner.enable = true; + + settings = { + database.url = "postgresql:///fc?host=/run/postgresql"; + server = { + host = "127.0.0.1"; + port = 3000; + cors_permissive = false; + }; + gc.enabled = false; + logs.log_dir = "/var/lib/fc/logs"; + cache.enabled = true; + signing.enabled = false; + }; + + # Declarative users + declarative.users = { + # Admin user with passwordFile + decl-admin = { + email = "admin@test.local"; + passwordFile = toString 
adminPasswordFile; + role = "admin"; + }; + # Regular user with passwordFile + decl-user = { + email = "user@test.local"; + passwordFile = toString userPasswordFile; + role = "read-only"; + }; + # User with passwordFile + decl-user2 = { + email = "user2@test.local"; + passwordFile = toString userPasswordFile; + role = "read-only"; + }; + # Disabled user with passwordFile + decl-disabled = { + email = "disabled@test.local"; + passwordFile = toString disabledPasswordFile; + role = "read-only"; + enabled = false; + }; + }; + + # Declarative API keys + declarative.apiKeys = [ + { + name = "decl-admin-key"; + key = "fc_decl_admin"; + role = "admin"; + } + { + name = "decl-readonly-key"; + key = "fc_decl_readonly"; + role = "read-only"; + } + ]; + + # Declarative projects with various jobset states + declarative.projects = [ + { + name = "decl-project-1"; + repositoryUrl = "https://github.com/test/decl1"; + description = "First declarative project"; + jobsets = [ + { + name = "enabled-jobset"; + nixExpression = "packages"; + enabled = true; + flakeMode = true; + checkInterval = 300; + state = "enabled"; + } + { + name = "disabled-jobset"; + nixExpression = "disabled"; + state = "disabled"; + } + { + name = "oneshot-jobset"; + nixExpression = "oneshot"; + state = "one_shot"; + } + { + name = "oneatatime-jobset"; + nixExpression = "exclusive"; + state = "one_at_a_time"; + checkInterval = 60; + } + ]; + } + { + name = "decl-project-2"; + repositoryUrl = "https://github.com/test/decl2"; + jobsets = [ + { + name = "main"; + nixExpression = "."; + flakeMode = true; + } + ]; + } + ]; + }; + }; + + testScript = '' + machine.start() + machine.wait_for_unit("postgresql.service") + machine.wait_until_succeeds("sudo -u fc psql -U fc -d fc -c 'SELECT 1'", timeout=30) + machine.wait_for_unit("fc-server.service") + machine.wait_until_succeeds("curl -sf http://127.0.0.1:3000/health", timeout=30) + + # DECLARATIVE USERS + with subtest("Declarative users are created in database"): + 
result = machine.succeed( + "sudo -u fc psql -U fc -d fc -t -c \"SELECT COUNT(*) FROM users WHERE username LIKE 'decl-%'\"" + ) + count = int(result.strip()) + assert count == 4, f"Expected 4 declarative users, got {count}" + + with subtest("Declarative admin user has admin role"): + result = machine.succeed( + "sudo -u fc psql -U fc -d fc -t -c \"SELECT role FROM users WHERE username = 'decl-admin'\"" + ) + assert result.strip() == "admin", f"Expected admin role, got '{result.strip()}'" + + with subtest("Declarative regular users have read-only role"): + result = machine.succeed( + "sudo -u fc psql -U fc -d fc -t -c \"SELECT role FROM users WHERE username = 'decl-user'\"" + ) + assert result.strip() == "read-only", f"Expected read-only role, got '{result.strip()}'" + + with subtest("Declarative disabled user is disabled"): + result = machine.succeed( + "sudo -u fc psql -U fc -d fc -t -c \"SELECT enabled FROM users WHERE username = 'decl-disabled'\"" + ) + assert result.strip() == "f", f"Expected disabled (f), got '{result.strip()}'" + + with subtest("Declarative enabled users are enabled"): + result = machine.succeed( + "sudo -u fc psql -U fc -d fc -t -c \"SELECT enabled FROM users WHERE username = 'decl-admin'\"" + ) + assert result.strip() == "t", f"Expected enabled (t), got '{result.strip()}'" + + with subtest("Declarative users have password hashes set"): + result = machine.succeed( + "sudo -u fc psql -U fc -d fc -t -c \"SELECT password_hash FROM users WHERE username = 'decl-admin'\"" + ) + # Argon2 hashes start with $argon2 + assert result.strip().startswith("$argon2"), f"Expected argon2 hash, got '{result.strip()[:20]}...'" + + with subtest("User with passwordFile has correct password hash"): + # The password in the file is 'SecretAdmin123!' 
+ result = machine.succeed( + "sudo -u fc psql -U fc -d fc -t -c \"SELECT password_hash FROM users WHERE username = 'decl-admin'\"" + ) + assert len(result.strip()) > 50, "Password hash should be substantial length" + + with subtest("User with inline password has correct password hash"): + result = machine.succeed( + "sudo -u fc psql -U fc -d fc -t -c \"SELECT password_hash FROM users WHERE username = 'decl-user'\"" + ) + assert result.strip().startswith("$argon2"), f"Expected argon2 hash for inline password user, got '{result.strip()[:20]}...'" + + # DECLARATIVE USER WEB LOGIN + with subtest("Web login with declarative admin user succeeds"): + # Login via POST to /login with username/password + result = machine.succeed( + "curl -s -w '\\n%{http_code}' " + "-X POST http://127.0.0.1:3000/login " + "-d 'username=decl-admin&password=SecretAdmin123!'" + ) + lines = result.strip().split('\n') + code = lines[-1] + # Should redirect (302/303) on success + assert code in ("200", "302", "303"), f"Expected redirect on login, got {code}" + + with subtest("Web login with declarative user (passwordFile) succeeds"): + result = machine.succeed( + "curl -s -w '\\n%{http_code}' " + "-X POST http://127.0.0.1:3000/login " + "-d 'username=decl-user&password=SecretUser123!'" + ) + lines = result.strip().split('\n') + code = lines[-1] + assert code in ("200", "302", "303"), f"Expected redirect on login, got {code}" + + with subtest("Web login with declarative user2 (passwordFile) succeeds"): + result = machine.succeed( + "curl -s -w '\\n%{http_code}' " + "-X POST http://127.0.0.1:3000/login " + "-d 'username=decl-user2&password=SecretUser123!'" + ) + lines = result.strip().split('\n') + code = lines[-1] + assert code in ("200", "302", "303"), f"Expected redirect on login, got {code}" + + with subtest("Web login with wrong password fails"): + result = machine.succeed( + "curl -s -w '\\n%{http_code}' " + "-X POST http://127.0.0.1:3000/login " + "-d 
'username=decl-admin&password=wrongpassword'" + ) + lines = result.strip().split('\n') + code = lines[-1] + # Should return 401 for wrong password + assert code in ("401",), f"Expected 401 for wrong password, got {code}" + + with subtest("Web login with disabled user fails"): + result = machine.succeed( + "curl -s -w '\\n%{http_code}' " + "-X POST http://127.0.0.1:3000/login " + "-d 'username=decl-disabled&password=DisabledPass123!'" + ) + lines = result.strip().split('\n') + code = lines[-1] + # Disabled user should not be able to login (401 or 403) + assert code in ("401", "403"), f"Expected login failure for disabled user, got {code}" + + # DECLARATIVE API KEYS + with subtest("Declarative API keys are created"): + result = machine.succeed( + "sudo -u fc psql -U fc -d fc -t -c \"SELECT COUNT(*) FROM api_keys WHERE name LIKE 'decl-%'\"" + ) + count = int(result.strip()) + assert count == 2, f"Expected 2 declarative API keys, got {count}" + + with subtest("Declarative admin API key works"): + code = machine.succeed( + "curl -s -o /dev/null -w '%{http_code}' " + "-H 'Authorization: Bearer fc_decl_admin' " + "http://127.0.0.1:3000/api/v1/projects" + ) + assert code.strip() == "200", f"Expected 200, got {code.strip()}" + + with subtest("Declarative admin API key can create resources"): + code = machine.succeed( + "curl -s -o /dev/null -w '%{http_code}' " + "-X POST http://127.0.0.1:3000/api/v1/projects " + "-H 'Authorization: Bearer fc_decl_admin' " + "-H 'Content-Type: application/json' " + "-d '{\"name\": \"api-created\", \"repository_url\": \"https://example.com/api\"}'" + ) + assert code.strip() == "200", f"Expected 200, got {code.strip()}" + + with subtest("Declarative read-only API key works for GET"): + code = machine.succeed( + "curl -s -o /dev/null -w '%{http_code}' " + "-H 'Authorization: Bearer fc_decl_readonly' " + "http://127.0.0.1:3000/api/v1/projects" + ) + assert code.strip() == "200", f"Expected 200, got {code.strip()}" + + with subtest("Declarative 
read-only API key cannot create resources"): + code = machine.succeed( + "curl -s -o /dev/null -w '%{http_code}' " + "-X POST http://127.0.0.1:3000/api/v1/projects " + "-H 'Authorization: Bearer fc_decl_readonly' " + "-H 'Content-Type: application/json' " + "-d '{\"name\": \"should-fail\", \"repository_url\": \"https://example.com/fail\"}'" + ) + assert code.strip() == "403", f"Expected 403, got {code.strip()}" + + # DECLARATIVE PROJECTS + with subtest("Declarative projects are created"): + result = machine.succeed( + "curl -sf http://127.0.0.1:3000/api/v1/projects | jq '.items | map(select(.name | startswith(\"decl-project\"))) | length'" + ) + count = int(result.strip()) + assert count == 2, f"Expected 2 declarative projects, got {count}" + + with subtest("Declarative project has correct repository URL"): + result = machine.succeed( + "curl -sf http://127.0.0.1:3000/api/v1/projects | jq -r '.items[] | select(.name==\"decl-project-1\") | .repository_url'" + ) + assert result.strip() == "https://github.com/test/decl1", f"Got '{result.strip()}'" + + with subtest("Declarative project has description"): + result = machine.succeed( + "curl -sf http://127.0.0.1:3000/api/v1/projects | jq -r '.items[] | select(.name==\"decl-project-1\") | .description'" + ) + assert result.strip() == "First declarative project", f"Got '{result.strip()}'" + + # DECLARATIVE JOBSETS WITH STATES + with subtest("Declarative project has all jobsets"): + project_id = machine.succeed( + "curl -sf http://127.0.0.1:3000/api/v1/projects | jq -r '.items[] | select(.name==\"decl-project-1\") | .id'" + ).strip() + result = machine.succeed( + f"curl -sf http://127.0.0.1:3000/api/v1/projects/{project_id}/jobsets | jq '.items | length'" + ) + count = int(result.strip()) + assert count == 4, f"Expected 4 jobsets, got {count}" + + with subtest("Enabled jobset has state 'enabled'"): + project_id = machine.succeed( + "curl -sf http://127.0.0.1:3000/api/v1/projects | jq -r '.items[] | 
select(.name==\"decl-project-1\") | .id'" + ).strip() + result = machine.succeed( + f"curl -sf http://127.0.0.1:3000/api/v1/projects/{project_id}/jobsets | jq -r '.items[] | select(.name==\"enabled-jobset\") | .state'" + ) + assert result.strip() == "enabled", f"Expected 'enabled', got '{result.strip()}'" + + with subtest("Disabled jobset has state 'disabled'"): + project_id = machine.succeed( + "curl -sf http://127.0.0.1:3000/api/v1/projects | jq -r '.items[] | select(.name==\"decl-project-1\") | .id'" + ).strip() + result = machine.succeed( + f"curl -sf http://127.0.0.1:3000/api/v1/projects/{project_id}/jobsets | jq -r '.items[] | select(.name==\"disabled-jobset\") | .state'" + ) + assert result.strip() == "disabled", f"Expected 'disabled', got '{result.strip()}'" + + with subtest("One-shot jobset has state 'one_shot'"): + project_id = machine.succeed( + "curl -sf http://127.0.0.1:3000/api/v1/projects | jq -r '.items[] | select(.name==\"decl-project-1\") | .id'" + ).strip() + result = machine.succeed( + f"curl -sf http://127.0.0.1:3000/api/v1/projects/{project_id}/jobsets | jq -r '.items[] | select(.name==\"oneshot-jobset\") | .state'" + ) + assert result.strip() == "one_shot", f"Expected 'one_shot', got '{result.strip()}'" + + with subtest("One-at-a-time jobset has state 'one_at_a_time'"): + project_id = machine.succeed( + "curl -sf http://127.0.0.1:3000/api/v1/projects | jq -r '.items[] | select(.name==\"decl-project-1\") | .id'" + ).strip() + result = machine.succeed( + f"curl -sf http://127.0.0.1:3000/api/v1/projects/{project_id}/jobsets | jq -r '.items[] | select(.name==\"oneatatime-jobset\") | .state'" + ) + assert result.strip() == "one_at_a_time", f"Expected 'one_at_a_time', got '{result.strip()}'" + + with subtest("Disabled jobset is not in active_jobsets view"): + result = machine.succeed( + "sudo -u fc psql -U fc -d fc -t -c \"SELECT COUNT(*) FROM active_jobsets WHERE name = 'disabled-jobset'\"" + ) + count = int(result.strip()) + assert count == 0, 
f"Disabled jobset should not be in active_jobsets, got {count}" + + with subtest("Enabled jobsets are in active_jobsets view"): + result = machine.succeed( + "sudo -u fc psql -U fc -d fc -t -c \"SELECT COUNT(*) FROM active_jobsets WHERE name = 'enabled-jobset'\"" + ) + count = int(result.strip()) + assert count == 1, f"Enabled jobset should be in active_jobsets, got {count}" + + with subtest("One-shot jobset is in active_jobsets view"): + result = machine.succeed( + "sudo -u fc psql -U fc -d fc -t -c \"SELECT COUNT(*) FROM active_jobsets WHERE name = 'oneshot-jobset'\"" + ) + count = int(result.strip()) + assert count == 1, f"One-shot jobset should be in active_jobsets, got {count}" + + with subtest("One-at-a-time jobset is in active_jobsets view"): + result = machine.succeed( + "sudo -u fc psql -U fc -d fc -t -c \"SELECT COUNT(*) FROM active_jobsets WHERE name = 'oneatatime-jobset'\"" + ) + count = int(result.strip()) + assert count == 1, f"One-at-a-time jobset should be in active_jobsets, got {count}" + + with subtest("Jobset check_interval is correctly set"): + project_id = machine.succeed( + "curl -sf http://127.0.0.1:3000/api/v1/projects | jq -r '.items[] | select(.name==\"decl-project-1\") | .id'" + ).strip() + result = machine.succeed( + f"curl -sf http://127.0.0.1:3000/api/v1/projects/{project_id}/jobsets | jq -r '.items[] | select(.name==\"oneatatime-jobset\") | .check_interval'" + ) + assert result.strip() == "60", f"Expected check_interval 60, got '{result.strip()}'" + + # IDEMPOTENCY + with subtest("Bootstrap is idempotent - no duplicate users"): + result = machine.succeed( + "sudo -u fc psql -U fc -d fc -t -c \"SELECT COUNT(*) FROM users WHERE username = 'decl-admin'\"" + ) + count = int(result.strip()) + assert count == 1, f"Expected exactly 1 decl-admin user, got {count}" + + with subtest("Bootstrap is idempotent - no duplicate projects"): + result = machine.succeed( + "curl -sf http://127.0.0.1:3000/api/v1/projects | jq '.items | 
map(select(.name==\"decl-project-1\")) | length'" + ) + count = int(result.strip()) + assert count == 1, f"Expected exactly 1 decl-project-1, got {count}" + + with subtest("Bootstrap is idempotent - no duplicate API keys"): + result = machine.succeed( + "sudo -u fc psql -U fc -d fc -t -c \"SELECT COUNT(*) FROM api_keys WHERE name = 'decl-admin-key'\"" + ) + count = int(result.strip()) + assert count == 1, f"Expected exactly 1 decl-admin-key, got {count}" + + with subtest("Bootstrap is idempotent - no duplicate jobsets"): + project_id = machine.succeed( + "curl -sf http://127.0.0.1:3000/api/v1/projects | jq -r '.items[] | select(.name==\"decl-project-1\") | .id'" + ).strip() + result = machine.succeed( + f"curl -sf http://127.0.0.1:3000/api/v1/projects/{project_id}/jobsets | jq '.items | map(select(.name==\"enabled-jobset\")) | length'" + ) + count = int(result.strip()) + assert count == 1, f"Expected exactly 1 enabled-jobset, got {count}" + + # USER MANAGEMENT UI (admin-only) + with subtest("Users page requires admin access"): + # Test HTML /users endpoint + htmlResp = machine.succeed( + "curl -sf -H 'Authorization: Bearer fc_decl_admin' http://127.0.0.1:3000/users" + ) + assert "User Management" in htmlResp or "Users" in htmlResp + + # Non-admin should be denied access via API + machine.fail( + "curl -sf -H 'Authorization: Bearer fc_decl_readonly' http://127.0.0.1:3000/api/v1/users | grep 'decl-admin'" + ) + # Admin should have access via API + adminApiResp = machine.succeed( + "curl -sf -H 'Authorization: Bearer fc_decl_admin' http://127.0.0.1:3000/api/v1/users" + ) + assert "decl-admin" in adminApiResp, "Expected decl-admin in API response" + assert "decl-user" in adminApiResp, "Expected decl-user in API response" + + with subtest("Users API shows declarative users for admin"): + # Use the admin API key to list users instead of session-based auth + result = machine.succeed( + "curl -sf -H 'Authorization: Bearer fc_decl_admin' http://127.0.0.1:3000/api/v1/users" 
+ ) + assert "decl-admin" in result, f"Users API should return decl-admin. Got: {result[:500]}" + assert "decl-user" in result, f"Users API should return decl-user. Got: {result[:500]}" + + # STARRED JOBS PAGE + with subtest("Starred page exists and returns 200"): + code = machine.succeed( + "curl -s -o /dev/null -w '%{http_code}' http://127.0.0.1:3000/starred" + ) + assert code.strip() == "200", f"Expected 200, got {code.strip()}" + + with subtest("Starred page shows login prompt when not logged in"): + body = machine.succeed("curl -sf http://127.0.0.1:3000/starred") + assert "Login required" in body or "login" in body.lower(), "Starred page should prompt for login" + ''; + } diff --git a/nix/tests/e2e.nix b/nix/tests/e2e.nix index 40a4434..c45531a 100644 --- a/nix/tests/e2e.nix +++ b/nix/tests/e2e.nix @@ -18,6 +18,8 @@ pkgs.testers.nixosTest { testScript = '' import hashlib import json + import re + import time machine.start() machine.wait_for_unit("postgresql.service") @@ -52,30 +54,33 @@ pkgs.testers.nixosTest { machine.succeed("mkdir -p /var/lib/fc/test-repos") machine.succeed("git init --bare /var/lib/fc/test-repos/test-flake.git") + # Allow root to push to fc-owned repos (ownership changes after chown below) + machine.succeed("git config --global --add safe.directory /var/lib/fc/test-repos/test-flake.git") + # Create a working copy, write the flake, commit, push machine.succeed("mkdir -p /tmp/test-flake-work") machine.succeed("cd /tmp/test-flake-work && git init") machine.succeed("cd /tmp/test-flake-work && git config user.email 'test@fc' && git config user.name 'FC Test'") # Write a minimal flake.nix that builds a simple derivation - machine.succeed(""" - cat > /tmp/test-flake-work/flake.nix << 'FLAKE' - { - description = "FC CI test flake"; - outputs = { self, ... 
}: { - packages.x86_64-linux.hello = derivation { - name = "fc-test-hello"; - system = "x86_64-linux"; - builder = "/bin/sh"; - args = [ "-c" "echo hello > $out" ]; - }; - }; - } - FLAKE - """) + machine.succeed( + "cat > /tmp/test-flake-work/flake.nix << 'FLAKE'\n" + "{\n" + ' description = "FC CI test flake";\n' + ' outputs = { self, ... }: {\n' + ' packages.x86_64-linux.hello = derivation {\n' + ' name = "fc-test-hello";\n' + ' system = "x86_64-linux";\n' + ' builder = "/bin/sh";\n' + ' args = [ "-c" "echo hello > $out" ];\n' + " };\n" + " };\n" + "}\n" + "FLAKE\n" + ) machine.succeed("cd /tmp/test-flake-work && git add -A && git commit -m 'initial flake'") machine.succeed("cd /tmp/test-flake-work && git remote add origin /var/lib/fc/test-repos/test-flake.git") - machine.succeed("cd /tmp/test-flake-work && git push origin HEAD:refs/heads/main") + machine.succeed("cd /tmp/test-flake-work && git push origin HEAD:refs/heads/master") # Set ownership for fc user machine.succeed("chown -R fc:fc /var/lib/fc/test-repos") @@ -86,7 +91,7 @@ pkgs.testers.nixosTest { "curl -sf -X POST http://127.0.0.1:3000/api/v1/projects " f"{auth_header} " "-H 'Content-Type: application/json' " - "-d '{\"name\": \"e2e-test\", \"repository_url\": \"https://github.com/nixos/nixpkgs\"}' " + "-d '{\"name\": \"e2e-test\", \"repository_url\": \"file:///var/lib/fc/test-repos/test-flake.git\"}' " "| jq -r .id" ) e2e_project_id = result.strip() @@ -96,7 +101,7 @@ pkgs.testers.nixosTest { f"curl -sf -X POST http://127.0.0.1:3000/api/v1/projects/{e2e_project_id}/jobsets " f"{auth_header} " "-H 'Content-Type: application/json' " - "-d '{\"name\": \"packages\", \"nix_expression\": \"packages\", \"flake_mode\": true, \"enabled\": true, \"check_interval\": 5, \"branch\": null, \"scheduling_shares\": 100}' " + "-d '{\"name\": \"packages\", \"nix_expression\": \"packages\", \"flake_mode\": true, \"enabled\": true, \"check_interval\": 60}' " "| jq -r .id" ) e2e_jobset_id = result.strip() @@ -158,24 +163,24 
@@ pkgs.testers.nixosTest { ).strip()) # Push a new commit - machine.succeed(""" - cd /tmp/test-flake-work && \ - cat > flake.nix << 'FLAKE' - { - description = "FC CI test flake v2"; - outputs = { self, ... }: { - packages.x86_64-linux.hello = derivation { - name = "fc-test-hello-v2"; - system = "x86_64-linux"; - builder = "/bin/sh"; - args = [ "-c" "echo hello-v2 > $out" ]; - }; - }; - } - FLAKE - """) + machine.succeed( + "cd /tmp/test-flake-work && \\\n" + "cat > flake.nix << 'FLAKE'\n" + "{\n" + ' description = "FC CI test flake v2";\n' + ' outputs = { self, ... }: {\n' + ' packages.x86_64-linux.hello = derivation {\n' + ' name = "fc-test-hello-v2";\n' + ' system = "x86_64-linux";\n' + ' builder = "/bin/sh";\n' + ' args = [ "-c" "echo hello-v2 > $out" ];\n' + " };\n" + " };\n" + "}\n" + "FLAKE\n" + ) machine.succeed("cd /tmp/test-flake-work && git add -A && git commit -m 'v2 update'") - machine.succeed("cd /tmp/test-flake-work && git push origin HEAD:refs/heads/main") + machine.succeed("cd /tmp/test-flake-work && git push origin HEAD:refs/heads/master") # Wait for evaluator to detect and create new evaluation machine.wait_until_succeeds( @@ -385,43 +390,38 @@ pkgs.testers.nixosTest { # Create a new simple build to trigger notification # Push a trivial change to trigger a new evaluation - machine.succeed(""" - cd /tmp/test-flake-work && \ - cat > flake.nix << 'FLAKE' - { - description = "FC CI test flake notify"; - outputs = { self, ... 
}: { - packages.x86_64-linux.notify-test = derivation { - name = "fc-notify-test"; - system = "x86_64-linux"; - builder = "/bin/sh"; - args = [ "-c" "echo notify-test > $out" ]; - }; - }; - } - FLAKE - """) - machine.succeed("cd /tmp/test-flake-work && git add -A && git commit -m 'trigger notification test'") - machine.succeed("cd /tmp/test-flake-work && git push origin HEAD:refs/heads/main") - - # Wait for evaluator to create new evaluation - machine.wait_until_succeeds( - f"curl -sf 'http://127.0.0.1:3000/api/v1/evaluations?jobset_id={e2e_jobset_id}' " - "| jq '.items | length' | grep -v '^2$'", - timeout=60 + machine.succeed( + "cd /tmp/test-flake-work && \\\n" + "cat > flake.nix << 'FLAKE'\n" + "{\n" + ' description = "FC CI test flake notify";\n' + ' outputs = { self, ... }: {\n' + ' packages.x86_64-linux.notify-test = derivation {\n' + ' name = "fc-notify-test";\n' + ' system = "x86_64-linux";\n' + ' builder = "/bin/sh";\n' + ' args = [ "-c" "echo notify-test > $out" ];\n' + " };\n" + " };\n" + "}\n" + "FLAKE\n" ) + machine.succeed("cd /tmp/test-flake-work && git add -A && git commit -m 'trigger notification test'") + machine.succeed("cd /tmp/test-flake-work && git push origin HEAD:refs/heads/master") - # Get the new build ID - notify_build_id = machine.succeed( - "curl -sf 'http://127.0.0.1:3000/api/v1/builds?job_name=notify-test' | jq -r '.items[0].id'" - ).strip() - - # Wait for the build to complete + # Wait for the notify-test build to complete machine.wait_until_succeeds( - f"curl -sf http://127.0.0.1:3000/api/v1/builds/{notify_build_id} | jq -e 'select(.status==\"completed\")'", + "curl -sf 'http://127.0.0.1:3000/api/v1/builds?job_name=notify-test' " + "| jq -e '.items[] | select(.status==\"completed\")'", timeout=120 ) + # Get the build ID + notify_build_id = machine.succeed( + "curl -sf 'http://127.0.0.1:3000/api/v1/builds?job_name=notify-test' " + "| jq -r '.items[] | select(.status==\"completed\") | .id' | head -1" + ).strip() + # Wait a bit for 
notification to dispatch time.sleep(5) @@ -455,43 +455,38 @@ pkgs.testers.nixosTest { with subtest("Signed builds have valid signatures"): # Create a new build to test signing - machine.succeed(""" - cd /tmp/test-flake-work && \ - cat > flake.nix << 'FLAKE' - { - description = "FC CI test flake signing"; - outputs = { self, ... }: { - packages.x86_64-linux.sign-test = derivation { - name = "fc-sign-test"; - system = "x86_64-linux"; - builder = "/bin/sh"; - args = [ "-c" "echo signed-build > $out" ]; - }; - }; - } - FLAKE - """) - machine.succeed("cd /tmp/test-flake-work && git add -A && git commit -m 'trigger signing test'") - machine.succeed("cd /tmp/test-flake-work && git push origin HEAD:refs/heads/main") - - # Wait for evaluation - machine.wait_until_succeeds( - f"curl -sf 'http://127.0.0.1:3000/api/v1/evaluations?jobset_id={e2e_jobset_id}' " - "| jq '.items | length' | grep -v '^[23]$'", - timeout=60 + machine.succeed( + "cd /tmp/test-flake-work && \\\n" + "cat > flake.nix << 'FLAKE'\n" + "{\n" + ' description = "FC CI test flake signing";\n' + ' outputs = { self, ... 
}: {\n' + ' packages.x86_64-linux.sign-test = derivation {\n' + ' name = "fc-sign-test";\n' + ' system = "x86_64-linux";\n' + ' builder = "/bin/sh";\n' + ' args = [ "-c" "echo signed-build > $out" ];\n' + " };\n" + " };\n" + "}\n" + "FLAKE\n" ) + machine.succeed("cd /tmp/test-flake-work && git add -A && git commit -m 'trigger signing test'") + machine.succeed("cd /tmp/test-flake-work && git push origin HEAD:refs/heads/master") - # Get the sign-test build - sign_build_id = machine.succeed( - "curl -sf 'http://127.0.0.1:3000/api/v1/builds?job_name=sign-test' | jq -r '.items[0].id'" - ).strip() - - # Wait for build to complete + # Wait for the sign-test build to complete machine.wait_until_succeeds( - f"curl -sf http://127.0.0.1:3000/api/v1/builds/{sign_build_id} | jq -e 'select(.status==\"completed\")'", + "curl -sf 'http://127.0.0.1:3000/api/v1/builds?job_name=sign-test' " + "| jq -e '.items[] | select(.status==\"completed\")'", timeout=120 ) + # Get the sign-test build ID + sign_build_id = machine.succeed( + "curl -sf 'http://127.0.0.1:3000/api/v1/builds?job_name=sign-test' " + "| jq -r '.items[] | select(.status==\"completed\") | .id' | head -1" + ).strip() + # Verify the build has signed=true signed = machine.succeed( f"curl -sf http://127.0.0.1:3000/api/v1/builds/{sign_build_id} | jq -r .signed" @@ -529,24 +524,24 @@ pkgs.testers.nixosTest { machine.succeed("chown -R fc:fc /nix/var/nix/gcroots/per-user/fc") # Create a new build to test GC root creation - machine.succeed(""" - cd /tmp/test-flake-work && \ - cat > flake.nix << 'FLAKE' - { - description = "FC CI test flake gc"; - outputs = { self, ... }: { - packages.x86_64-linux.gc-test = derivation { - name = "fc-gc-test"; - system = "x86_64-linux"; - builder = "/bin/sh"; - args = [ "-c" "echo gc-test > $out" ]; - }; - }; - } - FLAKE - """) + machine.succeed( + "cd /tmp/test-flake-work && \\\n" + "cat > flake.nix << 'FLAKE'\n" + "{\n" + ' description = "FC CI test flake gc";\n' + ' outputs = { self, ... 
}: {\n' + ' packages.x86_64-linux.gc-test = derivation {\n' + ' name = "fc-gc-test";\n' + ' system = "x86_64-linux";\n' + ' builder = "/bin/sh";\n' + ' args = [ "-c" "echo gc-test > $out" ];\n' + " };\n" + " };\n" + "}\n" + "FLAKE\n" + ) machine.succeed("cd /tmp/test-flake-work && git add -A && git commit -m 'trigger gc test'") - machine.succeed("cd /tmp/test-flake-work && git push origin HEAD:refs/heads/main") + machine.succeed("cd /tmp/test-flake-work && git push origin HEAD:refs/heads/master") # Wait for evaluation and build machine.wait_until_succeeds( @@ -561,25 +556,35 @@ pkgs.testers.nixosTest { # Verify GC root symlink was created # The symlink should be in /nix/var/nix/gcroots/per-user/fc/ and point to the build output - gc_roots = machine.succeed("find /nix/var/nix/gcroots/per-user/fc -type l 2>/dev/null || true").strip() - - # Check if any symlink points to our build output - if gc_roots: - found_root = False + # Wait for GC root to be created (polling with timeout) + def wait_for_gc_root(): + gc_roots = machine.succeed("find /nix/var/nix/gcroots/per-user/fc -type l 2>/dev/null || true").strip() + if not gc_roots: + return False for root in gc_roots.split('\n'): if root: target = machine.succeed(f"readlink -f {root} 2>/dev/null || true").strip() if target == gc_build_output: - found_root = True - break + return True + return False - # We might have GC roots, this is expected behavior - # The key thing is that the build output exists and is protected from GC - machine.succeed(f"test -e {gc_build_output}") - else: - # If no GC roots yet, at least verify the build output exists - # GC roots might be created asynchronously - machine.succeed(f"test -e {gc_build_output}") + # Poll for GC root creation (give queue-runner time to create it) + machine.wait_until_succeeds( + "test -e /nix/var/nix/gcroots/per-user/fc", + timeout=30 + ) + + # Wait for a symlink pointing to our build output to appear + import time + found = False + for _ in range(10): + if 
wait_for_gc_root(): + found = True + break + time.sleep(1) + + # Verify build output exists and is protected from GC + machine.succeed(f"test -e {gc_build_output}") with subtest("Declarative .fc.toml in repo auto-creates jobset"): # Add .fc.toml to the test repo with a new jobset definition @@ -594,7 +599,7 @@ FCTOML """) machine.succeed("cd /tmp/test-flake-work && git add -A && git commit -m 'add declarative config'") - machine.succeed("cd /tmp/test-flake-work && git push origin HEAD:refs/heads/main") + machine.succeed("cd /tmp/test-flake-work && git push origin HEAD:refs/heads/master") # Wait for evaluator to pick up the new commit and process declarative config machine.wait_until_succeeds( diff --git a/nix/tests/features.nix b/nix/tests/features.nix index 40e8c12..caaa6f6 100644 --- a/nix/tests/features.nix +++ b/nix/tests/features.nix @@ -16,6 +16,7 @@ pkgs.testers.nixosTest { # Feature tests: logging, CSS, setup wizard, probe, metrics improvements testScript = '' import hashlib + import re machine.start() machine.wait_for_unit("postgresql.service") diff --git a/nix/tests/s3-cache.nix b/nix/tests/s3-cache.nix new file mode 100644 index 0000000..86cacf4 --- /dev/null +++ b/nix/tests/s3-cache.nix @@ -0,0 +1,206 @@ +{ + pkgs, + self, +}: +pkgs.testers.nixosTest { + name = "fc-s3-cache-upload"; + + nodes.machine = { + imports = [ + self.nixosModules.fc-ci + ../vm-common.nix + ]; + _module.args.self = self; + + # Add MinIO for S3-compatible storage + services.minio = { + enable = true; + listenAddress = "127.0.0.1:9000"; + rootCredentialsFile = pkgs.writeText "minio-root-credentials" '' + MINIO_ROOT_USER=minioadmin + MINIO_ROOT_PASSWORD=minioadmin + ''; + }; + + # Configure FC to upload to the local MinIO instance + services.fc-ci = { + settings = { + cache_upload = { + enabled = true; + store_uri = "s3://fc-cache?endpoint=http://127.0.0.1:9000&region=us-east-1"; + s3 = { + region = "us-east-1"; + access_key_id = "minioadmin";
secret_access_key = "minioadmin"; + endpoint_url = "http://127.0.0.1:9000"; + use_path_style = true; + }; + }; + }; + }; + }; + + testScript = '' + import hashlib + import json + import time + + machine.start() + + # Wait for PostgreSQL + machine.wait_for_unit("postgresql.service") + machine.wait_until_succeeds("sudo -u fc psql -U fc -d fc -c 'SELECT 1'", timeout=30) + + # Wait for MinIO to be ready + machine.wait_for_unit("minio.service") + machine.wait_until_succeeds("curl -sf http://127.0.0.1:9000/minio/health/live", timeout=30) + + # Configure MinIO client and create bucket + machine.succeed("${pkgs.minio-client}/bin/mc alias set local http://127.0.0.1:9000 minioadmin minioadmin") + machine.succeed("${pkgs.minio-client}/bin/mc mb local/fc-cache") + machine.succeed("${pkgs.minio-client}/bin/mc policy set public local/fc-cache") + + machine.wait_for_unit("fc-server.service") + machine.wait_until_succeeds("curl -sf http://127.0.0.1:3000/health", timeout=30) + + # Seed an API key for write operations + api_token = "fc_testkey123" + api_hash = hashlib.sha256(api_token.encode()).hexdigest() + machine.succeed( + f"sudo -u fc psql -U fc -d fc -c \"INSERT INTO api_keys (name, key_hash, role) VALUES ('test', '{api_hash}', 'admin')\"" + ) + auth_header = f"-H 'Authorization: Bearer {api_token}'" + + # Create a test flake inside the VM + with subtest("Create bare git repo with test flake"): + machine.succeed("mkdir -p /var/lib/fc/test-repos") + machine.succeed("git init --bare /var/lib/fc/test-repos/s3-test-flake.git") + + # Create a working copy, write the flake, commit, push + machine.succeed("mkdir -p /tmp/s3-test-flake") + machine.succeed("cd /tmp/s3-test-flake && git init") + machine.succeed("cd /tmp/s3-test-flake && git config user.email 'test@fc' && git config user.name 'FC Test'") + + # Write a minimal flake.nix that builds a simple derivation + machine.succeed(""" + cat > /tmp/s3-test-flake/flake.nix << 'FLAKE' + { + description = "FC CI S3 cache test flake"; + 
outputs = { self, ... }: { + packages.x86_64-linux.s3-test = derivation { + name = "fc-s3-test"; + system = "x86_64-linux"; + builder = "/bin/sh"; + args = [ "-c" "echo s3-cache-test-content > $out" ]; + }; + }; + } + FLAKE + """) + machine.succeed("cd /tmp/s3-test-flake && git add -A && git commit -m 'initial flake'") + machine.succeed("cd /tmp/s3-test-flake && git remote add origin /var/lib/fc/test-repos/s3-test-flake.git") + machine.succeed("cd /tmp/s3-test-flake && git push origin HEAD:refs/heads/master") + machine.succeed("chown -R fc:fc /var/lib/fc/test-repos") + + # Create project + jobset + with subtest("Create S3 test project and jobset"): + result = machine.succeed( + "curl -sf -X POST http://127.0.0.1:3000/api/v1/projects " + f"{auth_header} " + "-H 'Content-Type: application/json' " + "-d '{\"name\": \"s3-test-project\", \"repository_url\": \"file:///var/lib/fc/test-repos/s3-test-flake.git\"}' " + "| jq -r .id" + ) + project_id = result.strip() + assert len(project_id) == 36, f"Expected UUID, got '{project_id}'" + + result = machine.succeed( + f"curl -sf -X POST http://127.0.0.1:3000/api/v1/projects/{project_id}/jobsets " + f"{auth_header} " + "-H 'Content-Type: application/json' " + "-d '{\"name\": \"packages\", \"nix_expression\": \"packages\", \"flake_mode\": true, \"enabled\": true, \"check_interval\": 60}' " + "| jq -r .id" + ) + jobset_id = result.strip() + assert len(jobset_id) == 36, f"Expected UUID for jobset, got '{jobset_id}'" + + # Wait for evaluator to create evaluation and builds + with subtest("Evaluator discovers and evaluates the flake"): + machine.wait_until_succeeds( + f"curl -sf 'http://127.0.0.1:3000/api/v1/evaluations?jobset_id={jobset_id}' " + "| jq -e '.items[] | select(.status==\"completed\")'", + timeout=90 + ) + + # Get the build ID + with subtest("Get build ID for s3-test job"): + build_id = machine.succeed( + "curl -sf 'http://127.0.0.1:3000/api/v1/builds?job_name=s3-test' | jq -r '.items[0].id'" + ).strip() + assert 
len(build_id) == 36, f"Expected UUID for build, got '{build_id}'" + + # Wait for queue runner to build it + with subtest("Queue runner builds pending derivation"): + machine.wait_until_succeeds( + f"curl -sf http://127.0.0.1:3000/api/v1/builds/{build_id} | jq -e 'select(.status==\"completed\")'", + timeout=120 + ) + + # Verify build completed successfully + with subtest("Build completed successfully"): + result = machine.succeed( + f"curl -sf http://127.0.0.1:3000/api/v1/builds/{build_id} | jq -r .status" + ).strip() + assert result == "completed", f"Expected completed status, got '{result}'" + + output_path = machine.succeed( + f"curl -sf http://127.0.0.1:3000/api/v1/builds/{build_id} | jq -r .build_output_path" + ).strip() + assert output_path.startswith("/nix/store/"), f"Expected /nix/store/ output path, got '{output_path}'" + + # Wait a bit for cache upload to complete (it's async after build) + with subtest("Wait for cache upload to complete"): + time.sleep(5) + + # Verify the build output was uploaded to S3 + with subtest("Build output was uploaded to S3 cache"): + # List objects in the S3 bucket + bucket_contents = machine.succeed("${pkgs.minio-client}/bin/mc ls --recursive local/fc-cache/") + + # Should have the .narinfo file and the .nar file + assert ".narinfo" in bucket_contents, f"Expected .narinfo file in bucket, got: {bucket_contents}" + assert ".nar" in bucket_contents, f"Expected .nar file in bucket, got: {bucket_contents}" + + # Verify we can download the narinfo from the S3 bucket + with subtest("Can download narinfo from S3 bucket"): + # Get the store hash from the output path + store_hash = output_path.split('/')[3].split('-')[0] + + # Try to get the narinfo from S3 + narinfo_content = machine.succeed( + f"curl -sf http://127.0.0.1:9000/fc-cache/{store_hash}.narinfo" + ) + assert "StorePath:" in narinfo_content, f"Expected StorePath in narinfo: {narinfo_content}" + assert "NarHash:" in narinfo_content, f"Expected NarHash in narinfo: 
{narinfo_content}" + + # Verify build log mentions cache upload + with subtest("Build log mentions cache upload"): + build_log = machine.succeed( + f"curl -sf http://127.0.0.1:3000/api/v1/builds/{build_id}/log" + ) + # The nix copy output should appear in the log or the system log + # We'll check that the cache upload was attempted by looking at system logs + journal_log = machine.succeed("journalctl -u fc-queue-runner --since '5 minutes ago' --no-pager") + assert "Pushed to binary cache" in journal_log or "nix copy" in journal_log, \ + f"Expected cache upload in logs: {journal_log}" + + # Cleanup + with subtest("Delete S3 test project"): + code = machine.succeed( + "curl -s -o /dev/null -w '%{http_code}' " + f"-X DELETE http://127.0.0.1:3000/api/v1/projects/{project_id} " + f"{auth_header}" + ) + assert code.strip() == "200", f"Expected 200 for project delete, got {code.strip()}" + ''; +} diff --git a/nix/vm-common.nix b/nix/vm-common.nix index ed582c1..1fa3640 100644 --- a/nix/vm-common.nix +++ b/nix/vm-common.nix @@ -1,25 +1,56 @@ -# Common VM configuration for FC integration tests { self, pkgs, + lib, ... }: let + inherit (lib.modules) mkDefault; fc-packages = self.packages.${pkgs.stdenv.hostPlatform.system}; in { # Common machine configuration for all FC integration tests config = { + ## VM hardware + virtualisation = { + memorySize = 2048; + cores = 2; + diskSize = 10000; + graphics = false; + + # Forward guest:3000 -> host:3000 so the dashboard is reachable + forwardPorts = [ + { + from = "host"; + host.port = 3000; + guest.port = 3000; + } + ]; + }; + + # Machine config programs.git.enable = true; security.sudo.enable = true; # Ensure nix and zstd are available for cache endpoints environment.systemPackages = with pkgs; [nix nix-eval-jobs zstd curl jq openssl]; - services.fc = { + # Enable Nix flakes and nix-command experimental features required by evaluator + nix.settings.experimental-features = ["nix-command" "flakes"]; + + # VM tests have no network. 
We need to disable substituters to prevent + # Nix from trying to contact cache.nixos.org and timing out each time. + nix.settings.substituters = lib.mkForce []; + + # Allow incoming requests on port 3000 to make the dashboard accessible from + # the host machine. + networking.firewall.allowedTCPPorts = [3000]; + + services.fc-ci = { enable = true; - package = fc-packages.fc-server; - evaluatorPackage = fc-packages.fc-evaluator; - queueRunnerPackage = fc-packages.fc-queue-runner; - migratePackage = fc-packages.fc-migrate-cli; + + package = mkDefault fc-packages.fc-server; + evaluatorPackage = mkDefault fc-packages.fc-evaluator; + queueRunnerPackage = mkDefault fc-packages.fc-queue-runner; + migratePackage = mkDefault fc-packages.fc-migrate-cli; server.enable = true; evaluator.enable = true; @@ -45,39 +76,47 @@ in { show_timestamps = true; }; - evaluator.poll_interval = 5; - evaluator.work_dir = "/var/lib/fc/evaluator"; - queue_runner.poll_interval = 3; - queue_runner.work_dir = "/var/lib/fc/queue-runner"; + evaluator = { + poll_interval = 5; + work_dir = "/var/lib/fc/evaluator"; + nix_timeout = 60; + }; - # Declarative bootstrap: project + API key created on startup - declarative = { - projects = [ - { - name = "declarative-project"; - repository_url = "https://github.com/test/declarative"; - description = "Bootstrap test project"; - jobsets = [ - { - name = "packages"; - nix_expression = "packages"; - enabled = true; - flake_mode = true; - check_interval = 300; - } - ]; - } - ]; - - api_keys = [ - { - name = "bootstrap-admin"; - key = "fc_bootstrap_key"; - role = "admin"; - } - ]; + queue_runner = { + poll_interval = 3; + work_dir = "/var/lib/fc/queue-runner"; }; }; + + # Declarative configuration for VM tests + # This is set outside of settings so the NixOS module can transform field names + declarative.apiKeys = [ + { + name = "bootstrap-admin"; + key = "fc_bootstrap_key"; + role = "admin"; + } + ]; + + # Declarative project for tests that expect bootstrapped 
data + # Jobset is disabled so evaluator won't try to fetch from GitHub + declarative.projects = [ + { + name = "declarative-project"; + repositoryUrl = "https://github.com/test/declarative"; + description = "Test declarative project"; + jobsets = [ + { + name = "packages"; + nixExpression = "packages"; + flakeMode = true; + enabled = true; + checkInterval = 3600; + state = "disabled"; # disabled: exists but won't be evaluated + } + ]; + } + ]; }; }; } diff --git a/nix/vm-test.nix b/nix/vm-test.nix deleted file mode 100644 index 05920ce..0000000 --- a/nix/vm-test.nix +++ /dev/null @@ -1,2216 +0,0 @@ -{ - pkgs, - fc-packages, - nixosModule, -}: -pkgs.testers.nixosTest { - name = "fc-integration"; - - nodes.machine = {pkgs, ...}: { - imports = [nixosModule]; - - services.fc = { - enable = true; - package = fc-packages.fc-server; - evaluatorPackage = fc-packages.fc-evaluator; - queueRunnerPackage = fc-packages.fc-queue-runner; - migratePackage = fc-packages.fc-migrate-cli; - - server.enable = true; - evaluator.enable = true; - queueRunner.enable = true; - - settings = { - database.url = "postgresql:///fc?host=/run/postgresql"; - server = { - host = "127.0.0.1"; - port = 3000; - cors_permissive = false; - }; - - gc.enabled = false; - logs.log_dir = "/var/lib/fc/logs"; - cache.enabled = true; - signing.enabled = false; - - tracing = { - level = "info"; - format = "compact"; - show_targets = true; - show_timestamps = true; - }; - - evaluator.poll_interval = 5; - evaluator.work_dir = "/var/lib/fc/evaluator"; - queue_runner.poll_interval = 3; - queue_runner.work_dir = "/var/lib/fc/queue-runner"; - - # Declarative bootstrap: project + API key created on startup - declarative.projects = [ - { - name = "declarative-project"; - repository_url = "https://github.com/test/declarative"; - description = "Bootstrap test project"; - jobsets = [ - { - name = "packages"; - nix_expression = "packages"; - enabled = true; - flake_mode = true; - check_interval = 300; - } - ]; - } - ]; - 
declarative.api_keys = [ - { - name = "bootstrap-admin"; - key = "fc_bootstrap_key"; - role = "admin"; - } - ]; - }; - }; - - # Ensure nix and zstd are available for cache endpoints - environment.systemPackages = with pkgs; [nix nix-eval-jobs zstd curl jq sudo git openssl]; - }; - - testScript = '' - import hashlib - import json - import re - import time - - machine.start() - machine.wait_for_unit("postgresql.service") - - # Ensure PostgreSQL is actually ready to accept connections before fc-server starts - machine.wait_until_succeeds("sudo -u fc psql -U fc -d fc -c 'SELECT 1'", timeout=30) - - machine.wait_for_unit("fc-server.service") - - # Wait for the server to start listening - machine.wait_until_succeeds("curl -sf http://127.0.0.1:3000/health", timeout=30) - - # ---- Verify all three services start ---- - with subtest("fc-evaluator.service starts without crash"): - machine.wait_for_unit("fc-evaluator.service", timeout=30) - # Check journalctl for no "binary not found" errors - result = machine.succeed("journalctl -u fc-evaluator --no-pager -n 20 2>&1") - assert "binary not found" not in result.lower(), f"Evaluator has 'binary not found' error: {result}" - assert "No such file" not in result, f"Evaluator has 'No such file' error: {result}" - - with subtest("fc-queue-runner.service starts without crash"): - machine.wait_for_unit("fc-queue-runner.service", timeout=30) - result = machine.succeed("journalctl -u fc-queue-runner --no-pager -n 20 2>&1") - assert "binary not found" not in result.lower(), f"Queue runner has 'binary not found' error: {result}" - assert "No such file" not in result, f"Queue runner has 'No such file' error: {result}" - - with subtest("All three FC services are active"): - for svc in ["fc-server", "fc-evaluator", "fc-queue-runner"]: - result = machine.succeed(f"systemctl is-active {svc}") - assert result.strip() == "active", f"Expected {svc} to be active, got '{result.strip()}'" - - # ---- Seed an API key for write operations ---- - # 
Token: fc_testkey123 -> SHA-256 hash inserted into api_keys table - api_token = "fc_testkey123" - api_hash = hashlib.sha256(api_token.encode()).hexdigest() - machine.succeed( - f"sudo -u fc psql -U fc -d fc -c \"INSERT INTO api_keys (name, key_hash, role) VALUES ('test', '{api_hash}', 'admin')\"" - ) - auth_header = f"-H 'Authorization: Bearer {api_token}'" - - # ======================================================================== - # Phase 0: Declarative Bootstrap Tests - # ======================================================================== - - with subtest("Declarative project was bootstrapped"): - result = machine.succeed( - "curl -sf http://127.0.0.1:3000/api/v1/projects | jq '.items[] | select(.name==\"declarative-project\") | .name' -r" - ) - assert result.strip() == "declarative-project", f"Expected declarative-project, got '{result.strip()}'" - - with subtest("Declarative project has correct repository URL"): - result = machine.succeed( - "curl -sf http://127.0.0.1:3000/api/v1/projects | jq '.items[] | select(.name==\"declarative-project\") | .repository_url' -r" - ) - assert result.strip() == "https://github.com/test/declarative", f"Expected declarative repo URL, got '{result.strip()}'" - - with subtest("Declarative project has bootstrapped jobset"): - # Get the declarative project ID - decl_project_id = machine.succeed( - "curl -sf http://127.0.0.1:3000/api/v1/projects | jq '.items[] | select(.name==\"declarative-project\") | .id' -r" - ).strip() - result = machine.succeed( - f"curl -sf http://127.0.0.1:3000/api/v1/projects/{decl_project_id}/jobsets | jq '.items[0].name' -r" - ) - assert result.strip() == "packages", f"Expected packages jobset, got '{result.strip()}'" - - with subtest("Declarative API key works for authentication"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "-X POST http://127.0.0.1:3000/api/v1/projects " - "-H 'Authorization: Bearer fc_bootstrap_key' " - "-H 'Content-Type: application/json' " - "-d 
'{\"name\": \"bootstrap-auth-test\", \"repository_url\": \"https://example.com/bootstrap\"}'" - ) - assert code.strip() == "200", f"Expected 200 with bootstrap key, got {code.strip()}" - - with subtest("Bootstrap is idempotent (server restarted successfully with same config)"): - # The server already started successfully with declarative config - that proves - # the bootstrap ran. We verify no duplicate projects were created. - result = machine.succeed( - "curl -sf http://127.0.0.1:3000/api/v1/projects | jq '[.items[] | select(.name==\"declarative-project\")] | length'" - ) - assert result.strip() == "1", f"Expected exactly 1 declarative-project, got {result.strip()}" - - # ======================================================================== - # Phase 0B: Security Headers Tests - # ======================================================================== - - with subtest("X-Content-Type-Options nosniff header present"): - result = machine.succeed( - "curl -s -D - -o /dev/null http://127.0.0.1:3000/health | grep -i x-content-type-options" - ) - assert "nosniff" in result.lower(), f"Expected nosniff, got: {result}" - - with subtest("X-Frame-Options DENY header present"): - result = machine.succeed( - "curl -s -D - -o /dev/null http://127.0.0.1:3000/health | grep -i x-frame-options" - ) - assert "deny" in result.lower(), f"Expected DENY, got: {result}" - - with subtest("Referrer-Policy header present"): - result = machine.succeed( - "curl -s -D - -o /dev/null http://127.0.0.1:3000/health | grep -i referrer-policy" - ) - assert "strict-origin-when-cross-origin" in result.lower(), f"Expected strict-origin-when-cross-origin, got: {result}" - - with subtest("Security headers present on API routes too"): - result = machine.succeed( - "curl -s -D - -o /dev/null http://127.0.0.1:3000/api/v1/projects 2>&1" - ) - assert "nosniff" in result.lower(), "API route missing X-Content-Type-Options" - assert "deny" in result.lower(), "API route missing X-Frame-Options" - - # 
======================================================================== - # Phase 0C: Error Message Quality Tests - # ======================================================================== - - with subtest("404 error returns structured JSON with error_code"): - result = machine.succeed( - "curl -s http://127.0.0.1:3000/api/v1/projects/00000000-0000-0000-0000-000000000000" - ) - assert len(result.strip()) > 0, "Expected non-empty response body for 404" - parsed = json.loads(result) - assert "error" in parsed, f"Missing 'error' field in: {result}" - assert "error_code" in parsed, f"Missing 'error_code' field in: {result}" - assert parsed["error_code"] == "NOT_FOUND", f"Expected NOT_FOUND, got {parsed['error_code']}" - - with subtest("409 conflict error includes meaningful message"): - # First create a project - machine.succeed( - "curl -sf -X POST http://127.0.0.1:3000/api/v1/projects " - f"{auth_header} " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"error-msg-test\", \"repository_url\": \"https://example.com/err\"}'" - ) - # Try creating duplicate — check status code first - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "-X POST http://127.0.0.1:3000/api/v1/projects " - f"{auth_header} " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"error-msg-test\", \"repository_url\": \"https://example.com/err2\"}'" - ) - assert code.strip() == "409", f"Expected 409 for duplicate, got {code.strip()}" - # Verify the response body is structured JSON with error details - result = machine.succeed( - "curl -s -X POST http://127.0.0.1:3000/api/v1/projects " - f"{auth_header} " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"error-msg-test\", \"repository_url\": \"https://example.com/err2\"}'" - ) - parsed = json.loads(result) - assert "error" in parsed, f"Missing error field in conflict response: {result}" - assert parsed.get("error_code") == "CONFLICT", f"Expected CONFLICT error_code, got: {parsed}" - # Error 
message should not be generic "Internal server error" - assert "internal" not in parsed["error"].lower(), \ - f"Error message should not be generic 'Internal server error': {parsed['error']}" - - with subtest("401 error returns structured JSON"): - result = machine.succeed( - "curl -s -X POST http://127.0.0.1:3000/api/v1/projects " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"x\", \"repository_url\": \"https://example.com/x\"}'" - ) - try: - parsed = json.loads(result) - assert "error" in parsed, f"Missing error field in 401: {result}" - except json.JSONDecodeError: - # Auth middleware may return non-JSON 401; verify status code instead - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "-X POST http://127.0.0.1:3000/api/v1/projects " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"x\", \"repository_url\": \"https://example.com/x\"}'" - ) - assert code.strip() == "401", f"Expected 401, got {code.strip()}" - - # ---- Health endpoint ---- - with subtest("Health endpoint returns OK"): - result = machine.succeed("curl -sf http://127.0.0.1:3000/health | jq -r .status") - assert result.strip() == "ok", f"Expected 'ok', got '{result.strip()}'" - - with subtest("Health endpoint reports database healthy"): - result = machine.succeed("curl -sf http://127.0.0.1:3000/health | jq -r .database") - assert result.strip() == "true", f"Expected 'true', got '{result.strip()}'" - - # ---- Cache endpoint: nix-cache-info ---- - with subtest("Cache info endpoint returns correct data"): - result = machine.succeed("curl -sf http://127.0.0.1:3000/nix-cache/nix-cache-info") - assert "StoreDir: /nix/store" in result, f"Missing StoreDir in: {result}" - assert "WantMassQuery: 1" in result, f"Missing WantMassQuery in: {result}" - - # ---- Cache endpoint: invalid hash rejection ---- - with subtest("Cache rejects short hash"): - machine.succeed("curl -s -o /dev/null -w '%{http_code}' http://127.0.0.1:3000/nix-cache/tooshort.narinfo | grep -q 404") 
- - with subtest("Cache rejects uppercase hash"): - machine.succeed("curl -s -o /dev/null -w '%{http_code}' http://127.0.0.1:3000/nix-cache/ABCDEFGHIJKLMNOPQRSTUVWXYZABCDEF.narinfo | grep -q 404") - - with subtest("Cache rejects special chars in hash"): - machine.succeed("curl -s -o /dev/null -w '%{http_code}' 'http://127.0.0.1:3000/nix-cache/abcdefghijklmnop____abcde.narinfo' | grep -q 404") - - with subtest("Cache returns 404 for valid but nonexistent hash"): - machine.succeed("curl -s -o /dev/null -w '%{http_code}' http://127.0.0.1:3000/nix-cache/abcdefghijklmnopqrstuvwxyz012345.narinfo | grep -q 404") - - # ---- NAR endpoints: invalid hash rejection ---- - with subtest("NAR zst rejects invalid hash"): - machine.succeed("curl -s -o /dev/null -w '%{http_code}' http://127.0.0.1:3000/nix-cache/nar/INVALID.nar.zst | grep -q 404") - - with subtest("NAR plain rejects invalid hash"): - machine.succeed("curl -s -o /dev/null -w '%{http_code}' http://127.0.0.1:3000/nix-cache/nar/INVALID.nar | grep -q 404") - - # ---- Search endpoint: length validation ---- - with subtest("Search rejects empty query"): - result = machine.succeed("curl -sf 'http://127.0.0.1:3000/api/v1/search?q=' | jq '.projects | length'") - assert result.strip() == "0", f"Expected 0 projects, got {result.strip()}" - - with subtest("Search rejects overly long query"): - long_q = "a" * 300 - result = machine.succeed(f"curl -sf 'http://127.0.0.1:3000/api/v1/search?q={long_q}' | jq '.projects | length'") - assert result.strip() == "0", f"Expected 0 projects for long query, got {result.strip()}" - - # ---- Error response format ---- - with subtest("404 error response includes error_code field"): - json_result = machine.succeed("curl -s http://127.0.0.1:3000/api/v1/projects/00000000-0000-0000-0000-000000000000 | jq -r .error_code") - assert json_result.strip() == "NOT_FOUND", f"Expected NOT_FOUND, got {json_result.strip()}" - - # ---- Empty page states (before any data is created) ---- - with subtest("Empty 
evaluations page has proper empty state"): - body = machine.succeed("curl -sf http://127.0.0.1:3000/evaluations") - assert "Page 1 of 0" not in body, \ - "Evaluations page should NOT show 'Page 1 of 0' when empty" - assert "No evaluations yet" in body, \ - "Empty evaluations page should show helpful empty state message" - - with subtest("Empty builds page has proper empty state"): - body = machine.succeed("curl -sf http://127.0.0.1:3000/builds") - assert "Page 1 of 0" not in body, \ - "Builds page should NOT show 'Page 1 of 0' when empty" - assert "No builds match" in body, \ - "Empty builds page should show helpful empty state message" - - with subtest("Empty channels page has proper empty state"): - body = machine.succeed("curl -sf http://127.0.0.1:3000/channels") - assert "No channels configured" in body, \ - "Empty channels page should show helpful empty state" - - with subtest("Tables use table-wrap containers on projects page"): - body = machine.succeed("curl -sf http://127.0.0.1:3000/projects") - # Projects page should have at least one project (from bootstrap) - assert "table-wrap" in body, \ - "Projects page should wrap tables in .table-wrap class" - - # ---- API CRUD: create and list projects ---- - with subtest("Create a project via API"): - result = machine.succeed( - "curl -sf -X POST http://127.0.0.1:3000/api/v1/projects " - f"{auth_header} " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"test-project\", \"repository_url\": \"https://github.com/test/repo\"}' " - "| jq -r .id" - ) - project_id = result.strip() - assert len(project_id) == 36, f"Expected UUID, got '{project_id}'" - - with subtest("List projects includes created project"): - result = machine.succeed("curl -sf http://127.0.0.1:3000/api/v1/projects | jq '.items[0].name'") - assert "test-project" in result, f"Expected test-project in: {result}" - - # ---- Builds list with filters ---- - with subtest("Builds list with system filter returns 200"): - machine.succeed("curl -sf 
'http://127.0.0.1:3000/api/v1/builds?system=x86_64-linux' | jq '.items'") - - with subtest("Builds list with job_name filter returns 200"): - machine.succeed("curl -sf 'http://127.0.0.1:3000/api/v1/builds?job_name=hello' | jq '.items'") - - with subtest("Builds list with combined filters returns 200"): - machine.succeed("curl -sf 'http://127.0.0.1:3000/api/v1/builds?system=x86_64-linux&status=pending&job_name=test' | jq '.items'") - - # ---- Metrics endpoint ---- - with subtest("Metrics endpoint returns prometheus format"): - result = machine.succeed("curl -sf http://127.0.0.1:3000/metrics") - assert "fc_builds_total" in result, "Missing fc_builds_total in metrics" - assert "fc_projects_total" in result, "Missing fc_projects_total in metrics" - assert "fc_evaluations_total" in result, "Missing fc_evaluations_total in metrics" - - # ---- CORS: default restrictive (no Access-Control-Allow-Origin for cross-origin) ---- - with subtest("Default CORS does not allow arbitrary origins"): - result = machine.succeed( - "curl -s -D - " - "-H 'Origin: http://evil.example.com' " - "http://127.0.0.1:3000/health " - "2>&1" - ) - # With restrictive CORS, there should be no access-control-allow-origin header - # for an arbitrary origin - assert "access-control-allow-origin: http://evil.example.com" not in result.lower(), \ - f"CORS should not allow arbitrary origins: {result}" - - # ---- Systemd hardening ---- - with subtest("fc-server runs as fc user"): - result = machine.succeed("systemctl show fc-server --property=User --value") - assert result.strip() == "fc", f"Expected fc user, got '{result.strip()}'" - - with subtest("fc-server has NoNewPrivileges"): - result = machine.succeed("systemctl show fc-server --property=NoNewPrivileges --value") - assert result.strip() == "yes", f"Expected NoNewPrivileges, got '{result.strip()}'" - - with subtest("fc user home directory exists"): - machine.succeed("test -d /var/lib/fc") - - with subtest("Log directory exists"): - 
machine.succeed("test -d /var/lib/fc/logs || mkdir -p /var/lib/fc/logs") - - # ---- Stats endpoint ---- - with subtest("Build stats endpoint returns data"): - result = machine.succeed("curl -sf http://127.0.0.1:3000/api/v1/builds/stats | jq '.total_builds'") - # Should be a number (possibly 0) - int(result.strip()) - - with subtest("Recent builds endpoint returns array"): - result = machine.succeed("curl -sf http://127.0.0.1:3000/api/v1/builds/recent | jq 'type'") - assert result.strip() == '"array"', f"Expected array, got {result.strip()}" - - # ======================================================================== - # Phase 3: Authentication & RBAC tests - # ======================================================================== - - # ---- 3A: Authentication tests ---- - with subtest("Unauthenticated POST returns 401"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "-X POST http://127.0.0.1:3000/api/v1/projects " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"unauth-test\", \"repository_url\": \"https://example.com/repo\"}'" - ) - assert code.strip() == "401", f"Expected 401, got {code.strip()}" - - with subtest("Wrong token POST returns 401"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "-X POST http://127.0.0.1:3000/api/v1/projects " - "-H 'Authorization: Bearer fc_wrong_token_here' " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"bad-auth-test\", \"repository_url\": \"https://example.com/repo\"}'" - ) - assert code.strip() == "401", f"Expected 401, got {code.strip()}" - - with subtest("Valid token POST returns 200"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "-X POST http://127.0.0.1:3000/api/v1/projects " - f"{auth_header} " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"auth-test-project\", \"repository_url\": \"https://example.com/auth-repo\"}'" - ) - assert code.strip() == "200", f"Expected 200, got {code.strip()}" - - with 
subtest("GET without token returns 200"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "http://127.0.0.1:3000/api/v1/projects" - ) - assert code.strip() == "200", f"Expected 200, got {code.strip()}" - - # ---- 3B: RBAC tests ---- - # Seed a read-only key - ro_token = "fc_readonly_key" - ro_hash = hashlib.sha256(ro_token.encode()).hexdigest() - machine.succeed( - f"sudo -u fc psql -U fc -d fc -c \"INSERT INTO api_keys (name, key_hash, role) VALUES ('readonly', '{ro_hash}', 'read-only')\"" - ) - ro_header = f"-H 'Authorization: Bearer {ro_token}'" - - with subtest("Read-only key POST project returns 403"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "-X POST http://127.0.0.1:3000/api/v1/projects " - f"{ro_header} " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"ro-attempt\", \"repository_url\": \"https://example.com/ro\"}'" - ) - assert code.strip() == "403", f"Expected 403, got {code.strip()}" - - with subtest("Read-only key POST admin/builders returns 403"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "-X POST http://127.0.0.1:3000/api/v1/admin/builders " - f"{ro_header} " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"bad-builder\", \"ssh_uri\": \"ssh://x@y\", \"systems\": [\"x86_64-linux\"]}'" - ) - assert code.strip() == "403", f"Expected 403, got {code.strip()}" - - with subtest("Admin key POST admin/builders returns 200"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "-X POST http://127.0.0.1:3000/api/v1/admin/builders " - f"{auth_header} " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"test-builder\", \"ssh_uri\": \"ssh://nix@builder\", \"systems\": [\"x86_64-linux\"], \"max_jobs\": 2}'" - ) - assert code.strip() == "200", f"Expected 200, got {code.strip()}" - - with subtest("Admin key create and delete API key"): - # Create - result = machine.succeed( - "curl -sf -X POST http://127.0.0.1:3000/api/v1/api-keys 
" - f"{auth_header} " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"ephemeral\", \"role\": \"read-only\"}'" - ) - key_data = json.loads(result) - assert "id" in key_data, f"Expected id in response: {result}" - key_id = key_data["id"] - # Delete - code = machine.succeed( - f"curl -s -o /dev/null -w '%{{http_code}}' " - f"-X DELETE http://127.0.0.1:3000/api/v1/api-keys/{key_id} " - f"{auth_header}" - ) - assert code.strip() == "200", f"Expected 200, got {code.strip()}" - - # ---- 3C: API key lifecycle test ---- - with subtest("API key lifecycle: create, use, delete, verify 401"): - # Create a new key via admin API - result = machine.succeed( - "curl -sf -X POST http://127.0.0.1:3000/api/v1/api-keys " - f"{auth_header} " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"lifecycle-test\", \"role\": \"admin\"}'" - ) - lc_data = json.loads(result) - lc_key = lc_data["key"] - lc_id = lc_data["id"] - lc_header = f"-H 'Authorization: Bearer {lc_key}'" - - # Use the new key to create a project - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "-X POST http://127.0.0.1:3000/api/v1/projects " - f"{lc_header} " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"lifecycle-project\", \"repository_url\": \"https://example.com/lc\"}'" - ) - assert code.strip() == "200", f"Expected 200 with new key, got {code.strip()}" - - # Delete the key - machine.succeed( - f"curl -sf -X DELETE http://127.0.0.1:3000/api/v1/api-keys/{lc_id} " - f"{auth_header}" - ) - - # Verify deleted key returns 401 - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "-X POST http://127.0.0.1:3000/api/v1/projects " - f"{lc_header} " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"should-fail\", \"repository_url\": \"https://example.com/fail\"}'" - ) - assert code.strip() == "401", f"Expected 401 after key deletion, got {code.strip()}" - - # ---- 3D: CRUD lifecycle test ---- - with subtest("CRUD lifecycle: project -> jobset 
-> list -> delete -> 404"): - # Create project - result = machine.succeed( - "curl -sf -X POST http://127.0.0.1:3000/api/v1/projects " - f"{auth_header} " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"crud-test\", \"repository_url\": \"https://example.com/crud\"}' " - "| jq -r .id" - ) - crud_project_id = result.strip() - - # Create jobset - result = machine.succeed( - f"curl -sf -X POST http://127.0.0.1:3000/api/v1/projects/{crud_project_id}/jobsets " - f"{auth_header} " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"main\", \"nix_expression\": \".\"}' " - "| jq -r .id" - ) - jobset_id = result.strip() - assert len(jobset_id) == 36, f"Expected UUID for jobset, got '{jobset_id}'" - - # List jobsets (should have at least 1) - result = machine.succeed( - f"curl -sf http://127.0.0.1:3000/api/v1/projects/{crud_project_id}/jobsets | jq '.items | length'" - ) - assert int(result.strip()) >= 1, f"Expected at least 1 jobset, got {result.strip()}" - - # Delete project (cascades) - machine.succeed( - f"curl -sf -X DELETE http://127.0.0.1:3000/api/v1/projects/{crud_project_id} " - f"{auth_header}" - ) - - # Verify project returns 404 - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - f"http://127.0.0.1:3000/api/v1/projects/{crud_project_id}" - ) - assert code.strip() == "404", f"Expected 404 after deletion, got {code.strip()}" - - # ---- 3E: Edge case tests ---- - with subtest("Duplicate project name returns 409"): - machine.succeed( - "curl -sf -X POST http://127.0.0.1:3000/api/v1/projects " - f"{auth_header} " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"dup-test\", \"repository_url\": \"https://example.com/dup\"}'" - ) - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "-X POST http://127.0.0.1:3000/api/v1/projects " - f"{auth_header} " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"dup-test\", \"repository_url\": \"https://example.com/dup2\"}'" - ) - assert code.strip() == 
"409", f"Expected 409 for duplicate, got {code.strip()}" - - with subtest("Invalid UUID path returns 400"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "http://127.0.0.1:3000/api/v1/projects/not-a-uuid" - ) - assert code.strip() == "400", f"Expected 400 for invalid UUID, got {code.strip()}" - - with subtest("XSS in project name returns 400"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "-X POST http://127.0.0.1:3000/api/v1/projects " - f"{auth_header} " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"\", \"repository_url\": \"https://example.com/xss\"}'" - ) - assert code.strip() == "400", f"Expected 400 for XSS name, got {code.strip()}" - - # ---- 3F: Security fuzzing ---- - with subtest("SQL injection in search query returns 0 results"): - result = machine.succeed( - "curl -sf 'http://127.0.0.1:3000/api/v1/search?q=test%27%20OR%201%3D1%20--' | jq '.projects | length'" - ) - assert result.strip() == "0", f"Expected 0, got {result.strip()}" - # Verify projects table is intact - count = machine.succeed( - "sudo -u fc psql -U fc -d fc -t -c 'SELECT COUNT(*) FROM projects'" - ) - assert int(count.strip()) > 0, "Projects table seems damaged" - - with subtest("Path traversal in cache returns 404"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "'http://127.0.0.1:3000/nix-cache/nar/../../../etc/passwd.nar'" - ) - # Should be 404 (not 200) - assert code.strip() in ("400", "404"), f"Expected 400/404 for path traversal, got {code.strip()}" - - with subtest("Oversized request body returns 413"): - # Generate a payload larger than 10MB (the default max_body_size) - code = machine.succeed( - "dd if=/dev/zero bs=1M count=12 2>/dev/null | " - "curl -s -o /dev/null -w '%{http_code}' " - "-X POST http://127.0.0.1:3000/api/v1/projects " - f"{auth_header} " - "-H 'Content-Type: application/json' " - "--data-binary @-" - ) - assert code.strip() == "413", f"Expected 413 for oversized body, 
got {code.strip()}" - - with subtest("NULL bytes in project name returns 400"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "-X POST http://127.0.0.1:3000/api/v1/projects " - f"{auth_header} " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"null\\u0000byte\", \"repository_url\": \"https://example.com/null\"}'" - ) - assert code.strip() == "400", f"Expected 400 for null bytes, got {code.strip()}" - - # ---- 3G: Dashboard page smoke tests ---- - with subtest("All dashboard pages return 200"): - pages = ["/", "/projects", "/evaluations", "/builds", "/queue", "/channels", "/admin", "/login"] - for page in pages: - code = machine.succeed( - f"curl -s -o /dev/null -w '%{{http_code}}' http://127.0.0.1:3000{page}" - ) - assert code.strip() == "200", f"Page {page} returned {code.strip()}, expected 200" - - # ======================================================================== - # Phase 4: Dashboard Content & Deep Functional Tests - # ======================================================================== - - # ---- 4A: Dashboard content verification ---- - with subtest("Home page contains Dashboard heading"): - body = machine.succeed("curl -sf http://127.0.0.1:3000/") - assert "Dashboard" in body, "Home page missing 'Dashboard' heading" - - with subtest("Home page contains stats grid"): - body = machine.succeed("curl -sf http://127.0.0.1:3000/") - assert "stat-card" in body, "Home page missing stats grid" - assert "Completed" in body, "Home page missing 'Completed' stat" - - with subtest("Home page shows project overview table"): - body = machine.succeed("curl -sf http://127.0.0.1:3000/") - # We created projects earlier, they should appear - assert "test-project" in body, "Home page should list test-project in overview" - - with subtest("Projects page contains created projects"): - body = machine.succeed("curl -sf http://127.0.0.1:3000/projects") - assert "test-project" in body, "Projects page should list test-project" - - with 
subtest("Projects page returns HTML content type"): - ct = machine.succeed( - "curl -s -D - -o /dev/null http://127.0.0.1:3000/projects | grep -i content-type" - ) - assert "text/html" in ct.lower(), f"Expected text/html, got: {ct}" - - with subtest("Admin page shows system status"): - body = machine.succeed("curl -sf http://127.0.0.1:3000/admin") - assert "Administration" in body, "Admin page missing heading" - assert "System Status" in body, "Admin page missing system status section" - assert "Remote Builders" in body, "Admin page missing remote builders section" - - with subtest("Queue page renders"): - body = machine.succeed("curl -sf http://127.0.0.1:3000/queue") - assert "Queue" in body or "Pending" in body or "Running" in body, \ - "Queue page missing expected content" - - with subtest("Channels page renders"): - body = machine.succeed("curl -sf http://127.0.0.1:3000/channels") - # Page should render even if empty - assert "Channel" in body or "channel" in body, "Channels page missing expected content" - - with subtest("Builds page renders with filter params"): - body = machine.succeed( - "curl -sf 'http://127.0.0.1:3000/builds?status=pending&system=x86_64-linux'" - ) - assert "Build" in body or "build" in body, "Builds page missing expected content" - - with subtest("Evaluations page renders"): - body = machine.succeed("curl -sf http://127.0.0.1:3000/evaluations") - assert "Evaluation" in body or "evaluation" in body, "Evaluations page missing expected content" - - with subtest("Login page contains form"): - body = machine.succeed("curl -sf http://127.0.0.1:3000/login") - assert "api_key" in body or "API" in body, "Login page missing API key input" - assert "= 1, f"Expected at least 1 jobset, got {result.strip()}" - - with subtest("Jobset detail page renders"): - body = machine.succeed( - f"curl -sf http://127.0.0.1:3000/jobset/{test_jobset_id}" - ) - assert "main" in body, "Jobset detail page should show jobset name" - - # ---- 4E: Evaluation trigger and 
lifecycle ---- - with subtest("Trigger evaluation via API"): - result = machine.succeed( - "curl -sf -X POST http://127.0.0.1:3000/api/v1/evaluations/trigger " - f"{auth_header} " - "-H 'Content-Type: application/json' " - f"-d '{{\"jobset_id\": \"{test_jobset_id}\", \"commit_hash\": \"abcdef1234567890abcdef1234567890abcdef12\"}}' " - "| jq -r .id" - ) - test_eval_id = result.strip() - assert len(test_eval_id) == 36, f"Expected UUID for evaluation, got '{test_eval_id}'" - - with subtest("Get evaluation by ID"): - result = machine.succeed( - f"curl -sf http://127.0.0.1:3000/api/v1/evaluations/{test_eval_id} | jq -r .status" - ) - assert result.strip().lower() == "pending", f"Expected pending status, got: {result.strip()}" - - with subtest("List evaluations includes triggered one"): - result = machine.succeed( - f"curl -sf 'http://127.0.0.1:3000/api/v1/evaluations?jobset_id={test_jobset_id}' | jq '.items | length'" - ) - assert int(result.strip()) >= 1, f"Expected at least 1 evaluation, got {result.strip()}" - - with subtest("Evaluation detail dashboard page renders"): - body = machine.succeed( - f"curl -sf http://127.0.0.1:3000/evaluation/{test_eval_id}" - ) - assert "abcdef123456" in body, "Evaluation page should show commit hash prefix" - - with subtest("Trigger evaluation with read-only key returns 403"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "-X POST http://127.0.0.1:3000/api/v1/evaluations/trigger " - f"{ro_header} " - "-H 'Content-Type: application/json' " - f"-d '{{\"jobset_id\": \"{test_jobset_id}\", \"commit_hash\": \"0000000000000000000000000000000000000000\"}}'" - ) - assert code.strip() == "403", f"Expected 403 for read-only eval trigger, got {code.strip()}" - - # ---- 4E2: Build lifecycle (restart, bump) ---- - # Create a build via SQL since builds are normally created by the evaluator - with subtest("Create test build via SQL"): - machine.succeed( - "sudo -u fc psql -U fc -d fc -c \"" - "INSERT INTO builds (id, 
evaluation_id, job_name, drv_path, status, system, priority, created_at) " - f"VALUES ('aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee', '{test_eval_id}', 'hello', '/nix/store/fake.drv', 'failed', 'x86_64-linux', 5, NOW())" - "\"" - ) - test_build_id = "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee" - - with subtest("Get build by ID"): - result = machine.succeed( - f"curl -sf http://127.0.0.1:3000/api/v1/builds/{test_build_id} | jq -r .status" - ) - assert result.strip().lower() == "failed", f"Expected failed, got: {result.strip()}" - - with subtest("Restart failed build"): - result = machine.succeed( - f"curl -sf -X POST http://127.0.0.1:3000/api/v1/builds/{test_build_id}/restart " - f"{auth_header} " - "| jq -r .status" - ) - assert result.strip().lower() == "pending", f"Expected pending status for restarted build, got: {result.strip()}" - - with subtest("Restart with read-only key returns 403"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - f"-X POST http://127.0.0.1:3000/api/v1/builds/{test_build_id}/restart " - f"{ro_header}" - ) - assert code.strip() == "403", f"Expected 403 for read-only restart, got {code.strip()}" - - # Create a pending build to test bump - with subtest("Create pending build for bump test"): - machine.succeed( - "sudo -u fc psql -U fc -d fc -c \"" - "INSERT INTO builds (id, evaluation_id, job_name, drv_path, status, system, priority, created_at) " - f"VALUES ('bbbbbbbb-cccc-dddd-eeee-ffffffffffff', '{test_eval_id}', 'world', '/nix/store/fake2.drv', 'pending', 'x86_64-linux', 5, NOW())" - "\"" - ) - bump_build_id = "bbbbbbbb-cccc-dddd-eeee-ffffffffffff" - - with subtest("Bump build priority"): - result = machine.succeed( - f"curl -sf -X POST http://127.0.0.1:3000/api/v1/builds/{bump_build_id}/bump " - f"{auth_header} " - "| jq -r .priority" - ) - assert int(result.strip()) == 15, f"Expected priority 15 (5+10), got: {result.strip()}" - - with subtest("Bump with read-only key returns 403"): - code = machine.succeed( - "curl -s -o 
/dev/null -w '%{http_code}' " - f"-X POST http://127.0.0.1:3000/api/v1/builds/{bump_build_id}/bump " - f"{ro_header}" - ) - assert code.strip() == "403", f"Expected 403 for read-only bump, got {code.strip()}" - - with subtest("Cancel build"): - result = machine.succeed( - f"curl -sf -X POST http://127.0.0.1:3000/api/v1/builds/{bump_build_id}/cancel " - f"{auth_header} " - "| jq '.[0].status'" - ) - assert "cancelled" in result.strip().lower(), f"Expected cancelled, got: {result.strip()}" - - # ---- 4E3: Evaluation comparison ---- - with subtest("Trigger second evaluation for comparison"): - result = machine.succeed( - "curl -sf -X POST http://127.0.0.1:3000/api/v1/evaluations/trigger " - f"{auth_header} " - "-H 'Content-Type: application/json' " - f"-d '{{\"jobset_id\": \"{test_jobset_id}\", \"commit_hash\": \"deadbeef1234567890abcdef1234567890abcdef\"}}' " - "| jq -r .id" - ) - second_eval_id = result.strip() - # Add a build to the second evaluation - machine.succeed( - "sudo -u fc psql -U fc -d fc -c \"" - "INSERT INTO builds (id, evaluation_id, job_name, drv_path, status, system, priority, created_at) " - f"VALUES ('cccccccc-dddd-eeee-ffff-aaaaaaaaaaaa', '{second_eval_id}', 'hello', '/nix/store/changed.drv', 'pending', 'x86_64-linux', 5, NOW())" - "\"" - ) - machine.succeed( - "sudo -u fc psql -U fc -d fc -c \"" - "INSERT INTO builds (id, evaluation_id, job_name, drv_path, status, system, priority, created_at) " - f"VALUES ('dddddddd-eeee-ffff-aaaa-bbbbbbbbbbbb', '{second_eval_id}', 'new-pkg', '/nix/store/new.drv', 'pending', 'x86_64-linux', 5, NOW())" - "\"" - ) - - with subtest("Compare evaluations shows diff"): - result = machine.succeed( - f"curl -sf 'http://127.0.0.1:3000/api/v1/evaluations/{test_eval_id}/compare?to={second_eval_id}'" - ) - data = json.loads(result) - # hello changed derivation, world was removed, new-pkg was added - assert len(data["changed_jobs"]) >= 1, f"Expected at least 1 changed job, got {data['changed_jobs']}" - assert 
len(data["new_jobs"]) >= 1, f"Expected at least 1 new job, got {data['new_jobs']}" - assert any(j["job_name"] == "new-pkg" for j in data["new_jobs"]), "new-pkg should be in new_jobs" - - # ---- 4F: Channel CRUD lifecycle ---- - with subtest("Create channel via API"): - result = machine.succeed( - "curl -sf -X POST http://127.0.0.1:3000/api/v1/channels " - f"{auth_header} " - "-H 'Content-Type: application/json' " - f"-d '{{\"project_id\": \"{project_id}\", \"name\": \"stable\", \"jobset_id\": \"{test_jobset_id}\"}}' " - "| jq -r .id" - ) - test_channel_id = result.strip() - assert len(test_channel_id) == 36, f"Expected UUID for channel, got '{test_channel_id}'" - - with subtest("List channels includes new channel"): - result = machine.succeed( - "curl -sf http://127.0.0.1:3000/api/v1/channels | jq 'length'" - ) - assert int(result.strip()) >= 1, f"Expected at least 1 channel, got {result.strip()}" - - with subtest("Get channel by ID"): - result = machine.succeed( - f"curl -sf http://127.0.0.1:3000/api/v1/channels/{test_channel_id} | jq -r .name" - ) - assert result.strip() == "stable", f"Expected 'stable', got: {result.strip()}" - - with subtest("List project channels"): - result = machine.succeed( - f"curl -sf http://127.0.0.1:3000/api/v1/projects/{project_id}/channels | jq 'length'" - ) - assert int(result.strip()) >= 1, f"Expected at least 1 project channel, got {result.strip()}" - - with subtest("Promote channel to evaluation"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - f"-X POST http://127.0.0.1:3000/api/v1/channels/{test_channel_id}/promote/{test_eval_id} " - f"{auth_header}" - ) - assert code.strip() == "200", f"Expected 200 for channel promote, got {code.strip()}" - - with subtest("Channel promote with read-only key returns 403"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - f"-X POST http://127.0.0.1:3000/api/v1/channels/{test_channel_id}/promote/{test_eval_id} " - f"{ro_header}" - ) - assert 
code.strip() == "403", f"Expected 403 for read-only promote, got {code.strip()}" - - with subtest("Create channel with read-only key returns 403"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "-X POST http://127.0.0.1:3000/api/v1/channels " - f"{ro_header} " - "-H 'Content-Type: application/json' " - f"-d '{{\"project_id\": \"{project_id}\", \"name\": \"nightly\", \"jobset_id\": \"{test_jobset_id}\"}}'" - ) - assert code.strip() == "403", f"Expected 403 for read-only channel create, got {code.strip()}" - - with subtest("Delete channel"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - f"-X DELETE http://127.0.0.1:3000/api/v1/channels/{test_channel_id} " - f"{auth_header}" - ) - assert code.strip() == "200", f"Expected 200 for channel delete, got {code.strip()}" - - # ---- 4G: Remote builder CRUD lifecycle ---- - with subtest("List remote builders"): - result = machine.succeed( - "curl -sf http://127.0.0.1:3000/api/v1/admin/builders | jq 'length'" - ) - # We created one earlier in 3B - assert int(result.strip()) >= 1, f"Expected at least 1 builder, got {result.strip()}" - - with subtest("Get remote builder by ID"): - # Get the first builder's ID - builder_id = machine.succeed( - "curl -sf http://127.0.0.1:3000/api/v1/admin/builders | jq -r '.[0].id'" - ).strip() - result = machine.succeed( - f"curl -sf http://127.0.0.1:3000/api/v1/admin/builders/{builder_id} | jq -r .name" - ) - assert result.strip() == "test-builder", f"Expected 'test-builder', got: {result.strip()}" - - with subtest("Update remote builder (disable)"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - f"-X PUT http://127.0.0.1:3000/api/v1/admin/builders/{builder_id} " - f"{auth_header} " - "-H 'Content-Type: application/json' " - "-d '{\"enabled\": false}'" - ) - assert code.strip() == "200", f"Expected 200 for builder update, got {code.strip()}" - - with subtest("Updated builder is disabled"): - result = machine.succeed( - 
f"curl -sf http://127.0.0.1:3000/api/v1/admin/builders/{builder_id} | jq -r .enabled" - ) - assert result.strip() == "false", f"Expected false, got: {result.strip()}" - - with subtest("Update builder with read-only key returns 403"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - f"-X PUT http://127.0.0.1:3000/api/v1/admin/builders/{builder_id} " - f"{ro_header} " - "-H 'Content-Type: application/json' " - "-d '{\"enabled\": true}'" - ) - assert code.strip() == "403", f"Expected 403 for read-only builder update, got {code.strip()}" - - with subtest("Delete remote builder with read-only key returns 403"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - f"-X DELETE http://127.0.0.1:3000/api/v1/admin/builders/{builder_id} " - f"{ro_header}" - ) - assert code.strip() == "403", f"Expected 403 for read-only builder delete, got {code.strip()}" - - with subtest("Delete remote builder with admin key"): - # First clear the builder_id from builds that reference it - machine.succeed( - "sudo -u fc psql -U fc -d fc -c " - f"\"UPDATE builds SET builder_id = NULL WHERE builder_id = '{builder_id}'\"" - ) - # Now delete the builder - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - f"-X DELETE http://127.0.0.1:3000/api/v1/admin/builders/{builder_id} " - f"{auth_header}" - ) - assert code.strip() == "200", f"Expected 200 for builder delete, got {code.strip()}" - - # ---- 4H: Admin system status endpoint ---- - with subtest("System status endpoint requires admin"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "http://127.0.0.1:3000/api/v1/admin/system " - f"{ro_header}" - ) - assert code.strip() == "403", f"Expected 403 for read-only system status, got {code.strip()}" - - with subtest("System status endpoint returns data with admin key"): - result = machine.succeed( - "curl -sf http://127.0.0.1:3000/api/v1/admin/system " - f"{auth_header} " - "| jq .projects_count" - ) - assert 
int(result.strip()) >= 1, f"Expected at least 1 project in system status, got {result.strip()}" - - # ---- 4I: API key listing ---- - with subtest("List API keys requires admin"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "http://127.0.0.1:3000/api/v1/api-keys " - f"{ro_header}" - ) - assert code.strip() == "403", f"Expected 403 for read-only API key list, got {code.strip()}" - - with subtest("List API keys returns array with admin key"): - result = machine.succeed( - "curl -sf http://127.0.0.1:3000/api/v1/api-keys " - f"{auth_header} " - "| jq 'length'" - ) - assert int(result.strip()) >= 1, f"Expected at least 1 API key, got {result.strip()}" - - # ---- 4J: Badge endpoints ---- - with subtest("Badge endpoint returns SVG for unknown project"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "http://127.0.0.1:3000/api/v1/job/nonexistent/main/hello/shield" - ) - # Should return 404 or error since project doesn't exist - assert code.strip() in ("404", "500"), f"Expected 404/500 for unknown badge, got {code.strip()}" - - with subtest("Badge endpoint returns SVG for existing project"): - # Create a badge-compatible project name lookup - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "http://127.0.0.1:3000/api/v1/job/test-project/main/hello/shield" - ) - # Should return 200 with SVG (even if no builds, shows "not found" badge) - assert code.strip() == "200", f"Expected 200 for badge, got {code.strip()}" - - with subtest("Badge returns SVG content type"): - ct = machine.succeed( - "curl -s -D - -o /dev/null " - "http://127.0.0.1:3000/api/v1/job/test-project/main/hello/shield " - "| grep -i content-type" - ) - assert "image/svg+xml" in ct.lower(), f"Expected SVG content type, got: {ct}" - - with subtest("Latest build endpoint for unknown project returns 404"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "http://127.0.0.1:3000/api/v1/job/nonexistent/main/hello/latest" - 
) - assert code.strip() in ("404", "500"), f"Expected 404/500 for latest build, got {code.strip()}" - - # ---- 4K: Pagination tests ---- - # Re-verify server is healthy before pagination tests - machine.wait_until_succeeds("curl -sf http://127.0.0.1:3000/health", timeout=15) - - with subtest("Projects pagination with limit and offset"): - result = machine.succeed( - "curl -sf 'http://127.0.0.1:3000/api/v1/projects?limit=1&offset=0' | jq '.items | length'" - ) - assert int(result.strip()) == 1, f"Expected 1 project with limit=1, got {result.strip()}" - - with subtest("Projects pagination returns total count"): - result = machine.succeed( - "curl -sf 'http://127.0.0.1:3000/api/v1/projects?limit=1&offset=0' | jq '.total'" - ) - assert int(result.strip()) >= 2, f"Expected at least 2 total projects, got {result.strip()}" - - with subtest("Builds pagination with limit"): - result = machine.succeed( - "curl -s 'http://127.0.0.1:3000/api/v1/builds?limit=5'" - ) - data = json.loads(result) - assert "limit" in data, f"Expected paginated response with 'limit' field, got: {result[:300]}" - assert data["limit"] == 5, f"Expected limit=5, got {data['limit']}" - - with subtest("Evaluations pagination with limit"): - result = machine.succeed( - "curl -s 'http://127.0.0.1:3000/api/v1/evaluations?limit=2'" - ) - data = json.loads(result) - assert "limit" in data, f"Expected paginated response with 'limit' field, got: {result[:300]}" - assert data["limit"] == 2, f"Expected limit=2, got {data['limit']}" - - # ---- 4L: Build sub-resources ---- - with subtest("Build steps endpoint returns empty array for nonexistent build"): - result = machine.succeed( - "curl -sf " - "http://127.0.0.1:3000/api/v1/builds/00000000-0000-0000-0000-000000000000/steps" - " | jq 'length'" - ) - assert int(result.strip()) == 0, f"Expected empty steps array, got {result.strip()}" - - with subtest("Build products endpoint returns empty array for nonexistent build"): - result = machine.succeed( - "curl -sf " - 
"http://127.0.0.1:3000/api/v1/builds/00000000-0000-0000-0000-000000000000/products" - " | jq 'length'" - ) - assert int(result.strip()) == 0, f"Expected empty products array, got {result.strip()}" - - with subtest("Build log endpoint for nonexistent build returns 404"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "http://127.0.0.1:3000/api/v1/builds/00000000-0000-0000-0000-000000000000/log" - ) - assert code.strip() == "404", f"Expected 404 for nonexistent build log, got {code.strip()}" - - # ---- 4M: Search functionality ---- - with subtest("Search returns matching projects"): - result = machine.succeed( - "curl -sf 'http://127.0.0.1:3000/api/v1/search?q=test-project' | jq '.projects | length'" - ) - assert int(result.strip()) >= 1, f"Expected at least 1 matching project, got {result.strip()}" - - with subtest("Search returns empty for nonsense query"): - result = machine.succeed( - "curl -sf 'http://127.0.0.1:3000/api/v1/search?q=zzzznonexistent99999' | jq '.projects | length'" - ) - assert result.strip() == "0", f"Expected 0, got {result.strip()}" - - # ---- 4N: Content-Type verification for API endpoints ---- - with subtest("API endpoints return application/json"): - ct = machine.succeed( - "curl -s -D - -o /dev/null http://127.0.0.1:3000/api/v1/projects | grep -i content-type" - ) - assert "application/json" in ct.lower(), f"Expected application/json, got: {ct}" - - with subtest("Health endpoint returns application/json"): - ct = machine.succeed( - "curl -s -D - -o /dev/null http://127.0.0.1:3000/health | grep -i content-type" - ) - assert "application/json" in ct.lower(), f"Expected application/json, got: {ct}" - - with subtest("Metrics endpoint returns text/plain"): - ct = machine.succeed( - "curl -s -D - -o /dev/null http://127.0.0.1:3000/metrics | grep -i content-type" - ) - assert "text/plain" in ct.lower() or "text/" in ct.lower(), f"Expected text content type for metrics, got: {ct}" - - # ---- 4O: Session/Cookie auth for 
dashboard ---- - with subtest("Login with valid API key sets session cookie"): - result = machine.succeed( - "curl -s -D - -o /dev/null " - "-X POST http://127.0.0.1:3000/login " - f"-d 'api_key={api_token}'" - ) - assert "fc_session=" in result, f"Expected fc_session cookie in response: {result}" - assert "HttpOnly" in result, "Expected HttpOnly flag on session cookie" - - with subtest("Login with invalid API key shows error"): - body = machine.succeed( - "curl -sf -X POST http://127.0.0.1:3000/login " - "-d 'api_key=fc_invalid_key'" - ) - assert "Invalid" in body or "invalid" in body or "error" in body.lower(), \ - f"Expected error message for invalid login: {body[:200]}" - - with subtest("Login with empty API key shows error"): - body = machine.succeed( - "curl -sf -X POST http://127.0.0.1:3000/login " - "-d 'api_key='" - ) - assert "required" in body.lower() or "error" in body.lower() or "Invalid" in body, \ - f"Expected error message for empty login: {body[:200]}" - - with subtest("Session cookie grants admin access on dashboard"): - # Login and capture cookie - cookie = machine.succeed( - "curl -s -D - -o /dev/null " - "-X POST http://127.0.0.1:3000/login " - f"-d 'api_key={api_token}' " - "| grep -i set-cookie | head -1" - ) - match = re.search(r'fc_session=([^;]+)', cookie) - if match: - session_val = match.group(1) - body = machine.succeed( - f"curl -sf -H 'Cookie: fc_session={session_val}' http://127.0.0.1:3000/admin" - ) - # Admin page with session should show API Keys section and admin controls - assert "API Keys" in body, "Admin page with session should show API Keys section" - - with subtest("Logout clears session cookie"): - result = machine.succeed( - "curl -s -D - -o /dev/null -X POST http://127.0.0.1:3000/logout" - ) - assert "Max-Age=0" in result or "max-age=0" in result.lower(), \ - "Logout should set Max-Age=0 to clear cookie" - - # ---- 4P: RBAC with create-projects role ---- - cp_token = "fc_createprojects_key" - cp_hash = 
hashlib.sha256(cp_token.encode()).hexdigest() - machine.succeed( - f"sudo -u fc psql -U fc -d fc -c \"INSERT INTO api_keys (name, key_hash, role) VALUES ('creator', '{cp_hash}', 'create-projects')\"" - ) - cp_header = f"-H 'Authorization: Bearer {cp_token}'" - - with subtest("create-projects role can create project"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "-X POST http://127.0.0.1:3000/api/v1/projects " - f"{cp_header} " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"creator-project\", \"repository_url\": \"https://example.com/creator\"}'" - ) - assert code.strip() == "200", f"Expected 200 for create-projects role, got {code.strip()}" - - with subtest("create-projects role cannot delete project"): - # Get the new project ID - cp_project_id = machine.succeed( - "curl -sf http://127.0.0.1:3000/api/v1/projects | jq -r '.items[] | select(.name==\"creator-project\") | .id'" - ).strip() - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - f"-X DELETE http://127.0.0.1:3000/api/v1/projects/{cp_project_id} " - f"{cp_header}" - ) - assert code.strip() == "403", f"Expected 403 for create-projects role DELETE, got {code.strip()}" - - with subtest("create-projects role cannot update project"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - f"-X PUT http://127.0.0.1:3000/api/v1/projects/{cp_project_id} " - f"{cp_header} " - "-H 'Content-Type: application/json' " - "-d '{\"description\": \"hacked\"}'" - ) - assert code.strip() == "403", f"Expected 403 for create-projects PUT, got {code.strip()}" - - with subtest("create-projects role cannot access admin endpoints"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "http://127.0.0.1:3000/api/v1/admin/system " - f"{cp_header}" - ) - assert code.strip() == "403", f"Expected 403 for create-projects system status, got {code.strip()}" - - # ---- 4Q: Additional security tests ---- - with subtest("DELETE project without auth 
returns 401"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - f"-X DELETE http://127.0.0.1:3000/api/v1/projects/{project_id}" - ) - assert code.strip() == "401", f"Expected 401 for unauthenticated DELETE, got {code.strip()}" - - with subtest("PUT project without auth returns 401"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - f"-X PUT http://127.0.0.1:3000/api/v1/projects/{project_id} " - "-H 'Content-Type: application/json' " - "-d '{\"description\": \"hacked\"}'" - ) - assert code.strip() == "401", f"Expected 401 for unauthenticated PUT, got {code.strip()}" - - with subtest("POST channel without auth returns 401"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "-X POST http://127.0.0.1:3000/api/v1/channels " - "-H 'Content-Type: application/json' " - "-d '{\"project_id\": \"00000000-0000-0000-0000-000000000000\", \"name\": \"x\", \"jobset_id\": \"00000000-0000-0000-0000-000000000000\"}'" - ) - assert code.strip() == "401", f"Expected 401 for unauthenticated channel create, got {code.strip()}" - - with subtest("API returns JSON error body for 404"): - result = machine.succeed( - "curl -sf http://127.0.0.1:3000/api/v1/projects/00000000-0000-0000-0000-000000000001 2>&1 || " - "curl -s http://127.0.0.1:3000/api/v1/projects/00000000-0000-0000-0000-000000000001" - ) - parsed = json.loads(result) - assert "error" in parsed or "error_code" in parsed, f"Expected JSON error body, got: {result}" - - with subtest("Nonexistent API route returns 404"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "http://127.0.0.1:3000/api/v1/nonexistent" - ) - # Axum returns 404 for unmatched routes - assert code.strip() in ("404", "405"), f"Expected 404/405 for nonexistent route, got {code.strip()}" - - with subtest("HEAD request to health returns 200"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' -I http://127.0.0.1:3000/health" - ) - assert code.strip() == "200", 
f"Expected 200 for HEAD /health, got {code.strip()}" - - with subtest("OPTIONS request returns valid response"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "-X OPTIONS http://127.0.0.1:3000/api/v1/projects" - ) - # Axum may return 200, 204, or 405 depending on CORS configuration - assert code.strip() in ("200", "204", "405"), f"Expected 200/204/405 for OPTIONS, got {code.strip()}" - - # ======================================================================== - # Phase 5: New Feature Tests (Structured Logging, Flake Probe, Setup Wizard, Dashboard) - # ======================================================================== - - # ---- 5A: Structured logging ---- - with subtest("Server produces structured log output"): - # The server should log via tracing with the configured format - result = machine.succeed("journalctl -u fc-server --no-pager -n 50 2>&1") - # With compact/full format, tracing outputs level and target info - assert "INFO" in result or "info" in result, \ - "Expected structured log lines with INFO level in journalctl output" - - # ---- 5B: Static CSS serving ---- - with subtest("Static CSS endpoint returns 200 with correct content type"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' http://127.0.0.1:3000/static/style.css" - ) - assert code.strip() == "200", f"Expected 200 for /static/style.css, got {code.strip()}" - ct = machine.succeed( - "curl -s -D - -o /dev/null http://127.0.0.1:3000/static/style.css | grep -i content-type" - ) - assert "text/css" in ct.lower(), f"Expected text/css, got: {ct}" - - # ---- 5C: Setup wizard page ---- - with subtest("Setup wizard page returns 200"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' http://127.0.0.1:3000/projects/new" - ) - assert code.strip() == "200", f"Expected 200 for /projects/new, got {code.strip()}" - - with subtest("Setup wizard page contains wizard steps"): - body = machine.succeed("curl -sf 
http://127.0.0.1:3000/projects/new") - assert "Step 1" in body, "Setup wizard should contain Step 1" - assert "Repository URL" in body, "Setup wizard should contain URL input" - assert "probeRepo" in body, "Setup wizard should contain probe JS function" - - with subtest("Projects page links to setup wizard"): - # Login first to get admin view - cookie = machine.succeed( - "curl -s -D - -o /dev/null " - "-X POST http://127.0.0.1:3000/login " - f"-d 'api_key={api_token}' " - "| grep -i set-cookie | head -1" - ) - match = re.search(r'fc_session=([^;]+)', cookie) - if match: - session_val = match.group(1) - body = machine.succeed( - f"curl -sf -H 'Cookie: fc_session={session_val}' http://127.0.0.1:3000/projects" - ) - assert '/projects/new' in body, "Projects page should link to /projects/new wizard" - - # ---- 5D: Flake probe endpoint ---- - with subtest("Probe endpoint exists and requires POST"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "http://127.0.0.1:3000/api/v1/projects/probe" - ) - # GET should return 405 (Method Not Allowed) - assert code.strip() in ("404", "405"), f"Expected 404/405 for GET /probe, got {code.strip()}" - - with subtest("Probe endpoint accepts POST with auth"): - # This will likely fail since the VM has no network access to github, - # but we can verify the endpoint exists and returns a proper error - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "-X POST http://127.0.0.1:3000/api/v1/projects/probe " - f"{auth_header} " - "-H 'Content-Type: application/json' " - "-d '{\"repository_url\": \"https://github.com/nonexistent/repo\"}'" - ) - # Should return 408 (timeout), 422 (nix eval error), 500, or 200 with is_flake=false - # Any non-crash response is acceptable - assert code.strip() in ("200", "408", "422", "500"), \ - f"Expected 200/408/422/500 for probe of unreachable repo, got {code.strip()}" - - # ---- 5E: Setup endpoint ---- - with subtest("Setup endpoint exists and requires POST"): - 
code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "http://127.0.0.1:3000/api/v1/projects/setup" - ) - assert code.strip() in ("404", "405"), f"Expected 404/405 for GET /setup, got {code.strip()}" - - with subtest("Setup endpoint creates project with jobsets"): - result = machine.succeed( - "curl -sf -X POST http://127.0.0.1:3000/api/v1/projects/setup " - f"{auth_header} " - "-H 'Content-Type: application/json' " - "-d '{\"repository_url\": \"https://github.com/test/setup-test\", \"name\": \"setup-test\", \"description\": \"Created via setup\", \"jobsets\": [{\"name\": \"packages\", \"nix_expression\": \"packages\"}]}' " - "| jq -r .project.id" - ) - setup_project_id = result.strip() - assert len(setup_project_id) == 36, f"Expected UUID from setup, got '{setup_project_id}'" - - with subtest("Setup-created project has jobsets"): - result = machine.succeed( - f"curl -sf http://127.0.0.1:3000/api/v1/projects/{setup_project_id}/jobsets | jq '.items | length'" - ) - assert int(result.strip()) == 1, f"Expected 1 jobset from setup, got {result.strip()}" - - with subtest("Setup endpoint with read-only key returns 403"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - "-X POST http://127.0.0.1:3000/api/v1/projects/setup " - f"{ro_header} " - "-H 'Content-Type: application/json' " - "-d '{\"repository_url\": \"https://github.com/test/ro\", \"name\": \"ro-setup\", \"jobsets\": []}'" - ) - assert code.strip() == "403", f"Expected 403 for read-only setup, got {code.strip()}" - - # Clean up setup-test project - machine.succeed( - f"curl -sf -X DELETE http://127.0.0.1:3000/api/v1/projects/{setup_project_id} " - f"{auth_header}" - ) - - # ---- 5F: Dashboard improvements ---- - with subtest("Home page has dashboard-grid two-column layout"): - body = machine.succeed("curl -sf http://127.0.0.1:3000/") - assert "dashboard-grid" in body, "Home page should have dashboard-grid class" - - with subtest("Home page has colored stat values"): - body 
= machine.succeed("curl -sf http://127.0.0.1:3000/") - assert "stat-value-green" in body, "Home page should have green stat value for completed" - assert "stat-value-red" in body, "Home page should have red stat value for failed" - - with subtest("Home page has escapeHtml utility"): - body = machine.succeed("curl -sf http://127.0.0.1:3000/") - assert "escapeHtml" in body, "Home page should include escapeHtml function" - - with subtest("Admin page JS uses escapeHtml for error handling"): - # Login to get admin view - if match: - body = machine.succeed( - f"curl -sf -H 'Cookie: fc_session={session_val}' http://127.0.0.1:3000/admin" - ) - assert "escapeHtml" in body, "Admin page JS should use escapeHtml" - - # ---- 4R: Metrics reflect actual data ---- - with subtest("Metrics fc_projects_total reflects created projects"): - result = machine.succeed("curl -sf http://127.0.0.1:3000/metrics") - for line in result.split("\n"): - if line.startswith("fc_projects_total"): - val = int(line.split()[-1]) - assert val >= 3, f"Expected fc_projects_total >= 3, got {val}" - break - - with subtest("Metrics fc_evaluations_total reflects triggered evaluation"): - result = machine.succeed("curl -sf http://127.0.0.1:3000/metrics") - for line in result.split("\n"): - if line.startswith("fc_evaluations_total"): - val = int(line.split()[-1]) - assert val >= 1, f"Expected fc_evaluations_total >= 1, got {val}" - break - - # ======================================================================== - # Phase E2E-1: End-to-End Evaluator Integration Test - # ======================================================================== - - # ---- Create a test flake inside the VM ---- - with subtest("Create bare git repo with test flake"): - machine.succeed("mkdir -p /var/lib/fc/test-repos") - machine.succeed("git init --bare /var/lib/fc/test-repos/test-flake.git") - - # Create a working copy, write the flake, commit, push - machine.succeed("mkdir -p /tmp/test-flake-work") - machine.succeed("cd 
/tmp/test-flake-work && git init") - machine.succeed("cd /tmp/test-flake-work && git config user.email 'test@fc' && git config user.name 'FC Test'") - - # Write a minimal flake.nix that builds a simple derivation - machine.succeed(""" - cat > /tmp/test-flake-work/flake.nix << 'FLAKE' - { - description = "FC CI test flake"; - outputs = { self, ... }: { - packages.x86_64-linux.hello = derivation { - name = "fc-test-hello"; - system = "x86_64-linux"; - builder = "/bin/sh"; - args = [ "-c" "echo hello > $out" ]; - }; - }; - } - FLAKE - """) - machine.succeed("cd /tmp/test-flake-work && git add -A && git commit -m 'initial flake'") - machine.succeed("cd /tmp/test-flake-work && git remote add origin /var/lib/fc/test-repos/test-flake.git") - machine.succeed("cd /tmp/test-flake-work && git push origin HEAD:refs/heads/main") - - # Set ownership for fc user - machine.succeed("chown -R fc:fc /var/lib/fc/test-repos") - - # ---- Create project + jobset pointing to the local repo via API ---- - with subtest("Create E2E project and jobset via API"): - result = machine.succeed( - "curl -sf -X POST http://127.0.0.1:3000/api/v1/projects " - f"{auth_header} " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"e2e-test\", \"repository_url\": \"https://github.com/nixos/nixpkgs\"}' " - "| jq -r .id" - ) - e2e_project_id = result.strip() - assert len(e2e_project_id) == 36, f"Expected UUID, got '{e2e_project_id}'" - - result = machine.succeed( - f"curl -sf -X POST http://127.0.0.1:3000/api/v1/projects/{e2e_project_id}/jobsets " - f"{auth_header} " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"packages\", \"nix_expression\": \"packages\", \"flake_mode\": true, \"enabled\": true, \"check_interval\": 5, \"branch\": null, \"scheduling_shares\": 100}' " - "| jq -r .id" - ) - e2e_jobset_id = result.strip() - assert len(e2e_jobset_id) == 36, f"Expected UUID for jobset, got '{e2e_jobset_id}'" - - # ---- Wait for evaluator to pick it up and create an evaluation ---- - with 
subtest("Evaluator discovers and evaluates the flake"): - # The evaluator is already running (started in Phase 1) - # Poll for evaluation to appear with status "completed" - machine.wait_until_succeeds( - f"curl -sf 'http://127.0.0.1:3000/api/v1/evaluations?jobset_id={e2e_jobset_id}' " - "| jq -e '.items[] | select(.status==\"completed\")'", - timeout=90 - ) - - with subtest("Evaluation created builds with valid drv_path"): - # Get evaluation ID - e2e_eval_id = machine.succeed( - f"curl -sf 'http://127.0.0.1:3000/api/v1/evaluations?jobset_id={e2e_jobset_id}' " - "| jq -r '.items[] | select(.status==\"completed\") | .id' | head -1" - ).strip() - assert len(e2e_eval_id) == 36, f"Expected UUID for evaluation, got '{e2e_eval_id}'" - - # Verify builds were created - result = machine.succeed( - f"curl -sf 'http://127.0.0.1:3000/api/v1/builds?evaluation_id={e2e_eval_id}' | jq '.items | length'" - ) - build_count = int(result.strip()) - assert build_count >= 1, f"Expected >= 1 build, got {build_count}" - - # Verify build has valid drv_path - drv_path = machine.succeed( - f"curl -sf 'http://127.0.0.1:3000/api/v1/builds?evaluation_id={e2e_eval_id}' | jq -r '.items[0].drv_path'" - ).strip() - assert drv_path.startswith("/nix/store/"), f"Expected /nix/store/ drv_path, got '{drv_path}'" - - # Get the build ID for later - e2e_build_id = machine.succeed( - f"curl -sf 'http://127.0.0.1:3000/api/v1/builds?evaluation_id={e2e_eval_id}' | jq -r '.items[0].id'" - ).strip() - - # ---- Test evaluation caching ---- - with subtest("Same commit does not trigger a new evaluation"): - # Get current evaluation count - before_count = machine.succeed( - f"curl -sf 'http://127.0.0.1:3000/api/v1/evaluations?jobset_id={e2e_jobset_id}' | jq '.items | length'" - ).strip() - # Wait a poll cycle - time.sleep(10) - after_count = machine.succeed( - f"curl -sf 'http://127.0.0.1:3000/api/v1/evaluations?jobset_id={e2e_jobset_id}' | jq '.items | length'" - ).strip() - assert before_count == after_count, 
f"Evaluation count changed from {before_count} to {after_count} (should be cached)" - - # ---- Test new commit triggers new evaluation ---- - with subtest("New commit triggers new evaluation"): - before_count_int = int(machine.succeed( - f"curl -sf 'http://127.0.0.1:3000/api/v1/evaluations?jobset_id={e2e_jobset_id}' | jq '.items | length'" - ).strip()) - - # Push a new commit - machine.succeed(""" - cd /tmp/test-flake-work && \ - cat > flake.nix << 'FLAKE' - { - description = "FC CI test flake v2"; - outputs = { self, ... }: { - packages.x86_64-linux.hello = derivation { - name = "fc-test-hello-v2"; - system = "x86_64-linux"; - builder = "/bin/sh"; - args = [ "-c" "echo hello-v2 > $out" ]; - }; - }; - } - FLAKE - """) - machine.succeed("cd /tmp/test-flake-work && git add -A && git commit -m 'v2 update'") - machine.succeed("cd /tmp/test-flake-work && git push origin HEAD:refs/heads/main") - - # Wait for evaluator to detect and create new evaluation - machine.wait_until_succeeds( - f"test $(curl -sf 'http://127.0.0.1:3000/api/v1/evaluations?jobset_id={e2e_jobset_id}' | jq '.items | length') -gt {before_count_int}", - timeout=60 - ) - - # ======================================================================== - # Phase E2E-2: End-to-End Queue Runner Integration Test - # ======================================================================== - - with subtest("Queue runner builds pending derivation"): - # Poll the E2E build until completed (queue-runner is already running) - machine.wait_until_succeeds( - f"curl -sf http://127.0.0.1:3000/api/v1/builds/{e2e_build_id} | jq -e 'select(.status==\"completed\")'", - timeout=120 - ) - - with subtest("Completed build has output path"): - result = machine.succeed( - f"curl -sf http://127.0.0.1:3000/api/v1/builds/{e2e_build_id} | jq -r .build_output_path" - ).strip() - assert result != "null" and result.startswith("/nix/store/"), \ - f"Expected /nix/store/ output path, got '{result}'" - - with subtest("Build steps recorded"): - 
result = machine.succeed( - f"curl -sf http://127.0.0.1:3000/api/v1/builds/{e2e_build_id}/steps | jq 'length'" - ) - assert int(result.strip()) >= 1, f"Expected >= 1 build step, got {result.strip()}" - - # Verify exit_code = 0 - exit_code = machine.succeed( - f"curl -sf http://127.0.0.1:3000/api/v1/builds/{e2e_build_id}/steps | jq '.[0].exit_code'" - ).strip() - assert exit_code == "0", f"Expected exit_code 0, got {exit_code}" - - with subtest("Build products created"): - result = machine.succeed( - f"curl -sf http://127.0.0.1:3000/api/v1/builds/{e2e_build_id}/products | jq 'length'" - ) - assert int(result.strip()) >= 1, f"Expected >= 1 build product, got {result.strip()}" - - # Verify product has valid path - product_path = machine.succeed( - f"curl -sf http://127.0.0.1:3000/api/v1/builds/{e2e_build_id}/products | jq -r '.[0].path'" - ).strip() - assert product_path.startswith("/nix/store/"), f"Expected /nix/store/ product path, got '{product_path}'" - - with subtest("Build log exists"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - f"http://127.0.0.1:3000/api/v1/builds/{e2e_build_id}/log" - ).strip() - assert code == "200", f"Expected 200 for build log, got {code}" - - # ======================================================================== - # Phase E2E-3: Jobset Input Management API - # ======================================================================== - - with subtest("Create jobset input via API"): - result = machine.succeed( - f"curl -sf -X POST http://127.0.0.1:3000/api/v1/projects/{e2e_project_id}/jobsets/{e2e_jobset_id}/inputs " - f"{auth_header} " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"nixpkgs\", \"input_type\": \"git\", \"value\": \"https://github.com/NixOS/nixpkgs\"}'" - ) - input_data = json.loads(result) - assert "id" in input_data, f"Expected id in response: {result}" - e2e_input_id = input_data["id"] - - with subtest("List jobset inputs"): - result = machine.succeed( - f"curl -sf 
http://127.0.0.1:3000/api/v1/projects/{e2e_project_id}/jobsets/{e2e_jobset_id}/inputs | jq 'length'" - ) - assert int(result.strip()) >= 1, f"Expected >= 1 input, got {result.strip()}" - - with subtest("Read-only key cannot create jobset input"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - f"-X POST http://127.0.0.1:3000/api/v1/projects/{e2e_project_id}/jobsets/{e2e_jobset_id}/inputs " - f"{ro_header} " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"test\", \"input_type\": \"string\", \"value\": \"hello\"}'" - ).strip() - assert code == "403", f"Expected 403 for read-only input create, got {code}" - - with subtest("Delete jobset input"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - f"-X DELETE http://127.0.0.1:3000/api/v1/projects/{e2e_project_id}/jobsets/{e2e_jobset_id}/inputs/{e2e_input_id} " - f"{auth_header}" - ).strip() - assert code == "200", f"Expected 200 for input delete, got {code}" - - with subtest("Read-only key cannot delete jobset input"): - # Re-create first - result = machine.succeed( - f"curl -sf -X POST http://127.0.0.1:3000/api/v1/projects/{e2e_project_id}/jobsets/{e2e_jobset_id}/inputs " - f"{auth_header} " - "-H 'Content-Type: application/json' " - "-d '{\"name\": \"test-ro\", \"input_type\": \"string\", \"value\": \"test\"}'" - ) - tmp_input_id = json.loads(result)["id"] - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - f"-X DELETE http://127.0.0.1:3000/api/v1/projects/{e2e_project_id}/jobsets/{e2e_jobset_id}/inputs/{tmp_input_id} " - f"{ro_header}" - ).strip() - assert code == "403", f"Expected 403 for read-only input delete, got {code}" - - # ======================================================================== - # Phase E2E-4: Notification Dispatch - # ======================================================================== - - # Notifications are dispatched after builds complete (already tested above). 
- # Verify run_command notifications work: - with subtest("Notification run_command is invoked on build completion"): - # This tests that the notification system dispatches properly. - # The actual run_command config is not set in this VM, so we just verify - # the build status was updated correctly after notification dispatch. - result = machine.succeed( - f"curl -sf http://127.0.0.1:3000/api/v1/builds/{e2e_build_id} | jq -r .status" - ).strip() - assert result == "completed", f"Expected completed after notification, got {result}" - - # ======================================================================== - # Phase E2E-5: Channel Auto-Promotion - # ======================================================================== - - with subtest("Channel auto-promotion after all builds complete"): - # Create a channel tracking the E2E jobset - result = machine.succeed( - "curl -sf -X POST http://127.0.0.1:3000/api/v1/channels " - f"{auth_header} " - "-H 'Content-Type: application/json' " - f"-d '{{\"project_id\": \"{e2e_project_id}\", \"name\": \"e2e-channel\", \"jobset_id\": \"{e2e_jobset_id}\"}}' " - "| jq -r .id" - ) - e2e_channel_id = result.strip() - - # Auto-promotion happens when all builds in an evaluation complete. - # The first evaluation's builds should already be complete. 
- # Check channel's current_evaluation_id - machine.wait_until_succeeds( - f"curl -sf http://127.0.0.1:3000/api/v1/channels/{e2e_channel_id} " - "| jq -e 'select(.current_evaluation_id != null)'", - timeout=30 - ) - - # ======================================================================== - # Phase E2E-6: Binary Cache NARinfo Test - # ======================================================================== - - with subtest("Binary cache serves NARinfo for built output"): - # Get the build output path - output_path = machine.succeed( - f"curl -sf http://127.0.0.1:3000/api/v1/builds/{e2e_build_id} | jq -r .build_output_path" - ).strip() - - # Extract the hash from /nix/store/- - hash_match = re.match(r'/nix/store/([a-z0-9]+)-', output_path) - assert hash_match, f"Could not extract hash from output path: {output_path}" - store_hash = hash_match.group(1) - - # Request NARinfo - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - f"http://127.0.0.1:3000/nix-cache/{store_hash}.narinfo" - ).strip() - assert code == "200", f"Expected 200 for NARinfo, got {code}" - - # Verify NARinfo content has StorePath and NarHash - narinfo = machine.succeed( - f"curl -sf http://127.0.0.1:3000/nix-cache/{store_hash}.narinfo" - ) - assert "StorePath:" in narinfo, f"NARinfo missing StorePath: {narinfo}" - assert "NarHash:" in narinfo, f"NARinfo missing NarHash: {narinfo}" - - # ======================================================================== - # Phase E2E-7: Build Retry on Failure - # ======================================================================== - - with subtest("Build with invalid drv_path fails and retries"): - # Insert a build with an invalid drv_path via SQL - machine.succeed( - "sudo -u postgres psql -d fc -c \"" - "INSERT INTO builds (id, evaluation_id, job_name, drv_path, status, priority, retry_count, max_retries, is_aggregate, signed) " - f"VALUES (gen_random_uuid(), '{e2e_eval_id}', 'bad-build', '/nix/store/invalid-does-not-exist.drv', 
'pending', 0, 0, 3, false, false);\"" - ) - - # Wait for queue-runner to attempt the build and fail it - machine.wait_until_succeeds( - "curl -sf 'http://127.0.0.1:3000/api/v1/builds?job_name=bad-build' " - "| jq -e '.items[] | select(.status==\"failed\")'", - timeout=60 - ) - - # Verify status is failed - result = machine.succeed( - "curl -sf 'http://127.0.0.1:3000/api/v1/builds?job_name=bad-build' | jq -r '.items[0].status'" - ).strip() - assert result == "failed", f"Expected failed for bad build, got '{result}'" - - # ======================================================================== - # Phase E2E-8: Notification Dispatch (run_command) - # ======================================================================== - - with subtest("Notification run_command invoked on build completion"): - # Write a notification script - machine.succeed("mkdir -p /var/lib/fc") - machine.succeed(""" - cat > /var/lib/fc/notify.sh << 'SCRIPT' - #!/bin/sh - echo "BUILD_STATUS=$FC_BUILD_STATUS" >> /var/lib/fc/notify-output - echo "BUILD_ID=$FC_BUILD_ID" >> /var/lib/fc/notify-output - echo "BUILD_JOB=$FC_BUILD_JOB" >> /var/lib/fc/notify-output - SCRIPT - """) - machine.succeed("chmod +x /var/lib/fc/notify.sh") - machine.succeed("chown -R fc:fc /var/lib/fc") - - # Update fc.toml to enable notifications - machine.succeed(""" - cat >> /etc/fc.toml << 'CONFIG' - - [notifications] - run_command = "/var/lib/fc/notify.sh" - CONFIG - """) - - # Restart queue-runner to pick up new config - machine.succeed("systemctl restart fc-queue-runner") - machine.wait_for_unit("fc-queue-runner.service", timeout=30) - - # Create a new simple build to trigger notification - # Push a trivial change to trigger a new evaluation - machine.succeed(""" - cd /tmp/test-flake-work && \ - cat > flake.nix << 'FLAKE' - { - description = "FC CI test flake notify"; - outputs = { self, ... 
}: { - packages.x86_64-linux.notify-test = derivation { - name = "fc-notify-test"; - system = "x86_64-linux"; - builder = "/bin/sh"; - args = [ "-c" "echo notify-test > $out" ]; - }; - }; - } - FLAKE - """) - machine.succeed("cd /tmp/test-flake-work && git add -A && git commit -m 'trigger notification test'") - machine.succeed("cd /tmp/test-flake-work && git push origin HEAD:refs/heads/main") - - # Wait for evaluator to create new evaluation - machine.wait_until_succeeds( - f"curl -sf 'http://127.0.0.1:3000/api/v1/evaluations?jobset_id={e2e_jobset_id}' " - "| jq '.items | length' | grep -v '^2$'", - timeout=60 - ) - - # Get the new build ID - notify_build_id = machine.succeed( - "curl -sf 'http://127.0.0.1:3000/api/v1/builds?job_name=notify-test' | jq -r '.items[0].id'" - ).strip() - - # Wait for the build to complete - machine.wait_until_succeeds( - f"curl -sf http://127.0.0.1:3000/api/v1/builds/{notify_build_id} | jq -e 'select(.status==\"completed\")'", - timeout=120 - ) - - # Wait a bit for notification to dispatch - time.sleep(5) - - # Verify the notification script was executed - machine.wait_for_file("/var/lib/fc/notify-output") - output = machine.succeed("cat /var/lib/fc/notify-output") - assert "BUILD_STATUS=success" in output or "BUILD_STATUS=completed" in output, \ - f"Expected BUILD_STATUS in notification output, got: {output}" - assert notify_build_id in output, f"Expected build ID {notify_build_id} in output, got: {output}" - - # ======================================================================== - # Phase E2E-9: Nix Signing - # ======================================================================== - - with subtest("Generate signing key and configure signing"): - # Generate a Nix signing key - machine.succeed("mkdir -p /var/lib/fc/keys") - machine.succeed("nix-store --generate-binary-cache-key fc-test /var/lib/fc/keys/signing-key /var/lib/fc/keys/signing-key.pub") - machine.succeed("chown -R fc:fc /var/lib/fc/keys") - machine.succeed("chmod 600 
/var/lib/fc/keys/signing-key") - - # Update fc.toml to enable signing - machine.succeed(""" - cat >> /etc/fc.toml << 'CONFIG' - - [signing] - enabled = true - key_file = "/var/lib/fc/keys/signing-key" - CONFIG - """) - - # Restart queue-runner to pick up signing config - machine.succeed("systemctl restart fc-queue-runner") - machine.wait_for_unit("fc-queue-runner.service", timeout=30) - - with subtest("Signed builds have valid signatures"): - # Create a new build to test signing - machine.succeed(""" - cd /tmp/test-flake-work && \ - cat > flake.nix << 'FLAKE' - { - description = "FC CI test flake signing"; - outputs = { self, ... }: { - packages.x86_64-linux.sign-test = derivation { - name = "fc-sign-test"; - system = "x86_64-linux"; - builder = "/bin/sh"; - args = [ "-c" "echo signed-build > $out" ]; - }; - }; - } - FLAKE - """) - machine.succeed("cd /tmp/test-flake-work && git add -A && git commit -m 'trigger signing test'") - machine.succeed("cd /tmp/test-flake-work && git push origin HEAD:refs/heads/main") - - # Wait for evaluation - machine.wait_until_succeeds( - f"curl -sf 'http://127.0.0.1:3000/api/v1/evaluations?jobset_id={e2e_jobset_id}' " - "| jq '.items | length' | grep -v '^[23]$'", - timeout=60 - ) - - # Get the sign-test build - sign_build_id = machine.succeed( - "curl -sf 'http://127.0.0.1:3000/api/v1/builds?job_name=sign-test' | jq -r '.items[0].id'" - ).strip() - - # Wait for build to complete - machine.wait_until_succeeds( - f"curl -sf http://127.0.0.1:3000/api/v1/builds/{sign_build_id} | jq -e 'select(.status==\"completed\")'", - timeout=120 - ) - - # Verify the build has signed=true - signed = machine.succeed( - f"curl -sf http://127.0.0.1:3000/api/v1/builds/{sign_build_id} | jq -r .signed" - ).strip() - assert signed == "true", f"Expected signed=true, got {signed}" - - # Get the output path and verify it with nix store verify - output_path = machine.succeed( - f"curl -sf http://127.0.0.1:3000/api/v1/builds/{sign_build_id} | jq -r 
.build_output_path" - ).strip() - - # Verify the path is signed with our key - # The verify command should succeed (exit 0) if signatures are valid - machine.succeed(f"nix store verify --sigs-needed 1 {output_path}") - - # ======================================================================== - # Phase E2E-10: GC Roots - # ======================================================================== - - with subtest("GC roots are created for build products"): - # Enable GC in config - machine.succeed(""" - cat >> /etc/fc.toml << 'CONFIG' - - [gc] - enabled = true - gc_roots_dir = "/nix/var/nix/gcroots/per-user/fc" - max_age_days = 30 - cleanup_interval = 3600 - CONFIG - """) - - # Restart queue-runner to enable GC - machine.succeed("systemctl restart fc-queue-runner") - machine.wait_for_unit("fc-queue-runner.service", timeout=30) - - # Ensure the gc roots directory exists - machine.succeed("mkdir -p /nix/var/nix/gcroots/per-user/fc") - machine.succeed("chown -R fc:fc /nix/var/nix/gcroots/per-user/fc") - - # Create a new build to test GC root creation - machine.succeed(""" - cd /tmp/test-flake-work && \ - cat > flake.nix << 'FLAKE' - { - description = "FC CI test flake gc"; - outputs = { self, ... 
}: { - packages.x86_64-linux.gc-test = derivation { - name = "fc-gc-test"; - system = "x86_64-linux"; - builder = "/bin/sh"; - args = [ "-c" "echo gc-test > $out" ]; - }; - }; - } - FLAKE - """) - machine.succeed("cd /tmp/test-flake-work && git add -A && git commit -m 'trigger gc test'") - machine.succeed("cd /tmp/test-flake-work && git push origin HEAD:refs/heads/main") - - # Wait for evaluation and build - machine.wait_until_succeeds( - "curl -sf 'http://127.0.0.1:3000/api/v1/builds?job_name=gc-test' | jq -e '.items[] | select(.status==\"completed\")'", - timeout=120 - ) - - # Get the build output path - gc_build_output = machine.succeed( - "curl -sf 'http://127.0.0.1:3000/api/v1/builds?job_name=gc-test' | jq -r '.items[0].build_output_path'" - ).strip() - - # Verify GC root symlink was created - # The symlink should be in /nix/var/nix/gcroots/per-user/fc/ and point to the build output - gc_roots = machine.succeed("find /nix/var/nix/gcroots/per-user/fc -type l 2>/dev/null || true").strip() - - # Check if any symlink points to our build output - if gc_roots: - found_root = False - for root in gc_roots.split('\n'): - if root: - target = machine.succeed(f"readlink -f {root} 2>/dev/null || true").strip() - if target == gc_build_output: - found_root = True - break - - # We might have GC roots - this is expected behavior - # The key is that the build output exists and is protected from GC - machine.succeed(f"test -e {gc_build_output}") - else: - # If no GC roots yet, at least verify the build output exists - # GC roots might be created asynchronously - machine.succeed(f"test -e {gc_build_output}") - - # ======================================================================== - # Phase E2E-11: Declarative In-Repo Config - # ======================================================================== - - with subtest("Declarative .fc.toml in repo auto-creates jobset"): - # Add .fc.toml to the test repo with a new jobset definition - machine.succeed(""" - cd 
/tmp/test-flake-work && \ - cat > .fc.toml << 'FCTOML' - [[jobsets]] - name = "declarative-checks" - nix_expression = "checks" - flake_mode = true - enabled = true - FCTOML - """) - machine.succeed("cd /tmp/test-flake-work && git add -A && git commit -m 'add declarative config'") - machine.succeed("cd /tmp/test-flake-work && git push origin HEAD:refs/heads/main") - - # Wait for evaluator to pick up the new commit and process declarative config - machine.wait_until_succeeds( - f"curl -sf 'http://127.0.0.1:3000/api/v1/projects/{e2e_project_id}/jobsets' " - "| jq -e '.items[] | select(.name==\"declarative-checks\")'", - timeout=60 - ) - - # ======================================================================== - # Phase E2E-12: Webhook Endpoint - # ======================================================================== - - with subtest("Webhook endpoint accepts valid GitHub push"): - # Create a webhook config via SQL (no REST endpoint for creation) - machine.succeed( - "sudo -u postgres psql -d fc -c \"" - "INSERT INTO webhook_configs (id, project_id, forge_type, secret_hash, enabled) " - f"VALUES (gen_random_uuid(), '{e2e_project_id}', 'github', 'test-secret', true);\"" - ) - - # Get the current evaluation count - before_evals = int(machine.succeed( - f"curl -sf 'http://127.0.0.1:3000/api/v1/evaluations?jobset_id={e2e_jobset_id}' | jq '.items | length'" - ).strip()) - - # Compute HMAC-SHA256 of the payload - payload = '{"ref":"refs/heads/main","after":"abcdef1234567890abcdef1234567890abcdef12","repository":{"clone_url":"file:///var/lib/fc/test-repos/test-flake.git"}}' - - # Generate HMAC with the secret - hmac_sig = machine.succeed( - f"echo -n '{payload}' | openssl dgst -sha256 -hmac 'test-secret' -hex | awk '{{print $2}}'" - ).strip() - - # Send webhook - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - f"-X POST http://127.0.0.1:3000/api/v1/webhooks/{e2e_project_id}/github " - "-H 'Content-Type: application/json' " - f"-H 
'X-Hub-Signature-256: sha256={hmac_sig}' " - f"-d '{payload}'" - ).strip() - assert code == "200", f"Expected 200 for webhook, got {code}" - - # Verify the webhook response accepted the push - result = machine.succeed( - "curl -sf " - f"-X POST http://127.0.0.1:3000/api/v1/webhooks/{e2e_project_id}/github " - "-H 'Content-Type: application/json' " - f"-H 'X-Hub-Signature-256: sha256={hmac_sig}' " - f"-d '{payload}' | jq -r .accepted" - ).strip() - assert result == "true", f"Expected webhook accepted=true, got {result}" - - with subtest("Webhook rejects invalid signature"): - payload = '{"ref":"refs/heads/main","after":"deadbeef","repository":{}}' - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - f"-X POST http://127.0.0.1:3000/api/v1/webhooks/{e2e_project_id}/github " - "-H 'Content-Type: application/json' " - "-H 'X-Hub-Signature-256: sha256=0000000000000000000000000000000000000000000000000000000000000000' " - f"-d '{payload}'" - ).strip() - assert code == "401", f"Expected 401 for invalid webhook signature, got {code}" - - # ---- 4S: Delete project with auth (cleanup) ---- - with subtest("Delete project with admin key succeeds"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - f"-X DELETE http://127.0.0.1:3000/api/v1/projects/{project_id} " - f"{auth_header}" - ) - assert code.strip() == "200", f"Expected 200 for admin DELETE project, got {code.strip()}" - - with subtest("Deleted project returns 404"): - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - f"http://127.0.0.1:3000/api/v1/projects/{project_id}" - ) - assert code.strip() == "404", f"Expected 404 for deleted project, got {code.strip()}" - - with subtest("Cascade delete removes jobsets and evaluations"): - # The jobset and evaluation we created should be gone - code = machine.succeed( - "curl -s -o /dev/null -w '%{http_code}' " - f"http://127.0.0.1:3000/api/v1/evaluations/{test_eval_id}" - ) - assert code.strip() == "404", f"Expected 404 
for cascaded evaluation, got {code.strip()}" - ''; -}