treewide: address all clippy lints

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I5cf55cc4cb558c3f9f764c71224e87176a6a6964
This commit is contained in:
raf 2026-02-27 21:50:35 +03:00
commit a127f3f62c
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
63 changed files with 1790 additions and 1089 deletions

View file

@ -15,6 +15,11 @@ use crate::state::AppState;
/// Write endpoints (POST/PUT/DELETE/PATCH) require a valid key.
/// Read endpoints (GET/HEAD/OPTIONS) try to extract optionally (for
/// dashboard admin UI).
///
/// # Errors
///
/// Returns unauthorized status if no valid authentication is found for write
/// operations.
pub async fn require_api_key(
State(state): State<AppState>,
mut request: Request,
@ -164,6 +169,12 @@ impl FromRequestParts<AppState> for RequireAdmin {
pub struct RequireRoles;
impl RequireRoles {
/// Check if the session has one of the allowed roles. Admin always passes.
///
/// # Errors
///
/// Returns unauthorized or forbidden status if authentication fails or role
/// is insufficient.
pub fn check(
extensions: &axum::http::Extensions,
allowed: &[&str],
@ -212,18 +223,29 @@ pub async fn extract_session(
.and_then(|v| v.to_str().ok())
.map(String::from);
if let Some(ref auth_header) = auth_header {
if let Some(token) = auth_header.strip_prefix("Bearer ") {
use sha2::{Digest, Sha256};
let mut hasher = Sha256::new();
hasher.update(token.as_bytes());
let key_hash = hex::encode(hasher.finalize());
if let Some(ref auth_header) = auth_header
&& let Some(token) = auth_header.strip_prefix("Bearer ")
{
use sha2::{Digest, Sha256};
let mut hasher = Sha256::new();
hasher.update(token.as_bytes());
let key_hash = hex::encode(hasher.finalize());
if let Ok(Some(api_key)) =
fc_common::repo::api_keys::get_by_hash(&state.pool, &key_hash).await
{
request.extensions_mut().insert(api_key.clone());
}
if let Ok(Some(api_key)) =
fc_common::repo::api_keys::get_by_hash(&state.pool, &key_hash).await
{
// Update last used timestamp asynchronously
let pool = state.pool.clone();
let key_id = api_key.id;
tokio::spawn(async move {
if let Err(e) =
fc_common::repo::api_keys::touch_last_used(&pool, key_id).await
{
tracing::warn!(error = %e, "Failed to update API key last_used timestamp");
}
});
request.extensions_mut().insert(api_key);
}
}
@ -273,16 +295,13 @@ pub async fn extract_session(
}
/// Extract the value of cookie `name` from a `Cookie:` header string.
///
/// Splits the header on `;`, trims whitespace around each `key=value` pair,
/// and returns the first matching value as an owned `String`. Returns `None`
/// when the cookie is absent or a pair has no `=`.
fn parse_cookie(header: &str, name: &str) -> Option<String> {
    header.split(';').find_map(|pair| {
        let pair = pair.trim();
        let (k, v) = pair.split_once('=')?;
        if k.trim() == name {
            Some(v.trim().to_string())
        } else {
            None
        }
    })
}

View file

@ -96,7 +96,7 @@ async fn system_status(
.await
.map_err(|e| ApiError(fc_common::CiError::Database(e)))?;
let stats = fc_common::repo::builds::get_stats(pool)
let build_stats = fc_common::repo::builds::get_stats(pool)
.await
.map_err(ApiError)?;
let builders = fc_common::repo::remote_builders::count(pool)
@ -112,10 +112,10 @@ async fn system_status(
projects_count: projects.0,
jobsets_count: jobsets.0,
evaluations_count: evaluations.0,
builds_pending: stats.pending_builds.unwrap_or(0),
builds_running: stats.running_builds.unwrap_or(0),
builds_completed: stats.completed_builds.unwrap_or(0),
builds_failed: stats.failed_builds.unwrap_or(0),
builds_pending: build_stats.pending_builds.unwrap_or(0),
builds_running: build_stats.running_builds.unwrap_or(0),
builds_completed: build_stats.completed_builds.unwrap_or(0),
builds_failed: build_stats.failed_builds.unwrap_or(0),
remote_builders: builders,
channels_count: channels.0,
}))

View file

@ -29,11 +29,8 @@ async fn build_badge(
.map_err(ApiError)?;
let jobset = jobsets.iter().find(|j| j.name == jobset_name);
let jobset = match jobset {
Some(j) => j,
None => {
return Ok(shield_svg("build", "not found", "#9f9f9f").into_response());
},
let Some(jobset) = jobset else {
return Ok(shield_svg("build", "not found", "#9f9f9f").into_response());
};
// Get latest evaluation
@ -41,13 +38,10 @@ async fn build_badge(
.await
.map_err(ApiError)?;
let eval = match eval {
Some(e) => e,
None => {
return Ok(
shield_svg("build", "no evaluations", "#9f9f9f").into_response(),
);
},
let Some(eval) = eval else {
return Ok(
shield_svg("build", "no evaluations", "#9f9f9f").into_response(),
);
};
// Find the build for this job
@ -58,31 +52,24 @@ async fn build_badge(
let build = builds.iter().find(|b| b.job_name == job_name);
let (label, color) = match build {
Some(b) => {
match b.status {
fc_common::BuildStatus::Succeeded => ("passing", "#4c1"),
fc_common::BuildStatus::Failed => ("failing", "#e05d44"),
fc_common::BuildStatus::Running => ("building", "#dfb317"),
fc_common::BuildStatus::Pending => ("queued", "#dfb317"),
fc_common::BuildStatus::Cancelled => ("cancelled", "#9f9f9f"),
fc_common::BuildStatus::DependencyFailed => ("dep failed", "#e05d44"),
fc_common::BuildStatus::Aborted => ("aborted", "#9f9f9f"),
fc_common::BuildStatus::FailedWithOutput => {
("failed output", "#e05d44")
},
fc_common::BuildStatus::Timeout => ("timeout", "#e05d44"),
fc_common::BuildStatus::CachedFailure => ("cached fail", "#e05d44"),
fc_common::BuildStatus::UnsupportedSystem => ("unsupported", "#9f9f9f"),
fc_common::BuildStatus::LogLimitExceeded => ("log limit", "#e05d44"),
fc_common::BuildStatus::NarSizeLimitExceeded => {
("nar limit", "#e05d44")
},
fc_common::BuildStatus::NonDeterministic => ("non-det", "#e05d44"),
}
},
None => ("not found", "#9f9f9f"),
};
let (label, color) = build.map_or(("not found", "#9f9f9f"), |b| {
match b.status {
fc_common::BuildStatus::Succeeded => ("passing", "#4c1"),
fc_common::BuildStatus::Failed => ("failing", "#e05d44"),
fc_common::BuildStatus::Running => ("building", "#dfb317"),
fc_common::BuildStatus::Pending => ("queued", "#dfb317"),
fc_common::BuildStatus::Cancelled => ("cancelled", "#9f9f9f"),
fc_common::BuildStatus::DependencyFailed => ("dep failed", "#e05d44"),
fc_common::BuildStatus::Aborted => ("aborted", "#9f9f9f"),
fc_common::BuildStatus::FailedWithOutput => ("failed output", "#e05d44"),
fc_common::BuildStatus::Timeout => ("timeout", "#e05d44"),
fc_common::BuildStatus::CachedFailure => ("cached fail", "#e05d44"),
fc_common::BuildStatus::UnsupportedSystem => ("unsupported", "#9f9f9f"),
fc_common::BuildStatus::LogLimitExceeded => ("log limit", "#e05d44"),
fc_common::BuildStatus::NarSizeLimitExceeded => ("nar limit", "#e05d44"),
fc_common::BuildStatus::NonDeterministic => ("non-det", "#e05d44"),
}
});
Ok(
(
@ -117,24 +104,16 @@ async fn latest_build(
.map_err(ApiError)?;
let jobset = jobsets.iter().find(|j| j.name == jobset_name);
let jobset = match jobset {
Some(j) => j,
None => {
return Ok((StatusCode::NOT_FOUND, "Jobset not found").into_response());
},
let Some(jobset) = jobset else {
return Ok((StatusCode::NOT_FOUND, "Jobset not found").into_response());
};
let eval = fc_common::repo::evaluations::get_latest(&state.pool, jobset.id)
.await
.map_err(ApiError)?;
let eval = match eval {
Some(e) => e,
None => {
return Ok(
(StatusCode::NOT_FOUND, "No evaluations found").into_response(),
);
},
let Some(eval) = eval else {
return Ok((StatusCode::NOT_FOUND, "No evaluations found").into_response());
};
let builds =
@ -143,10 +122,10 @@ async fn latest_build(
.map_err(ApiError)?;
let build = builds.iter().find(|b| b.job_name == job_name);
match build {
Some(b) => Ok(axum::Json(b.clone()).into_response()),
None => Ok((StatusCode::NOT_FOUND, "Build not found").into_response()),
}
build.map_or_else(
|| Ok((StatusCode::NOT_FOUND, "Build not found").into_response()),
|b| Ok(axum::Json(b.clone()).into_response()),
)
}
fn shield_svg(subject: &str, status: &str, color: &str) -> String {

View file

@ -133,10 +133,10 @@ async fn list_build_products(
/// GET handler returning aggregate build statistics as JSON.
///
/// # Errors
///
/// Returns `ApiError` if the database query fails.
async fn build_stats(
    State(state): State<AppState>,
) -> Result<Json<fc_common::BuildStats>, ApiError> {
    // Named `build_stats` (not `stats`) to avoid shadowing the handler name.
    let build_stats = fc_common::repo::builds::get_stats(&state.pool)
        .await
        .map_err(ApiError)?;
    Ok(Json(build_stats))
}
async fn recent_builds(
@ -242,13 +242,10 @@ async fn download_build_product(
},
};
let stdout = match child.stdout.take() {
Some(s) => s,
None => {
return Err(ApiError(fc_common::CiError::Build(
"Failed to capture output".to_string(),
)));
},
let Some(stdout) = child.stdout.take() else {
return Err(ApiError(fc_common::CiError::Build(
"Failed to capture output".to_string(),
)));
};
let stream = tokio_util::io::ReaderStream::new(stdout);

View file

@ -28,7 +28,7 @@ fn first_path_info_entry(
}
}
/// Look up a store path by its nix hash, checking both build_products and
/// Look up a store path by its nix hash, checking both `build_products` and
/// builds tables.
async fn find_store_path(
pool: &sqlx::PgPool,
@ -64,6 +64,8 @@ async fn narinfo(
State(state): State<AppState>,
Path(hash): Path<String>,
) -> Result<Response, ApiError> {
use std::fmt::Write;
if !state.config.cache.enabled {
return Ok(StatusCode::NOT_FOUND.into_response());
}
@ -97,9 +99,8 @@ async fn narinfo(
Err(_) => return Ok(StatusCode::NOT_FOUND.into_response()),
};
let (entry, path_from_info) = match first_path_info_entry(&parsed) {
Some(e) => e,
None => return Ok(StatusCode::NOT_FOUND.into_response()),
let Some((entry, path_from_info)) = first_path_info_entry(&parsed) else {
return Ok(StatusCode::NOT_FOUND.into_response());
};
let nar_hash = entry.get("narHash").and_then(|v| v.as_str()).unwrap_or("");
@ -132,8 +133,6 @@ async fn narinfo(
let file_hash = nar_hash;
use std::fmt::Write;
let refs_joined = refs.join(" ");
let mut narinfo_text = format!(
"StorePath: {store_path}\nURL: nar/{hash}.nar.zst\nCompression: \
@ -142,10 +141,10 @@ async fn narinfo(
);
if let Some(deriver) = deriver {
let _ = write!(narinfo_text, "Deriver: {deriver}\n");
let _ = writeln!(narinfo_text, "Deriver: {deriver}");
}
if let Some(ca) = ca {
let _ = write!(narinfo_text, "CA: {ca}\n");
let _ = writeln!(narinfo_text, "CA: {ca}");
}
// Optionally sign if secret key is configured
@ -177,9 +176,8 @@ async fn sign_narinfo(narinfo: &str, key_file: &std::path::Path) -> String {
.find(|l| l.starts_with("StorePath: "))
.and_then(|l| l.strip_prefix("StorePath: "));
let store_path = match store_path {
Some(p) => p,
None => return narinfo.to_string(),
let Some(store_path) = store_path else {
return narinfo.to_string();
};
let output = Command::new("nix")
@ -260,9 +258,8 @@ async fn serve_nar_zst(
))
})?;
let nix_stdout = match nix_child.stdout.take() {
Some(s) => s,
None => return Ok(StatusCode::INTERNAL_SERVER_ERROR.into_response()),
let Some(nix_stdout) = nix_child.stdout.take() else {
return Ok(StatusCode::INTERNAL_SERVER_ERROR.into_response());
};
let mut zstd_child = Command::new("zstd")
@ -278,9 +275,8 @@ async fn serve_nar_zst(
))
})?;
let zstd_stdout = match zstd_child.stdout.take() {
Some(s) => s,
None => return Ok(StatusCode::INTERNAL_SERVER_ERROR.into_response()),
let Some(zstd_stdout) = zstd_child.stdout.take() else {
return Ok(StatusCode::INTERNAL_SERVER_ERROR.into_response());
};
let stream = tokio_util::io::ReaderStream::new(zstd_stdout);
@ -320,14 +316,12 @@ async fn serve_nar(
.kill_on_drop(true)
.spawn();
let mut child = match child {
Ok(c) => c,
Err(_) => return Ok(StatusCode::INTERNAL_SERVER_ERROR.into_response()),
let Ok(mut child) = child else {
return Ok(StatusCode::INTERNAL_SERVER_ERROR.into_response());
};
let stdout = match child.stdout.take() {
Some(s) => s,
None => return Ok(StatusCode::INTERNAL_SERVER_ERROR.into_response()),
let Some(stdout) = child.stdout.take() else {
return Ok(StatusCode::INTERNAL_SERVER_ERROR.into_response());
};
let stream = tokio_util::io::ReaderStream::new(stdout);
@ -343,7 +337,7 @@ async fn serve_nar(
)
}
/// Combined NAR handler — dispatches to zstd or plain based on suffix.
/// Dispatches to zstd or plain based on suffix.
/// GET /nix-cache/nar/{hash} where hash includes .nar.zst or .nar suffix
async fn serve_nar_combined(
state: State<AppState>,

View file

@ -63,18 +63,15 @@ async fn create_channel(
// Catch-up: if the jobset already has a completed evaluation, promote now
if let Ok(Some(eval)) =
fc_common::repo::evaluations::get_latest(&state.pool, jobset_id).await
&& eval.status == fc_common::models::EvaluationStatus::Completed
&& let Err(e) = fc_common::repo::channels::auto_promote_if_complete(
&state.pool,
jobset_id,
eval.id,
)
.await
{
if eval.status == fc_common::models::EvaluationStatus::Completed {
if let Err(e) = fc_common::repo::channels::auto_promote_if_complete(
&state.pool,
jobset_id,
eval.id,
)
.await
{
tracing::warn!(jobset_id = %jobset_id, "Failed to auto-promote channel: {e}");
}
}
tracing::warn!(jobset_id = %jobset_id, "Failed to auto-promote channel: {e}");
}
// Re-fetch to include any promotion
@ -159,13 +156,12 @@ async fn nixexprs_tarball(
let _ = writeln!(nix_src, "in {{");
for build in &succeeded {
let output_path = match &build.build_output_path {
Some(p) => p,
None => continue,
let Some(output_path) = &build.build_output_path else {
continue;
};
let system = build.system.as_deref().unwrap_or("x86_64-linux");
// Sanitize job_name for use as a Nix attribute (replace dots/slashes)
let attr_name = build.job_name.replace('.', "-").replace('/', "-");
let attr_name = build.job_name.replace(['.', '/'], "-");
let _ = writeln!(
nix_src,
" \"{attr_name}\" = mkFakeDerivation {{ name = \"{}\"; system = \

View file

@ -46,7 +46,7 @@ struct BuildView {
log_url: String,
}
/// Enhanced build view for queue page with elapsed time and builder info
/// Queue page build info with elapsed time and builder details
struct QueueBuildView {
id: Uuid,
job_name: String,
@ -379,7 +379,7 @@ struct ChannelsTemplate {
channels: Vec<Channel>,
}
/// Enhanced builder view with load and activity info
/// Builder info with load and activity metrics
struct BuilderView {
id: Uuid,
name: String,
@ -455,7 +455,7 @@ async fn home(
State(state): State<AppState>,
extensions: Extensions,
) -> Html<String> {
let stats = fc_common::repo::builds::get_stats(&state.pool)
let build_stats = fc_common::repo::builds::get_stats(&state.pool)
.await
.unwrap_or_default();
let builds = fc_common::repo::builds::list_recent(&state.pool, 10)
@ -499,13 +499,13 @@ async fn home(
last_eval = Some(e);
}
}
let (status, class, time) = match &last_eval {
Some(e) => {
let (status, class, time) = last_eval.as_ref().map_or_else(
|| ("-".into(), "pending".into(), "-".into()),
|e| {
let (t, c) = eval_badge(&e.status);
(t, c, e.evaluation_time.format("%Y-%m-%d %H:%M").to_string())
},
None => ("-".into(), "pending".into(), "-".into()),
};
);
project_summaries.push(ProjectSummaryView {
id: p.id,
name: p.name.clone(),
@ -517,11 +517,11 @@ async fn home(
}
let tmpl = HomeTemplate {
total_builds: stats.total_builds.unwrap_or(0),
completed_builds: stats.completed_builds.unwrap_or(0),
failed_builds: stats.failed_builds.unwrap_or(0),
running_builds: stats.running_builds.unwrap_or(0),
pending_builds: stats.pending_builds.unwrap_or(0),
total_builds: build_stats.total_builds.unwrap_or(0),
completed_builds: build_stats.completed_builds.unwrap_or(0),
failed_builds: build_stats.failed_builds.unwrap_or(0),
running_builds: build_stats.running_builds.unwrap_or(0),
pending_builds: build_stats.pending_builds.unwrap_or(0),
recent_builds: builds.iter().map(build_view).collect(),
recent_evals: evals.iter().map(eval_view).collect(),
projects: project_summaries,
@ -581,9 +581,9 @@ async fn project_page(
Path(id): Path<Uuid>,
extensions: Extensions,
) -> Html<String> {
let project = match fc_common::repo::projects::get(&state.pool, id).await {
Ok(p) => p,
Err(_) => return Html("Project not found".to_string()),
let Ok(project) = fc_common::repo::projects::get(&state.pool, id).await
else {
return Html("Project not found".to_string());
};
let jobsets =
fc_common::repo::jobsets::list_for_project(&state.pool, id, 100, 0)
@ -604,7 +604,7 @@ async fn project_page(
.unwrap_or_default();
evals.append(&mut js_evals);
}
evals.sort_by(|a, b| b.evaluation_time.cmp(&a.evaluation_time));
evals.sort_by_key(|e| std::cmp::Reverse(e.evaluation_time));
evals.truncate(10);
let tmpl = ProjectTemplate {
@ -625,18 +625,13 @@ async fn jobset_page(
State(state): State<AppState>,
Path(id): Path<Uuid>,
) -> Html<String> {
let jobset = match fc_common::repo::jobsets::get(&state.pool, id).await {
Ok(j) => j,
Err(_) => return Html("Jobset not found".to_string()),
let Ok(jobset) = fc_common::repo::jobsets::get(&state.pool, id).await else {
return Html("Jobset not found".to_string());
};
let project = match fc_common::repo::projects::get(
&state.pool,
jobset.project_id,
)
.await
{
Ok(p) => p,
Err(_) => return Html("Project not found".to_string()),
let Ok(project) =
fc_common::repo::projects::get(&state.pool, jobset.project_id).await
else {
return Html("Project not found".to_string());
};
let evals = fc_common::repo::evaluations::list_filtered(
@ -769,24 +764,20 @@ async fn evaluation_page(
State(state): State<AppState>,
Path(id): Path<Uuid>,
) -> Html<String> {
let eval = match fc_common::repo::evaluations::get(&state.pool, id).await {
Ok(e) => e,
Err(_) => return Html("Evaluation not found".to_string()),
let Ok(eval) = fc_common::repo::evaluations::get(&state.pool, id).await
else {
return Html("Evaluation not found".to_string());
};
let jobset =
match fc_common::repo::jobsets::get(&state.pool, eval.jobset_id).await {
Ok(j) => j,
Err(_) => return Html("Jobset not found".to_string()),
};
let project = match fc_common::repo::projects::get(
&state.pool,
jobset.project_id,
)
.await
{
Ok(p) => p,
Err(_) => return Html("Project not found".to_string()),
let Ok(jobset) =
fc_common::repo::jobsets::get(&state.pool, eval.jobset_id).await
else {
return Html("Jobset not found".to_string());
};
let Ok(project) =
fc_common::repo::projects::get(&state.pool, jobset.project_id).await
else {
return Html("Project not found".to_string());
};
let builds = fc_common::repo::builds::list_filtered(
@ -919,31 +910,24 @@ async fn build_page(
State(state): State<AppState>,
Path(id): Path<Uuid>,
) -> Html<String> {
let build = match fc_common::repo::builds::get(&state.pool, id).await {
Ok(b) => b,
Err(_) => return Html("Build not found".to_string()),
let Ok(build) = fc_common::repo::builds::get(&state.pool, id).await else {
return Html("Build not found".to_string());
};
let eval =
match fc_common::repo::evaluations::get(&state.pool, build.evaluation_id)
.await
{
Ok(e) => e,
Err(_) => return Html("Evaluation not found".to_string()),
};
let jobset =
match fc_common::repo::jobsets::get(&state.pool, eval.jobset_id).await {
Ok(j) => j,
Err(_) => return Html("Jobset not found".to_string()),
};
let project = match fc_common::repo::projects::get(
&state.pool,
jobset.project_id,
)
.await
{
Ok(p) => p,
Err(_) => return Html("Project not found".to_string()),
let Ok(eval) =
fc_common::repo::evaluations::get(&state.pool, build.evaluation_id).await
else {
return Html("Evaluation not found".to_string());
};
let Ok(jobset) =
fc_common::repo::jobsets::get(&state.pool, eval.jobset_id).await
else {
return Html("Jobset not found".to_string());
};
let Ok(project) =
fc_common::repo::projects::get(&state.pool, jobset.project_id).await
else {
return Html("Project not found".to_string());
};
let eval_commit_short = if eval.commit_hash.len() > 12 {
@ -1016,12 +1000,10 @@ async fn queue_page(State(state): State<AppState>) -> Html<String> {
let running_builds: Vec<QueueBuildView> = running
.iter()
.map(|b| {
let elapsed = if let Some(started) = b.started_at {
let elapsed = b.started_at.map_or_else(String::new, |started| {
let dur = chrono::Utc::now() - started;
format_elapsed(dur.num_seconds())
} else {
String::new()
};
});
let builder_name =
b.builder_id.and_then(|id| builder_map.get(&id).cloned());
QueueBuildView {
@ -1114,7 +1096,7 @@ async fn admin_page(
.fetch_one(pool)
.await
.unwrap_or((0,));
let stats = fc_common::repo::builds::get_stats(pool)
let build_stats = fc_common::repo::builds::get_stats(pool)
.await
.unwrap_or_default();
let builders_count = fc_common::repo::remote_builders::count(pool)
@ -1129,10 +1111,10 @@ async fn admin_page(
projects_count: projects.0,
jobsets_count: jobsets.0,
evaluations_count: evaluations.0,
builds_pending: stats.pending_builds.unwrap_or(0),
builds_running: stats.running_builds.unwrap_or(0),
builds_completed: stats.completed_builds.unwrap_or(0),
builds_failed: stats.failed_builds.unwrap_or(0),
builds_pending: build_stats.pending_builds.unwrap_or(0),
builds_running: build_stats.running_builds.unwrap_or(0),
builds_completed: build_stats.completed_builds.unwrap_or(0),
builds_failed: build_stats.failed_builds.unwrap_or(0),
remote_builders: builders_count,
channels_count: channels.0,
};
@ -1381,36 +1363,28 @@ async fn logout_action(
.and_then(|v| v.to_str().ok())
{
// Check for user session
if let Some(session_id) = cookie_header
.split(';')
.filter_map(|pair| {
let pair = pair.trim();
let (k, v) = pair.split_once('=')?;
if k.trim() == "fc_user_session" {
Some(v.trim().to_string())
} else {
None
}
})
.next()
{
if let Some(session_id) = cookie_header.split(';').find_map(|pair| {
let pair = pair.trim();
let (k, v) = pair.split_once('=')?;
if k.trim() == "fc_user_session" {
Some(v.trim().to_string())
} else {
None
}
}) {
state.sessions.remove(&session_id);
}
// Check for legacy API key session
if let Some(session_id) = cookie_header
.split(';')
.filter_map(|pair| {
let pair = pair.trim();
let (k, v) = pair.split_once('=')?;
if k.trim() == "fc_session" {
Some(v.trim().to_string())
} else {
None
}
})
.next()
{
if let Some(session_id) = cookie_header.split(';').find_map(|pair| {
let pair = pair.trim();
let (k, v) = pair.split_once('=')?;
if k.trim() == "fc_session" {
Some(v.trim().to_string())
} else {
None
}
}) {
state.sessions.remove(&session_id);
}
}
@ -1556,12 +1530,13 @@ async fn starred_page(
Vec::new()
};
if let Some(build) = builds.first() {
let (text, class) = status_badge(&build.status);
(text, class, Some(build.id))
} else {
("No builds".to_string(), "pending".to_string(), None)
}
builds.first().map_or_else(
|| ("No builds".to_string(), "pending".to_string(), None),
|build| {
let (text, class) = status_badge(&build.status);
(text, class, Some(build.id))
},
)
} else {
("No builds".to_string(), "pending".to_string(), None)
};

View file

@ -93,9 +93,9 @@ async fn stream_build_log(
if active_path.exists() { active_path.clone() } else { final_path.clone() }
};
let file = if let Ok(f) = tokio::fs::File::open(&path).await { f } else {
yield Ok(Event::default().data("Failed to open log file"));
return;
let Ok(file) = tokio::fs::File::open(&path).await else {
yield Ok(Event::default().data("Failed to open log file"));
return;
};
let mut reader = BufReader::new(file);
@ -106,7 +106,7 @@ async fn stream_build_log(
line.clear();
match reader.read_line(&mut line).await {
Ok(0) => {
// EOF check if build is still running
// EOF - check if build is still running
consecutive_empty += 1;
if consecutive_empty > 5 {
// Check build status

View file

@ -21,11 +21,11 @@ struct TimeseriesQuery {
bucket: i32,
}
/// Default lookback window for timeseries queries, in hours.
/// `const fn` so the value is computable at compile time (clippy
/// `missing_const_for_fn`); presumably used as a serde default — confirm.
const fn default_hours() -> i32 {
    24
}
/// Default bucket size for timeseries queries, in minutes.
/// `const fn` so the value is computable at compile time (clippy
/// `missing_const_for_fn`); presumably used as a serde default — confirm.
const fn default_bucket() -> i32 {
    60
}
@ -64,21 +64,19 @@ fn escape_prometheus_label(s: &str) -> String {
}
async fn prometheus_metrics(State(state): State<AppState>) -> Response {
let stats = match fc_common::repo::builds::get_stats(&state.pool).await {
Ok(s) => s,
Err(_) => {
return StatusCode::INTERNAL_SERVER_ERROR.into_response();
},
use std::fmt::Write;
let Ok(build_stats) = fc_common::repo::builds::get_stats(&state.pool).await
else {
return StatusCode::INTERNAL_SERVER_ERROR.into_response();
};
let eval_count: i64 =
match sqlx::query_as::<_, (i64,)>("SELECT COUNT(*) FROM evaluations")
sqlx::query_as::<_, (i64,)>("SELECT COUNT(*) FROM evaluations")
.fetch_one(&state.pool)
.await
{
Ok(row) => row.0,
Err(_) => 0,
};
.ok()
.map_or(0, |row| row.0);
let eval_by_status: Vec<(String, i64)> = sqlx::query_as(
"SELECT status::text, COUNT(*) FROM evaluations GROUP BY status",
@ -124,8 +122,6 @@ async fn prometheus_metrics(State(state): State<AppState>) -> Response {
.await
.unwrap_or((None, None, None));
use std::fmt::Write;
let mut output = String::with_capacity(2048);
// Build counts by status
@ -134,27 +130,27 @@ async fn prometheus_metrics(State(state): State<AppState>) -> Response {
let _ = writeln!(
output,
"fc_builds_total{{status=\"succeeded\"}} {}",
stats.completed_builds.unwrap_or(0)
build_stats.completed_builds.unwrap_or(0)
);
let _ = writeln!(
output,
"fc_builds_total{{status=\"failed\"}} {}",
stats.failed_builds.unwrap_or(0)
build_stats.failed_builds.unwrap_or(0)
);
let _ = writeln!(
output,
"fc_builds_total{{status=\"running\"}} {}",
stats.running_builds.unwrap_or(0)
build_stats.running_builds.unwrap_or(0)
);
let _ = writeln!(
output,
"fc_builds_total{{status=\"pending\"}} {}",
stats.pending_builds.unwrap_or(0)
build_stats.pending_builds.unwrap_or(0)
);
let _ = writeln!(
output,
"fc_builds_total{{status=\"all\"}} {}",
stats.total_builds.unwrap_or(0)
build_stats.total_builds.unwrap_or(0)
);
// Build duration stats
@ -166,7 +162,7 @@ async fn prometheus_metrics(State(state): State<AppState>) -> Response {
let _ = writeln!(
output,
"fc_builds_avg_duration_seconds {:.2}",
stats.avg_duration_seconds.unwrap_or(0.0)
build_stats.avg_duration_seconds.unwrap_or(0.0)
);
output.push_str(
@ -214,7 +210,7 @@ async fn prometheus_metrics(State(state): State<AppState>) -> Response {
let _ = writeln!(
output,
"fc_queue_depth {}",
stats.pending_builds.unwrap_or(0)
build_stats.pending_builds.unwrap_or(0)
);
// Infrastructure

View file

@ -44,13 +44,15 @@ use crate::{
static STYLE_CSS: &str = include_str!("../../static/style.css");
/// Helper to generate secure cookie flags based on server configuration.
/// Returns a string containing cookie security attributes: HttpOnly, SameSite,
/// and optionally Secure.
/// Returns a string containing cookie security attributes: `HttpOnly`,
/// `SameSite`, and optionally Secure.
///
/// The Secure flag is set when:
///
/// 1. `force_secure_cookies` is enabled in config (for HTTPS reverse proxies),
/// OR 2. The server is not bound to localhost/127.0.0.1 AND not in permissive
/// mode
/// 2. OR the server is not bound to localhost/127.0.0.1 AND not in permissive
/// mode
#[must_use]
pub fn cookie_security_flags(
config: &fc_common::config::ServerConfig,
) -> String {

View file

@ -89,12 +89,9 @@ fn build_github_client(config: &GitHubOAuthConfig) -> GitHubOAuthClient {
}
async fn github_login(State(state): State<AppState>) -> impl IntoResponse {
let config = match &state.config.oauth.github {
Some(c) => c,
None => {
return (StatusCode::NOT_FOUND, "GitHub OAuth not configured")
.into_response();
},
let Some(config) = &state.config.oauth.github else {
return (StatusCode::NOT_FOUND, "GitHub OAuth not configured")
.into_response();
};
let client = build_github_client(config);
@ -141,13 +138,10 @@ async fn github_callback(
headers: axum::http::HeaderMap,
Query(params): Query<OAuthCallbackParams>,
) -> Result<impl IntoResponse, ApiError> {
let config = match &state.config.oauth.github {
Some(c) => c,
None => {
return Err(ApiError(fc_common::CiError::NotFound(
"GitHub OAuth not configured".to_string(),
)));
},
let Some(config) = &state.config.oauth.github else {
return Err(ApiError(fc_common::CiError::NotFound(
"GitHub OAuth not configured".to_string(),
)));
};
// Verify CSRF token from cookie
@ -290,7 +284,7 @@ async fn github_callback(
};
let clear_state =
format!("fc_oauth_state=; {}; Path=/; Max-Age=0", security_flags);
format!("fc_oauth_state=; {security_flags}; Path=/; Max-Age=0");
let session_cookie = format!(
"fc_user_session={}; {}; Path=/; Max-Age={}",
session.0,
@ -371,21 +365,21 @@ mod tests {
/// Verify the Secure cookie flag is added for HTTPS redirect URIs only.
/// Distinct variable names (`http_secure_flag` / `https_secure_flag`) avoid
/// shadowing between the two cases.
fn test_secure_flag_detection() {
    // HTTP should not have Secure flag
    let http_uri = "http://localhost:3000/callback";
    let http_secure_flag = if http_uri.starts_with("https://") {
        "; Secure"
    } else {
        ""
    };
    assert_eq!(http_secure_flag, "");

    // HTTPS should have Secure flag
    let https_uri = "https://example.com/callback";
    let https_secure_flag = if https_uri.starts_with("https://") {
        "; Secure"
    } else {
        ""
    };
    assert_eq!(https_secure_flag, "; Secure");
}
#[test]
@ -437,7 +431,7 @@ mod tests {
#[test]
fn test_github_emails_find_primary_verified() {
let emails = vec![
let emails = [
GitHubEmailResponse {
email: "secondary@example.com".to_string(),
primary: false,
@ -467,7 +461,7 @@ mod tests {
#[test]
fn test_github_emails_fallback_to_verified() {
// No primary email, should fall back to first verified
let emails = vec![
let emails = [
GitHubEmailResponse {
email: "unverified@example.com".to_string(),
primary: false,
@ -492,7 +486,7 @@ mod tests {
#[test]
fn test_github_emails_no_verified() {
// No verified emails
let emails = vec![GitHubEmailResponse {
let emails = [GitHubEmailResponse {
email: "unverified@example.com".to_string(),
primary: true,
verified: false,
@ -540,8 +534,8 @@ mod tests {
let max_age = 7 * 24 * 60 * 60;
let cookie = format!(
"fc_user_session={}; HttpOnly; SameSite=Lax; Path=/; Max-Age={}{}",
session_token, max_age, secure_flag
"fc_user_session={session_token}; HttpOnly; SameSite=Lax; Path=/; \
Max-Age={max_age}{secure_flag}"
);
assert!(cookie.contains("fc_user_session=test-session-token"));

View file

@ -159,17 +159,14 @@ async fn handle_github_webhook(
.await
.map_err(ApiError)?;
let webhook_config = match webhook_config {
Some(c) => c,
None => {
return Ok((
StatusCode::NOT_FOUND,
Json(WebhookResponse {
accepted: false,
message: "No GitHub webhook configured for this project".to_string(),
}),
));
},
let Some(webhook_config) = webhook_config else {
return Ok((
StatusCode::NOT_FOUND,
Json(WebhookResponse {
accepted: false,
message: "No GitHub webhook configured for this project".to_string(),
}),
));
};
// Verify signature if secret is configured
@ -299,17 +296,14 @@ async fn handle_github_pull_request(
));
}
let pr = match payload.pull_request {
Some(pr) => pr,
None => {
return Ok((
StatusCode::OK,
Json(WebhookResponse {
accepted: true,
message: "No pull request data, skipping".to_string(),
}),
));
},
let Some(pr) = payload.pull_request else {
return Ok((
StatusCode::OK,
Json(WebhookResponse {
accepted: true,
message: "No pull request data, skipping".to_string(),
}),
));
};
// Skip draft PRs
@ -513,6 +507,8 @@ async fn handle_gitlab_webhook(
headers: HeaderMap,
body: Bytes,
) -> Result<(StatusCode, Json<WebhookResponse>), ApiError> {
use subtle::ConstantTimeEq;
// Check webhook config exists
let webhook_config = repo::webhook_configs::get_by_project_and_forge(
&state.pool,
@ -522,17 +518,14 @@ async fn handle_gitlab_webhook(
.await
.map_err(ApiError)?;
let webhook_config = match webhook_config {
Some(c) => c,
None => {
return Ok((
StatusCode::NOT_FOUND,
Json(WebhookResponse {
accepted: false,
message: "No GitLab webhook configured for this project".to_string(),
}),
));
},
let Some(webhook_config) = webhook_config else {
return Ok((
StatusCode::NOT_FOUND,
Json(WebhookResponse {
accepted: false,
message: "No GitLab webhook configured for this project".to_string(),
}),
));
};
// Verify token if secret is configured
@ -544,7 +537,6 @@ async fn handle_gitlab_webhook(
.unwrap_or("");
// Use constant-time comparison to prevent timing attacks
use subtle::ConstantTimeEq;
let token_matches = token.len() == secret.len()
&& token.as_bytes().ct_eq(secret.as_bytes()).into();
@ -656,17 +648,14 @@ async fn handle_gitlab_merge_request(
)))
})?;
let attrs = match payload.object_attributes {
Some(a) => a,
None => {
return Ok((
StatusCode::OK,
Json(WebhookResponse {
accepted: true,
message: "No merge request attributes, skipping".to_string(),
}),
));
},
let Some(attrs) = payload.object_attributes else {
return Ok((
StatusCode::OK,
Json(WebhookResponse {
accepted: true,
message: "No merge request attributes, skipping".to_string(),
}),
));
};
// Skip draft/WIP merge requests
@ -774,12 +763,13 @@ mod tests {
#[test]
fn test_verify_signature_valid() {
use hmac::{Hmac, Mac};
use sha2::Sha256;
let secret = "test-secret";
let body = b"test-body";
// Compute expected signature
use hmac::{Hmac, Mac};
use sha2::Sha256;
let mut mac = Hmac::<Sha256>::new_from_slice(secret.as_bytes()).unwrap();
mac.update(body);
let expected = hex::encode(mac.finalize().into_bytes());
@ -787,7 +777,7 @@ mod tests {
assert!(verify_signature(
secret,
body,
&format!("sha256={}", expected)
&format!("sha256={expected}")
));
}
@ -800,20 +790,16 @@ mod tests {
#[test]
fn test_verify_signature_wrong_secret() {
let body = b"test-body";
use hmac::{Hmac, Mac};
use sha2::Sha256;
let body = b"test-body";
let mut mac = Hmac::<Sha256>::new_from_slice(b"secret1").unwrap();
mac.update(body);
let sig = hex::encode(mac.finalize().into_bytes());
// Verify with different secret should fail
assert!(!verify_signature(
"secret2",
body,
&format!("sha256={}", sig)
));
assert!(!verify_signature("secret2", body, &format!("sha256={sig}")));
}
#[test]

View file

@ -9,11 +9,11 @@ use sqlx::PgPool;
/// Maximum session lifetime before automatic eviction (24 hours).
const SESSION_MAX_AGE: std::time::Duration =
std::time::Duration::from_secs(24 * 60 * 60);
std::time::Duration::from_hours(24);
/// How often the background cleanup task runs (every 5 minutes).
const SESSION_CLEANUP_INTERVAL: std::time::Duration =
std::time::Duration::from_secs(5 * 60);
std::time::Duration::from_mins(5);
/// Session data supporting both API key and user authentication
#[derive(Clone)]
@ -27,13 +27,10 @@ impl SessionData {
/// Check if the session has admin role
#[must_use]
pub fn is_admin(&self) -> bool {
if let Some(ref user) = self.user {
user.role == "admin"
} else if let Some(ref key) = self.api_key {
key.role == "admin"
} else {
false
}
self.user.as_ref().map_or_else(
|| self.api_key.as_ref().is_some_and(|key| key.role == "admin"),
|user| user.role == "admin",
)
}
/// Check if the session has a specific role
@ -42,25 +39,24 @@ impl SessionData {
if self.is_admin() {
return true;
}
if let Some(ref user) = self.user {
user.role == role
} else if let Some(ref key) = self.api_key {
key.role == role
} else {
false
}
self.user.as_ref().map_or_else(
|| self.api_key.as_ref().is_some_and(|key| key.role == role),
|user| user.role == role,
)
}
/// Get the display name for the session (username or api key name)
#[must_use]
pub fn display_name(&self) -> String {
if let Some(ref user) = self.user {
user.username.clone()
} else if let Some(ref key) = self.api_key {
key.name.clone()
} else {
"Anonymous".to_string()
}
self.user.as_ref().map_or_else(
|| {
self
.api_key
.as_ref()
.map_or_else(|| "Anonymous".to_string(), |key| key.name.clone())
},
|user| user.username.clone(),
)
}
/// Check if this is a user session (not just API key)

View file

@ -1,5 +1,5 @@
//! Integration tests for API endpoints.
//! Requires TEST_DATABASE_URL to be set.
//! Requires `TEST_DATABASE_URL` to be set.
use axum::{
body::Body,
@ -8,12 +8,9 @@ use axum::{
use tower::ServiceExt;
async fn get_pool() -> Option<sqlx::PgPool> {
let url = match std::env::var("TEST_DATABASE_URL") {
Ok(url) => url,
Err(_) => {
println!("Skipping API test: TEST_DATABASE_URL not set");
return None;
},
let Ok(url) = std::env::var("TEST_DATABASE_URL") else {
println!("Skipping API test: TEST_DATABASE_URL not set");
return None;
};
let pool = sqlx::postgres::PgPoolOptions::new()
@ -44,9 +41,8 @@ fn build_app(pool: sqlx::PgPool) -> axum::Router {
#[tokio::test]
async fn test_router_no_duplicate_routes() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
let Some(pool) = get_pool().await else {
return;
};
let config = fc_common::config::Config::default();
@ -79,9 +75,8 @@ fn build_app_with_config(
#[tokio::test]
async fn test_health_endpoint() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
let Some(pool) = get_pool().await else {
return;
};
let app = build_app(pool);
@ -108,9 +103,8 @@ async fn test_health_endpoint() {
#[tokio::test]
async fn test_project_endpoints() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
let Some(pool) = get_pool().await else {
return;
};
let app = build_app(pool);
@ -204,9 +198,8 @@ async fn test_project_endpoints() {
#[tokio::test]
async fn test_builds_endpoints() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
let Some(pool) = get_pool().await else {
return;
};
let app = build_app(pool);
@ -244,9 +237,8 @@ async fn test_builds_endpoints() {
#[tokio::test]
async fn test_error_response_includes_error_code() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
let Some(pool) = get_pool().await else {
return;
};
let app = build_app(pool);
@ -275,9 +267,8 @@ async fn test_error_response_includes_error_code() {
#[tokio::test]
async fn test_cache_invalid_hash_returns_404() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
let Some(pool) = get_pool().await else {
return;
};
let mut config = fc_common::config::Config::default();
@ -352,9 +343,8 @@ async fn test_cache_invalid_hash_returns_404() {
#[tokio::test]
async fn test_cache_nar_invalid_hash_returns_404() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
let Some(pool) = get_pool().await else {
return;
};
let mut config = fc_common::config::Config::default();
@ -390,9 +380,8 @@ async fn test_cache_nar_invalid_hash_returns_404() {
#[tokio::test]
async fn test_cache_disabled_returns_404() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
let Some(pool) = get_pool().await else {
return;
};
let mut config = fc_common::config::Config::default();
@ -426,9 +415,8 @@ async fn test_cache_disabled_returns_404() {
#[tokio::test]
async fn test_search_rejects_long_query() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
let Some(pool) = get_pool().await else {
return;
};
let app = build_app(pool);
@ -457,9 +445,8 @@ async fn test_search_rejects_long_query() {
#[tokio::test]
async fn test_search_rejects_empty_query() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
let Some(pool) = get_pool().await else {
return;
};
let app = build_app(pool);
@ -486,9 +473,8 @@ async fn test_search_rejects_empty_query() {
#[tokio::test]
async fn test_search_whitespace_only_query() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
let Some(pool) = get_pool().await else {
return;
};
let app = build_app(pool);
@ -514,9 +500,8 @@ async fn test_search_whitespace_only_query() {
#[tokio::test]
async fn test_builds_list_with_system_filter() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
let Some(pool) = get_pool().await else {
return;
};
let app = build_app(pool);
@ -544,9 +529,8 @@ async fn test_builds_list_with_system_filter() {
#[tokio::test]
async fn test_builds_list_with_job_name_filter() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
let Some(pool) = get_pool().await else {
return;
};
let app = build_app(pool);
@ -572,9 +556,8 @@ async fn test_builds_list_with_job_name_filter() {
#[tokio::test]
async fn test_builds_list_combined_filters() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
let Some(pool) = get_pool().await else {
return;
};
let app = build_app(pool);
@ -595,9 +578,8 @@ async fn test_builds_list_combined_filters() {
#[tokio::test]
async fn test_cache_info_returns_correct_headers() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
let Some(pool) = get_pool().await else {
return;
};
let mut config = fc_common::config::Config::default();
@ -631,9 +613,8 @@ async fn test_cache_info_returns_correct_headers() {
#[tokio::test]
async fn test_metrics_endpoint() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
let Some(pool) = get_pool().await else {
return;
};
let app = build_app(pool);
@ -718,9 +699,8 @@ async fn test_metrics_endpoint() {
#[tokio::test]
async fn test_get_nonexistent_build_returns_error_code() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
let Some(pool) = get_pool().await else {
return;
};
let app = build_app(pool);
@ -750,9 +730,8 @@ async fn test_get_nonexistent_build_returns_error_code() {
#[tokio::test]
async fn test_create_project_validation_rejects_invalid_name() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
let Some(pool) = get_pool().await else {
return;
};
let app = build_app(pool);
@ -786,9 +765,8 @@ async fn test_create_project_validation_rejects_invalid_name() {
#[tokio::test]
async fn test_create_project_validation_rejects_bad_url() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
let Some(pool) = get_pool().await else {
return;
};
let app = build_app(pool);
@ -821,9 +799,8 @@ async fn test_create_project_validation_rejects_bad_url() {
#[tokio::test]
async fn test_create_project_validation_accepts_valid() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
let Some(pool) = get_pool().await else {
return;
};
let app = build_app(pool);
@ -854,14 +831,14 @@ async fn test_create_project_validation_accepts_valid() {
#[tokio::test]
async fn test_project_create_with_auth() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
use sha2::Digest;
let Some(pool) = get_pool().await else {
return;
};
// Create an admin API key
let mut hasher = sha2::Sha256::new();
use sha2::Digest;
hasher.update(b"fc_test_project_auth");
let key_hash = hex::encode(hasher.finalize());
let _ =
@ -900,9 +877,8 @@ async fn test_project_create_with_auth() {
#[tokio::test]
async fn test_project_create_without_auth_rejected() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
let Some(pool) = get_pool().await else {
return;
};
let app = build_app(pool);
@ -929,14 +905,14 @@ async fn test_project_create_without_auth_rejected() {
#[tokio::test]
async fn test_setup_endpoint_creates_project_and_jobsets() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
use sha2::Digest;
let Some(pool) = get_pool().await else {
return;
};
// Create an admin API key
let mut hasher = sha2::Sha256::new();
use sha2::Digest;
hasher.update(b"fc_test_setup_key");
let key_hash = hex::encode(hasher.finalize());
let _ =
@ -991,9 +967,8 @@ async fn test_setup_endpoint_creates_project_and_jobsets() {
#[tokio::test]
async fn test_security_headers_present() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
let Some(pool) = get_pool().await else {
return;
};
let app = build_app(pool);
@ -1033,9 +1008,8 @@ async fn test_security_headers_present() {
#[tokio::test]
async fn test_static_css_served() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
let Some(pool) = get_pool().await else {
return;
};
let app = build_app(pool);

View file

@ -1,5 +1,5 @@
//! End-to-end integration test.
//! Requires TEST_DATABASE_URL to be set.
//! Requires `TEST_DATABASE_URL` to be set.
//! Tests the full flow: create project -> jobset -> evaluation -> builds.
//!
//! Nix-dependent steps are skipped if nix is not available.
@ -12,12 +12,9 @@ use fc_common::models::*;
use tower::ServiceExt;
async fn get_pool() -> Option<sqlx::PgPool> {
let url = match std::env::var("TEST_DATABASE_URL") {
Ok(url) => url,
Err(_) => {
println!("Skipping E2E test: TEST_DATABASE_URL not set");
return None;
},
let Ok(url) = std::env::var("TEST_DATABASE_URL") else {
println!("Skipping E2E test: TEST_DATABASE_URL not set");
return None;
};
let pool = sqlx::postgres::PgPoolOptions::new()
@ -36,9 +33,8 @@ async fn get_pool() -> Option<sqlx::PgPool> {
#[tokio::test]
async fn test_e2e_project_eval_build_flow() {
let pool = match get_pool().await {
Some(p) => p,
None => return,
let Some(pool) = get_pool().await else {
return;
};
// 1. Create a project
@ -254,10 +250,10 @@ async fn test_e2e_project_eval_build_flow() {
assert_eq!(steps[0].exit_code, Some(0));
// 14. Verify build stats reflect our changes
let stats = fc_common::repo::builds::get_stats(&pool)
let build_stats = fc_common::repo::builds::get_stats(&pool)
.await
.expect("get stats");
assert!(stats.completed_builds.unwrap_or(0) >= 2);
assert!(build_stats.completed_builds.unwrap_or(0) >= 2);
// 15. Create a channel and verify it works
let channel = fc_common::repo::channels::create(&pool, CreateChannel {