nix: attempt to fix VM tests; general cleanup

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I65f6909ef02ab4599f5b0bbc0930367e6a6a6964
This commit is contained in:
raf 2026-02-14 13:55:07 +03:00
commit a2b638d4db
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
26 changed files with 2320 additions and 2939 deletions

View file

@@ -198,14 +198,36 @@ impl RequireRoles {
}
/// Session extraction middleware for dashboard routes.
/// Reads `fc_user_session` or `fc_session` cookie and inserts User/ApiKey into
/// extensions if valid.
/// Reads `fc_user_session` or `fc_session` cookie, or Bearer token (API key),
/// and inserts User/ApiKey into extensions if valid.
pub async fn extract_session(
State(state): State<AppState>,
mut request: Request,
next: Next,
) -> Response {
// Extract cookie header first, then clone to end the borrow
// Try Bearer token first (API key auth)
let auth_header = request
.headers()
.get("authorization")
.and_then(|v| v.to_str().ok())
.map(String::from);
if let Some(ref auth_header) = auth_header {
if let Some(token) = auth_header.strip_prefix("Bearer ") {
use sha2::{Digest, Sha256};
let mut hasher = Sha256::new();
hasher.update(token.as_bytes());
let key_hash = hex::encode(hasher.finalize());
if let Ok(Some(api_key)) =
fc_common::repo::api_keys::get_by_hash(&state.pool, &key_hash).await
{
request.extensions_mut().insert(api_key.clone());
}
}
}
// Extract cookie header next
let cookie_header = request
.headers()
.get("cookie")

View file

@ -10,6 +10,54 @@ use tokio::process::Command;
use crate::{error::ApiError, state::AppState};
/// Extract the first path info entry from `nix path-info --json` output.
///
/// Handles both the old array format (`[{"path":...}]`) and the new
/// object-keyed format (`{"/nix/store/...": {...}}`). Returns the entry
/// value plus the store path when one can be determined, or `None` when
/// the JSON is neither shape (or is empty).
fn first_path_info_entry(
    parsed: &serde_json::Value,
) -> Option<(&serde_json::Value, Option<&str>)> {
    // Old format: a JSON array of entries, each carrying its own "path" field.
    if let Some(entries) = parsed.as_array() {
        let first = entries.first()?;
        return Some((first, first.get("path").and_then(|p| p.as_str())));
    }
    // New format: a map keyed by store path — the key itself is the path.
    parsed
        .as_object()
        .and_then(|map| map.iter().next())
        .map(|(store_path, info)| (info, Some(store_path.as_str())))
}
/// Look up a store path by its nix hash, checking both the `build_products`
/// and `builds` tables.
///
/// Matches on the `/nix/store/<hash>-` prefix; returns `Ok(None)` when no
/// row in either table matches.
async fn find_store_path(
    pool: &sqlx::PgPool,
    hash: &str,
) -> std::result::Result<Option<String>, ApiError> {
    let pattern = format!("/nix/store/{hash}-%");
    // Check build_products first.
    let product_path: Option<String> = sqlx::query_scalar(
        "SELECT path FROM build_products WHERE path LIKE $1 LIMIT 1",
    )
    .bind(&pattern)
    .fetch_optional(pool)
    .await
    .map_err(|e| ApiError(fc_common::CiError::Database(e)))?;
    if let Some(found) = product_path {
        return Ok(Some(found));
    }
    // Otherwise fall back to the output path recorded on the build row.
    let build_path: Option<String> = sqlx::query_scalar(
        "SELECT build_output_path FROM builds WHERE build_output_path LIKE $1 LIMIT 1",
    )
    .bind(&pattern)
    .fetch_optional(pool)
    .await
    .map_err(|e| ApiError(fc_common::CiError::Database(e)))?;
    Ok(build_path)
}
/// Serve `NARInfo` for a store path hash.
/// GET /nix-cache/{hash}.narinfo
async fn narinfo(
@@ -27,27 +75,14 @@ async fn narinfo(
return Ok(StatusCode::NOT_FOUND.into_response());
}
// Look up the store path from build_products by matching the hash prefix
let product = sqlx::query_as::<_, fc_common::models::BuildProduct>(
"SELECT * FROM build_products WHERE path LIKE $1 LIMIT 1",
)
.bind(format!("/nix/store/{hash}-%"))
.fetch_optional(&state.pool)
.await
.map_err(|e| ApiError(fc_common::CiError::Database(e)))?;
let product = match product {
Some(p) => p,
None => return Ok(StatusCode::NOT_FOUND.into_response()),
let store_path = match find_store_path(&state.pool, hash).await? {
Some(p) if fc_common::validate::is_valid_store_path(&p) => p,
_ => return Ok(StatusCode::NOT_FOUND.into_response()),
};
if !fc_common::validate::is_valid_store_path(&product.path) {
return Ok(StatusCode::NOT_FOUND.into_response());
}
// Get narinfo from nix path-info
let output = Command::new("nix")
.args(["path-info", "--json", &product.path])
.args(["path-info", "--json", &store_path])
.output()
.await;
@@ -62,7 +97,7 @@ async fn narinfo(
Err(_) => return Ok(StatusCode::NOT_FOUND.into_response()),
};
let entry = match parsed.as_array().and_then(|a| a.first()) {
let (entry, path_from_info) = match first_path_info_entry(&parsed) {
Some(e) => e,
None => return Ok(StatusCode::NOT_FOUND.into_response()),
};
@@ -72,10 +107,7 @@ async fn narinfo(
.get("narSize")
.and_then(serde_json::Value::as_u64)
.unwrap_or(0);
let store_path = entry
.get("path")
.and_then(|v| v.as_str())
.unwrap_or(&product.path);
let store_path = path_from_info.unwrap_or(&store_path);
let refs: Vec<&str> = entry
.get("references")
@@ -174,11 +206,8 @@ async fn sign_narinfo(narinfo: &str, key_file: &std::path::Path) -> String {
if let Ok(o) = re_output
&& let Ok(parsed) =
serde_json::from_slice::<serde_json::Value>(&o.stdout)
&& let Some(sigs) = parsed
.as_array()
.and_then(|a| a.first())
.and_then(|e| e.get("signatures"))
.and_then(|v| v.as_array())
&& let Some((entry, _)) = first_path_info_entry(&parsed)
&& let Some(sigs) = entry.get("signatures").and_then(|v| v.as_array())
{
let sig_lines: Vec<String> = sigs
.iter()
@@ -214,26 +243,14 @@ async fn serve_nar_zst(
return Ok(StatusCode::NOT_FOUND.into_response());
}
let product = sqlx::query_as::<_, fc_common::models::BuildProduct>(
"SELECT * FROM build_products WHERE path LIKE $1 LIMIT 1",
)
.bind(format!("/nix/store/{hash}-%"))
.fetch_optional(&state.pool)
.await
.map_err(|e| ApiError(fc_common::CiError::Database(e)))?;
let product = match product {
Some(p) => p,
None => return Ok(StatusCode::NOT_FOUND.into_response()),
let store_path = match find_store_path(&state.pool, hash).await? {
Some(p) if fc_common::validate::is_valid_store_path(&p) => p,
_ => return Ok(StatusCode::NOT_FOUND.into_response()),
};
if !fc_common::validate::is_valid_store_path(&product.path) {
return Ok(StatusCode::NOT_FOUND.into_response());
}
// Use two piped processes instead of sh -c to prevent command injection
let mut nix_child = std::process::Command::new("nix")
.args(["store", "dump-path", &product.path])
.args(["store", "dump-path", &store_path])
.stdout(std::process::Stdio::piped())
.stderr(std::process::Stdio::null())
.spawn()
@@ -290,25 +307,13 @@ async fn serve_nar(
return Ok(StatusCode::NOT_FOUND.into_response());
}
let product = sqlx::query_as::<_, fc_common::models::BuildProduct>(
"SELECT * FROM build_products WHERE path LIKE $1 LIMIT 1",
)
.bind(format!("/nix/store/{hash}-%"))
.fetch_optional(&state.pool)
.await
.map_err(|e| ApiError(fc_common::CiError::Database(e)))?;
let product = match product {
Some(p) => p,
None => return Ok(StatusCode::NOT_FOUND.into_response()),
let store_path = match find_store_path(&state.pool, hash).await? {
Some(p) if fc_common::validate::is_valid_store_path(&p) => p,
_ => return Ok(StatusCode::NOT_FOUND.into_response()),
};
if !fc_common::validate::is_valid_store_path(&product.path) {
return Ok(StatusCode::NOT_FOUND.into_response());
}
let child = Command::new("nix")
.args(["store", "dump-path", &product.path])
.args(["store", "dump-path", &store_path])
.stdout(std::process::Stdio::piped())
.stderr(std::process::Stdio::null())
.spawn();

View file

@@ -50,9 +50,29 @@ async fn create_channel(
input
.validate()
.map_err(|msg| ApiError(fc_common::CiError::Validation(msg)))?;
let jobset_id = input.jobset_id;
let channel = fc_common::repo::channels::create(&state.pool, input)
.await
.map_err(ApiError)?;
// Catch-up: if the jobset already has a completed evaluation, promote now
if let Ok(Some(eval)) =
fc_common::repo::evaluations::get_latest(&state.pool, jobset_id).await
{
if eval.status == fc_common::models::EvaluationStatus::Completed {
let _ = fc_common::repo::channels::auto_promote_if_complete(
&state.pool,
jobset_id,
eval.id,
)
.await;
}
}
// Re-fetch to include any promotion
let channel = fc_common::repo::channels::get(&state.pool, channel.id)
.await
.map_err(ApiError)?;
Ok(Json(channel))
}

View file

@@ -3,7 +3,7 @@ use axum::{
Form,
Router,
extract::{Path, Query, State},
http::Extensions,
http::{Extensions, StatusCode},
response::{Html, IntoResponse, Redirect, Response},
routing::get,
};
@@ -1271,12 +1271,15 @@ async fn login_action(
let tmpl = LoginTemplate {
error: Some("Invalid username or password".to_string()),
};
return Html(
tmpl
.render()
.unwrap_or_else(|e| format!("Template error: {e}")),
return (
StatusCode::UNAUTHORIZED,
Html(
tmpl
.render()
.unwrap_or_else(|e| format!("Template error: {e}")),
),
)
.into_response();
.into_response();
}
}

View file

@@ -54,6 +54,15 @@ struct SystemDistributionResponse {
counts: Vec<i64>,
}
/// Escape a string for use as a Prometheus label value.
/// Per the exposition format, backslash, double-quote, and newline must be
/// escaped (as `\\`, `\"`, and `\n` respectively).
fn escape_prometheus_label(s: &str) -> String {
    // Single pass instead of three chained `replace` calls, each of which
    // allocates (and scans) a fresh intermediate String.
    let mut escaped = String::with_capacity(s.len());
    for c in s.chars() {
        match c {
            '\\' => escaped.push_str("\\\\"),
            '"' => escaped.push_str("\\\""),
            '\n' => escaped.push_str("\\n"),
            other => escaped.push(other),
        }
    }
    escaped
}
async fn prometheus_metrics(State(state): State<AppState>) -> Response {
let stats = match fc_common::repo::builds::get_stats(&state.pool).await {
Ok(s) => s,
@@ -216,8 +225,9 @@ async fn prometheus_metrics(State(state): State<AppState>) -> Response {
);
output.push_str("# TYPE fc_project_builds_completed gauge\n");
for (name, completed, _) in &per_project {
let escaped = escape_prometheus_label(name);
output.push_str(&format!(
"fc_project_builds_completed{{project=\"{name}\"}} {completed}\n"
"fc_project_builds_completed{{project=\"{escaped}\"}} {completed}\n"
));
}
output.push_str(
@@ -225,8 +235,9 @@ async fn prometheus_metrics(State(state): State<AppState>) -> Response {
);
output.push_str("# TYPE fc_project_builds_failed gauge\n");
for (name, _, failed) in &per_project {
let escaped = escape_prometheus_label(name);
output.push_str(&format!(
"fc_project_builds_failed{{project=\"{name}\"}} {failed}\n"
"fc_project_builds_failed{{project=\"{escaped}\"}} {failed}\n"
));
}
}

View file

@@ -45,7 +45,6 @@ static STYLE_CSS: &str = include_str!("../../static/style.css");
struct RateLimitState {
requests: DashMap<IpAddr, Vec<Instant>>,
_rps: u64,
burst: u32,
last_cleanup: std::sync::atomic::AtomicU64,
}
@@ -180,9 +179,9 @@ pub fn router(state: AppState, config: &ServerConfig) -> Router {
if let (Some(rps), Some(burst)) =
(config.rate_limit_rps, config.rate_limit_burst)
{
let _ = rps; // rate_limit_rps reserved for future use
let rl_state = Arc::new(RateLimitState {
requests: DashMap::new(),
_rps: rps,
burst,
last_cleanup: std::sync::atomic::AtomicU64::new(0),
});

File diff suppressed because it is too large Load diff