treewide: format with nightly rustfmt; auto-fix Clippy lints

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I15d9215ab506b37954468d99746098326a6a6964
This commit is contained in:
raf 2026-02-08 02:15:06 +03:00
commit 73919f2f9e
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
30 changed files with 144 additions and 151 deletions

View file

@ -51,6 +51,7 @@ pub enum CiError {
}
impl CiError {
#[must_use]
pub fn is_disk_full(&self) -> bool {
let msg = self.to_string().to_lowercase();
msg.contains("no space left on device")
@ -78,7 +79,7 @@ pub fn check_disk_space(path: &std::path::Path) -> Result<DiskSpaceInfo> {
})?;
let mut statfs: libc::statfs = unsafe { std::mem::zeroed() };
if unsafe { libc::statfs(cpath.as_ptr(), &mut statfs) } != 0 {
if unsafe { libc::statfs(cpath.as_ptr(), &raw mut statfs) } != 0 {
return Err(CiError::Io(std::io::Error::last_os_error()));
}
@ -185,16 +186,19 @@ pub struct DiskSpaceInfo {
impl DiskSpaceInfo {
/// Check if disk space is critically low (less than 1GB available).
#[must_use]
pub fn is_critical(&self) -> bool {
    // Named threshold for readability; identical to comparing against
    // the literal 1.0.
    const CRITICAL_THRESHOLD_GB: f64 = 1.0;
    self.available_gb < CRITICAL_THRESHOLD_GB
}
/// Check if disk space is low (less than 5GB available).
#[must_use]
pub fn is_low(&self) -> bool {
    // Named threshold for readability; identical to comparing against
    // the literal 5.0.
    const LOW_THRESHOLD_GB: f64 = 5.0;
    self.available_gb < LOW_THRESHOLD_GB
}
/// Get a human-readable summary
#[must_use]
pub fn summary(&self) -> String {
format!(
"Total: {:.1}GB, Free: {:.1}GB ({:.1}%), Available: {:.1}GB",

View file

@ -8,7 +8,7 @@ use std::{
use tracing::{info, warn};
/// Remove GC root symlinks with mtime older than max_age. Returns count
/// Remove GC root symlinks with mtime older than `max_age`. Returns count
/// removed.
pub fn cleanup_old_roots(
roots_dir: &Path,

View file

@ -15,13 +15,15 @@ impl LogStorage {
}
/// Returns the filesystem path where a build's log should be stored
#[must_use]
pub fn log_path(&self, build_id: &Uuid) -> PathBuf {
self.log_dir.join(format!("{}.log", build_id))
self.log_dir.join(format!("{build_id}.log"))
}
/// Returns the filesystem path for an active (in-progress) build log
#[must_use]
pub fn log_path_for_active(&self, build_id: &Uuid) -> PathBuf {
self.log_dir.join(format!("{}.active.log", build_id))
self.log_dir.join(format!("{build_id}.active.log"))
}
/// Write build log content to file

View file

@ -96,7 +96,7 @@ pub async fn probe_flake(
};
let output =
tokio::time::timeout(std::time::Duration::from_secs(60), async {
tokio::time::timeout(std::time::Duration::from_mins(1), async {
tokio::process::Command::new("nix")
.args([
"--extra-experimental-features",
@ -230,7 +230,7 @@ pub async fn probe_flake(
description: top
.get("description")
.and_then(|v| v.as_str())
.map(|s| s.to_string()),
.map(std::string::ToString::to_string),
url: Some(repo_url.to_string()),
};

View file

@ -72,11 +72,11 @@ async fn run_command_notification(cmd: &str, build: &Build, project: &Project) {
match result {
Ok(output) => {
if !output.status.success() {
if output.status.success() {
info!(build_id = %build.id, "RunCommand completed successfully");
} else {
let stderr = String::from_utf8_lossy(&output.stderr);
warn!(build_id = %build.id, "RunCommand failed: {stderr}");
} else {
info!(build_id = %build.id, "RunCommand completed successfully");
}
},
Err(e) => error!(build_id = %build.id, "RunCommand execution failed: {e}"),
@ -90,12 +90,9 @@ async fn set_github_status(
build: &Build,
) {
// Parse owner/repo from URL
let (owner, repo) = match parse_github_repo(repo_url) {
Some(v) => v,
None => {
warn!("Cannot parse GitHub owner/repo from {repo_url}");
return;
},
let (owner, repo) = if let Some(v) = parse_github_repo(repo_url) { v } else {
warn!("Cannot parse GitHub owner/repo from {repo_url}");
return;
};
let (state, description) = match build.status {
@ -125,12 +122,12 @@ async fn set_github_status(
.await
{
Ok(resp) => {
if !resp.status().is_success() {
if resp.status().is_success() {
info!(build_id = %build.id, "Set GitHub commit status: {state}");
} else {
let status = resp.status();
let text = resp.text().await.unwrap_or_default();
warn!("GitHub status API returned {status}: {text}");
} else {
info!(build_id = %build.id, "Set GitHub commit status: {state}");
}
},
Err(e) => error!("GitHub status API request failed: {e}"),
@ -145,12 +142,9 @@ async fn set_gitea_status(
build: &Build,
) {
// Parse owner/repo from URL (try to extract from the gitea URL)
let (owner, repo) = match parse_gitea_repo(repo_url, base_url) {
Some(v) => v,
None => {
warn!("Cannot parse Gitea owner/repo from {repo_url}");
return;
},
let (owner, repo) = if let Some(v) = parse_gitea_repo(repo_url, base_url) { v } else {
warn!("Cannot parse Gitea owner/repo from {repo_url}");
return;
};
let (state, description) = match build.status {
@ -177,12 +171,12 @@ async fn set_gitea_status(
.await
{
Ok(resp) => {
if !resp.status().is_success() {
if resp.status().is_success() {
info!(build_id = %build.id, "Set Gitea commit status: {state}");
} else {
let status = resp.status();
let text = resp.text().await.unwrap_or_default();
warn!("Gitea status API returned {status}: {text}");
} else {
info!(build_id = %build.id, "Set Gitea commit status: {state}");
}
},
Err(e) => error!("Gitea status API request failed: {e}"),
@ -197,12 +191,9 @@ async fn set_gitlab_status(
build: &Build,
) {
// Parse project path from URL
let project_path = match parse_gitlab_project(repo_url, base_url) {
Some(p) => p,
None => {
warn!("Cannot parse GitLab project from {repo_url}");
return;
},
let project_path = if let Some(p) = parse_gitlab_project(repo_url, base_url) { p } else {
warn!("Cannot parse GitLab project from {repo_url}");
return;
};
// GitLab uses different state names
@ -238,12 +229,12 @@ async fn set_gitlab_status(
.await
{
Ok(resp) => {
if !resp.status().is_success() {
if resp.status().is_success() {
info!(build_id = %build.id, "Set GitLab commit status: {state}");
} else {
let status = resp.status();
let text = resp.text().await.unwrap_or_default();
warn!("GitLab status API returned {status}: {text}");
} else {
info!(build_id = %build.id, "Set GitLab commit status: {state}");
}
},
Err(e) => error!("GitLab status API request failed: {e}"),

View file

@ -45,7 +45,7 @@ pub async fn list_for_build(
}
/// Batch check if all dependency builds are completed for multiple builds at
/// once. Returns a map from build_id to whether all deps are completed.
/// once. Returns a map from `build_id` to whether all deps are completed.
pub async fn check_deps_for_builds(
pool: &PgPool,
build_ids: &[Uuid],

View file

@ -179,7 +179,7 @@ pub async fn reset_orphaned(
Ok(result.rows_affected())
}
/// List builds with optional evaluation_id, status, system, and job_name
/// List builds with optional `evaluation_id`, status, system, and `job_name`
/// filters, with pagination.
pub async fn list_filtered(
pool: &PgPool,
@ -305,7 +305,7 @@ pub async fn mark_signed(pool: &PgPool, id: Uuid) -> Result<()> {
}
/// Batch-fetch completed builds by derivation paths.
/// Returns a map from drv_path to Build for deduplication.
/// Returns a map from `drv_path` to Build for deduplication.
pub async fn get_completed_by_drv_paths(
pool: &PgPool,
drv_paths: &[String],
@ -330,7 +330,7 @@ pub async fn get_completed_by_drv_paths(
)
}
/// Set the builder_id for a build.
/// Set the `builder_id` for a build.
pub async fn set_builder(
pool: &PgPool,
id: Uuid,

View file

@ -58,7 +58,7 @@ pub async fn list_for_jobset(
.map_err(CiError::Database)
}
/// List evaluations with optional jobset_id and status filters, with
/// List evaluations with optional `jobset_id` and status filters, with
/// pagination.
pub async fn list_filtered(
pool: &PgPool,
@ -145,7 +145,7 @@ pub async fn set_inputs_hash(
Ok(())
}
/// Check if an evaluation with the same inputs_hash already exists for this
/// Check if an evaluation with the same `inputs_hash` already exists for this
/// jobset.
pub async fn get_by_inputs_hash(
pool: &PgPool,

View file

@ -52,7 +52,7 @@ pub async fn get(pool: &PgPool, id: Uuid) -> Result<ProjectMember> {
.map_err(|e| {
match e {
sqlx::Error::RowNotFound => {
CiError::NotFound(format!("Project member {} not found", id))
CiError::NotFound(format!("Project member {id} not found"))
},
_ => CiError::Database(e),
}
@ -123,7 +123,7 @@ pub async fn update(
.map_err(|e| {
match e {
sqlx::Error::RowNotFound => {
CiError::NotFound(format!("Project member {} not found", id))
CiError::NotFound(format!("Project member {id} not found"))
},
_ => CiError::Database(e),
}
@ -141,8 +141,7 @@ pub async fn delete(pool: &PgPool, id: Uuid) -> Result<()> {
.await?;
if result.rows_affected() == 0 {
return Err(CiError::NotFound(format!(
"Project member {} not found",
id
"Project member {id} not found"
)));
}
Ok(())

View file

@ -9,7 +9,7 @@ use crate::{
};
/// Search entity types
#[derive(Debug, Clone, Copy, PartialEq)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SearchEntity {
Projects,
Jobsets,
@ -18,14 +18,14 @@ pub enum SearchEntity {
}
/// Sort order for search results
#[derive(Debug, Clone, Copy, PartialEq)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SortOrder {
Asc,
Desc,
}
/// Sort field for builds
#[derive(Debug, Clone, Copy, PartialEq)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum BuildSortField {
CreatedAt,
JobName,
@ -34,7 +34,7 @@ pub enum BuildSortField {
}
/// Sort field for projects
#[derive(Debug, Clone, Copy, PartialEq)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ProjectSortField {
Name,
CreatedAt,
@ -42,7 +42,7 @@ pub enum ProjectSortField {
}
/// Build status filter
#[derive(Debug, Clone, Copy, PartialEq)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum BuildStatusFilter {
Pending,
Running,
@ -492,7 +492,7 @@ pub async fn quick_search(
query: &str,
limit: i64,
) -> Result<(Vec<Project>, Vec<Build>)> {
let pattern = format!("%{}%", query);
let pattern = format!("%{query}%");
let projects = sqlx::query_as::<_, Project>(
"SELECT * FROM projects WHERE name ILIKE $1 OR description ILIKE $1 ORDER \

View file

@ -43,7 +43,7 @@ pub async fn get(pool: &PgPool, id: Uuid) -> Result<StarredJob> {
.map_err(|e| {
match e {
sqlx::Error::RowNotFound => {
CiError::NotFound(format!("Starred job {} not found", id))
CiError::NotFound(format!("Starred job {id} not found"))
},
_ => CiError::Database(e),
}
@ -107,7 +107,7 @@ pub async fn delete(pool: &PgPool, id: Uuid) -> Result<()> {
.execute(pool)
.await?;
if result.rows_affected() == 0 {
return Err(CiError::NotFound(format!("Starred job {} not found", id)));
return Err(CiError::NotFound(format!("Starred job {id} not found")));
}
Ok(())
}

View file

@ -29,7 +29,7 @@ pub fn hash_password(password: &str) -> Result<String> {
argon2
.hash_password(password.as_bytes(), &salt)
.map(|h| h.to_string())
.map_err(|e| CiError::Internal(format!("Password hashing failed: {}", e)))
.map_err(|e| CiError::Internal(format!("Password hashing failed: {e}")))
}
/// Verify a password against a hash
@ -37,7 +37,7 @@ pub fn verify_password(password: &str, hash: &str) -> Result<bool> {
use argon2::{Argon2, PasswordHash, PasswordVerifier};
let parsed_hash = PasswordHash::new(hash)
.map_err(|e| CiError::Internal(format!("Invalid password hash: {}", e)))?;
.map_err(|e| CiError::Internal(format!("Invalid password hash: {e}")))?;
let argon2 = Argon2::default();
Ok(
argon2
@ -134,7 +134,7 @@ pub async fn get(pool: &PgPool, id: Uuid) -> Result<User> {
.map_err(|e| {
match e {
sqlx::Error::RowNotFound => {
CiError::NotFound(format!("User {} not found", id))
CiError::NotFound(format!("User {id} not found"))
},
_ => CiError::Database(e),
}
@ -321,7 +321,7 @@ pub async fn delete(pool: &PgPool, id: Uuid) -> Result<()> {
.execute(pool)
.await?;
if result.rows_affected() == 0 {
return Err(CiError::NotFound(format!("User {} not found", id)));
return Err(CiError::NotFound(format!("User {id} not found")));
}
Ok(())
}
@ -335,7 +335,7 @@ pub async fn upsert_oauth_user(
oauth_provider_id: &str,
) -> Result<User> {
// Use provider ID in username to avoid collisions
let unique_username = format!("{}_{}", username, oauth_provider_id);
let unique_username = format!("{username}_{oauth_provider_id}");
// Check if user exists by OAuth provider ID pattern
let existing =
@ -381,7 +381,7 @@ pub async fn upsert_oauth_user(
VALUES ($1, $2, $3, NULL, 'read-only') RETURNING *",
)
.bind(&unique_username)
.bind(email.unwrap_or(&format!("{}@oauth.local", unique_username)))
.bind(email.unwrap_or(&format!("{unique_username}@oauth.local")))
.bind(user_type_str)
.fetch_one(pool)
.await
@ -395,7 +395,7 @@ pub async fn upsert_oauth_user(
})
}
/// Create a new session for a user. Returns (session_token, session_id).
/// Create a new session for a user. Returns (`session_token`, `session_id`).
pub async fn create_session(
pool: &PgPool,
user_id: Uuid,

View file

@ -49,16 +49,19 @@ pub const VALID_PROJECT_ROLES: &[&str] = &[
];
/// Check if a global role is valid.
#[must_use]
pub fn is_valid_role(role: &str) -> bool {
    // Linear scan over the small allow-list of known global roles;
    // equivalent to `VALID_ROLES.contains(&role)`.
    VALID_ROLES.iter().any(|&known| known == role)
}
/// Check if a project role is valid.
#[must_use]
pub fn is_valid_project_role(role: &str) -> bool {
    // Linear scan over the small allow-list of known project roles;
    // equivalent to `VALID_PROJECT_ROLES.contains(&role)`.
    VALID_PROJECT_ROLES.iter().any(|&known| known == role)
}
/// Get the highest project role (for permission checks)
#[must_use]
pub fn project_role_level(role: &str) -> i32 {
match role {
PROJECT_ROLE_ADMIN => 3,
@ -70,6 +73,7 @@ pub fn project_role_level(role: &str) -> i32 {
/// Check if user has required project permission
/// Higher level roles automatically have lower level permissions
#[must_use]
pub fn has_project_permission(user_role: &str, required: &str) -> bool {
let user_level = project_role_level(user_role);
let required_level = project_role_level(required);

View file

@ -160,7 +160,7 @@ pub fn validate_full_name(name: &str) -> Result<(), ValidationError> {
});
}
if name.chars().any(|c| c.is_control()) {
if name.chars().any(char::is_control) {
return Err(ValidationError {
field: "full_name".to_string(),
message: "Full name cannot contain control characters".to_string(),