various: clean up multiplatform mod resolution; add lockfile management

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: If1fed3ad9f9273266ae6e0e24d57b6996a6a6964
This commit is contained in:
raf 2026-05-03 21:21:16 +03:00
commit da15ebf9bd
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
14 changed files with 818 additions and 141 deletions

View file

@ -92,6 +92,9 @@ pub enum Commands {
/// Manage fork configuration
Fork(ForkArgs),
/// Check and repair lockfile integrity
Lockfile(LockfileArgs),
}
#[derive(Args)]
@ -221,17 +224,33 @@ pub struct UpdateArgs {
#[derive(Args)]
pub struct LsArgs {
/// Show detailed information
/// Show all optional columns (equivalent to enabling all --show-* flags)
#[clap(short, long)]
pub detailed: bool,
/// Add update information for projects
/// Show project type column (mod, resourcepack, shader, etc.)
#[clap(long = "show-type")]
pub show_type: bool,
/// Show project side column (client, server, both)
#[clap(long = "show-side")]
pub show_side: bool,
/// Show first slug column
#[clap(long = "show-slug")]
pub show_slug: bool,
/// Show dependency count column
#[clap(long = "show-links")]
pub show_links: bool,
/// Show provider versions (when present) column
#[clap(long = "show-versions")]
pub show_versions: bool,
/// Include update information for projects
#[clap(short = 'c', long = "check-updates")]
pub check_updates: bool,
/// Maximum length for project names
#[clap(long = "name-max-length")]
pub name_max_length: Option<usize>,
}
#[derive(Args)]
@ -627,3 +646,24 @@ pub enum ForkSubcommand {
projects: Vec<String>,
},
}
/// Lockfile management subcommand arguments
// Container for `pakker lockfile <subcommand>`. The `///` doc comments in
// this file double as clap-generated CLI help text, so they are left
// untouched; reviewer notes use plain `//` comments instead.
#[derive(Debug, Args)]
#[command(args_conflicts_with_subcommands = true)]
pub struct LockfileArgs {
    // The selected lockfile operation (see `LockfileSubcommand`).
    #[clap(subcommand)]
    pub subcommand: LockfileSubcommand,
}
// Operations available under `pakker lockfile`. The `///` docs below are
// rendered as CLI help text by clap, so their wording is preserved.
#[derive(Debug, Subcommand)]
pub enum LockfileSubcommand {
    /// Check the lockfile for known issues
    Doctor,
    /// Repair known lockfile issues
    Repair {
        /// Skip operations that require network access
        #[clap(long)]
        offline: bool,
    },
}

View file

@ -148,7 +148,7 @@ async fn resolve_input(
let mut merged = projects.remove(0);
for project in projects {
merged.merge(project);
merged = merged.merged(project)?;
}
Ok(merged)
}

View file

@ -237,7 +237,7 @@ pub async fn execute(
let mut combined_project = projects_to_merge.remove(0);
for project in projects_to_merge {
combined_project.merge(project);
combined_project = combined_project.merged(project)?;
}
// Apply user-specified properties

View file

@ -15,8 +15,11 @@ use crate::{
git::{self, VcsType},
model::{
LockFile,
Project,
credentials::ResolvedCredentials,
fork::{ForkIntegrity, LocalConfig, ParentConfig, RefType, hash_content},
},
platform::create_platform,
};
const PAKKU_DIR: &str = ".pakku";
@ -689,6 +692,54 @@ fn execute_sync() -> Result<(), PakkerError> {
Ok(())
}
/// Resolve downloadable files for `project` by querying platforms in
/// priority order: modrinth, then curseforge, then multiplatform.
///
/// The first platform that returns a non-empty file set from which a file
/// can be selected for the given `mc_versions`/`loaders` wins: its files
/// replace `project.files` and `Ok(())` is returned. If every platform
/// fails, a `FileSelectionError` naming the slug is returned.
fn resolve_project_files(
    project: &mut Project,
    slug: &str,
    mc_versions: &[String],
    loaders: &[String],
) -> Result<(), PakkerError> {
    let handle = tokio::runtime::Handle::current();

    // Load credentials once; the CurseForge API key is shared by the
    // curseforge and multiplatform providers (the original loaded it twice).
    let cf_key = ResolvedCredentials::load()
        .curseforge_api_key()
        .map(String::from);

    // Platforms in priority order, paired with the API key each one needs.
    let attempts = [
        ("modrinth", None),
        ("curseforge", cf_key.clone()),
        ("multiplatform", cf_key),
    ];

    for (platform_name, api_key) in attempts {
        let Ok(platform) = create_platform(platform_name, api_key) else {
            continue;
        };
        // NOTE(review): `Handle::block_on` panics when called from within an
        // async execution context, and this path is reached from the async
        // `run()`; `block_in_place` moves this worker off the async scheduler
        // first. Requires the multi-thread Tokio runtime — confirm.
        let response = tokio::task::block_in_place(|| {
            handle.block_on(platform.request_project_with_files(
                slug,
                mc_versions,
                loaders,
            ))
        });
        if let Ok(mut resolved) = response
            && !resolved.files.is_empty()
            && resolved.select_file(mc_versions, loaders, None).is_ok()
        {
            project.files = resolved.files;
            return Ok(());
        }
    }

    Err(PakkerError::FileSelectionError(format!(
        "Could not resolve files for '{slug}'"
    )))
}
fn execute_promote(projects: &[String]) -> Result<(), PakkerError> {
let config_dir = Path::new(".");
let local_config = LocalConfig::load(config_dir)?;
@ -765,7 +816,24 @@ fn execute_promote(projects: &[String]) -> Result<(), PakkerError> {
continue;
}
local_lockfile.add_project(project.clone());
let mut project = project.clone();
if project.files.is_empty() {
// Try to resolve files from platforms
if let Some(slug) = project.slug.values().next().cloned() {
let mc_versions = parent_lockfile.mc_versions.clone();
let loaders: Vec<String> =
parent_lockfile.loaders.keys().cloned().collect();
if let Err(e) =
resolve_project_files(&mut project, &slug, &mc_versions, &loaders)
{
log::debug!(
"Failed to resolve files for '{}': {e}",
project.get_name()
);
}
}
}
local_lockfile.add_project(project);
promoted.push(project_arg);
} else {
not_found.push(project_arg);

View file

@ -0,0 +1,304 @@
use std::{collections::HashSet, path::Path};
use yansi::Paint;
use crate::{
cli::LockfileSubcommand,
error::Result,
model::{LockFile, Project, credentials::ResolvedCredentials},
platform::create_platform,
};
pub fn execute(args: &crate::cli::LockfileArgs) -> Result<()> {
match &args.subcommand {
LockfileSubcommand::Doctor => execute_doctor(),
LockfileSubcommand::Repair { offline } => execute_repair(*offline),
}
}
/// Run `pakker lockfile doctor`: diagnose the lockfile in the current
/// directory and print any issues found. Never modifies the lockfile.
fn execute_doctor() -> Result<()> {
    let lockfile = LockFile::load(Path::new("."))?;
    let issues = diagnose(&lockfile);

    if issues.is_empty() {
        println!("{}", "✓ Lockfile is healthy".green());
        return Ok(());
    }

    println!("{}", "Lockfile Issues:".yellow().bold());
    println!();
    for issue in &issues {
        // NOTE(review): the red marker string is empty — possibly a stripped
        // unicode glyph (e.g. "✗"); confirm against the original source.
        println!(" {} {}", "".red(), issue);
    }
    println!();

    let hint = format!(
        "{} issue(s) found. Run 'pakker lockfile repair' to fix.",
        issues.len()
    );
    println!(" {}", hint.dim());
    Ok(())
}
/// Run `pakker lockfile repair`: attempt to fix known lockfile issues and
/// save the result. With `offline` set, repairs that need network access
/// are skipped and reported instead of attempted.
fn execute_repair(offline: bool) -> Result<()> {
    let config_dir = Path::new(".");
    let mut lockfile = LockFile::load(config_dir)?;

    let issue_count = diagnose(&lockfile).len();
    if issue_count == 0 {
        println!("{}", "✓ Lockfile is healthy — nothing to repair".green());
        return Ok(());
    }
    println!(
        "{}",
        format!("Found {issue_count} issue(s). Attempting repair...").yellow()
    );
    println!();

    let mut fixed: Vec<String> = Vec::new();
    let mut skipped: Vec<String> = Vec::new();

    // Repair pass 1: re-resolve projects whose file list is empty.
    let empty_file_count = lockfile
        .projects
        .iter()
        .filter(|p| p.files.is_empty())
        .count();
    if empty_file_count > 0 {
        if offline {
            skipped.push(format!(
                "{empty_file_count} project(s) with missing files (requires network)"
            ));
        } else {
            let resolved = resolve_empty_files(&mut lockfile);
            if resolved > 0 {
                fixed.push(format!(
                    "{resolved}/{empty_file_count} project(s) with missing files resolved"
                ));
            }
            let unresolved = empty_file_count - resolved;
            if unresolved > 0 {
                skipped.push(format!(
                    "{unresolved} project(s) could not be resolved (check slugs or network)"
                ));
            }
        }
    }

    // Repair pass 2: merge duplicate project entries.
    let count_before_dedup = lockfile.projects.len();
    lockfile.deduplicate_projects();
    let removed = count_before_dedup - lockfile.projects.len();
    if removed > 0 {
        fixed.push(format!("{removed} duplicate project(s) merged"));
    }

    // Persist whatever was repaired.
    lockfile.save(config_dir)?;

    // Summarize the outcome for the user.
    if !fixed.is_empty() {
        println!("{}", "Fixed:".green());
        for item in &fixed {
            // NOTE(review): empty marker string — possibly a stripped glyph.
            println!(" {} {}", "".green(), item);
        }
        println!();
    }
    if !skipped.is_empty() {
        println!("{}", "Skipped (requires attention):".yellow());
        for item in &skipped {
            println!(" {} {}", "!".yellow(), item);
        }
        println!();
    }
    Ok(())
}
/// Inspect `lockfile` and return a human-readable list of problems.
///
/// Checks performed, in order: missing Minecraft versions, missing mod
/// loaders, outdated lockfile schema version, per-project issues (no
/// resolved files, no slugs, no platform IDs), and slug collisions across
/// the whole lockfile.
fn diagnose(lockfile: &LockFile) -> Vec<String> {
    // Current lockfile schema version; anything older is flagged.
    const LOCKFILE_VERSION: u32 = 2;

    let mut issues = Vec::new();

    if lockfile.mc_versions.is_empty() {
        issues.push("No Minecraft versions configured".to_string());
    }
    if lockfile.loaders.is_empty() {
        issues.push("No mod loaders configured".to_string());
    }
    if lockfile.lockfile_version < LOCKFILE_VERSION {
        issues.push(format!(
            "Lockfile version {} is outdated (current: {LOCKFILE_VERSION})",
            lockfile.lockfile_version
        ));
    }

    let mut seen_slugs: HashSet<&str> = HashSet::new();
    let mut duplicate_count = 0u32;
    for project in &lockfile.projects {
        let name = project.get_name();
        let has_slugs = !project.slug.is_empty();

        if has_slugs && project.files.is_empty() {
            issues.push(format!("'{name}' has no resolved files"));
        }
        if !has_slugs {
            issues.push(format!("'{name}' has no platform slugs"));
        }
        if has_slugs && project.id.is_empty() {
            issues.push(format!("'{name}' has no platform IDs"));
        }
        // Any slug already seen (under any project/platform) is a conflict.
        for slug in project.slug.values() {
            if !seen_slugs.insert(slug.as_str()) {
                duplicate_count += 1;
            }
        }
    }
    if duplicate_count > 0 {
        issues.push(format!(
            "{duplicate_count} duplicate slug conflict(s) across projects"
        ));
    }

    issues
}
/// Re-resolve file lists for every project in `lockfile` whose `files` is
/// empty, printing a line per success. Returns the number of projects
/// successfully resolved.
///
/// Each project is tried first by platform slug; platforms for which only
/// an ID is recorded are then tried by ID.
fn resolve_empty_files(lockfile: &mut LockFile) -> usize {
    // Hoist the loop-invariant resolution context: the original cloned
    // `mc_versions` and `loaders` once per slug per project. Cloning once
    // up front also avoids re-borrowing `lockfile` while `projects` is
    // mutably borrowed below.
    let mc_versions = lockfile.mc_versions.clone();
    let loaders: Vec<String> = lockfile.loaders.keys().cloned().collect();

    let mut resolved = 0usize;
    for project in lockfile.projects.iter_mut() {
        if !project.files.is_empty() {
            continue;
        }

        // Pass 1: try each platform the project has a slug for. The map is
        // cloned because `resolve_project_from_platform` takes `&mut project`.
        for (platform_name, platform_slug) in &project.slug.clone() {
            if resolve_project_from_platform(
                project,
                platform_name,
                platform_slug,
                &mc_versions,
                &loaders,
            ) {
                resolved += 1;
                println!(
                    " {} Resolved files for '{}' via {platform_name}",
                    "".green(),
                    project.get_name()
                );
                break;
            }
        }

        // Pass 2: fall back to platform IDs, but only for platforms with no
        // slug — slug-bearing platforms were already tried (and failed) above.
        if project.files.is_empty() {
            for (platform_name, platform_id) in &project.id.clone() {
                if project.slug.contains_key(platform_name) {
                    continue;
                }
                if resolve_project_from_platform(
                    project,
                    platform_name,
                    platform_id,
                    &mc_versions,
                    &loaders,
                ) {
                    resolved += 1;
                    println!(
                        " {} Resolved files for '{}' via {platform_name} (by ID)",
                        "".green(),
                        project.get_name()
                    );
                    break;
                }
            }
        }
    }
    resolved
}
/// Query a single platform for `identifier` (a slug or platform ID) and,
/// on success, replace `project.files` with the platform's file set.
///
/// Returns `true` only when the platform produced a non-empty file set
/// from which a file could be selected for the given `mc_versions` and
/// `loaders`; all failures are logged at debug level and return `false`.
fn resolve_project_from_platform(
    project: &mut Project,
    platform_name: &str,
    identifier: &str,
    mc_versions: &[String],
    loaders: &[String],
) -> bool {
    let handle = tokio::runtime::Handle::current();

    // Only some platforms need credentials; anything else (including
    // modrinth) is queried anonymously.
    let api_key = match platform_name {
        "curseforge" => ResolvedCredentials::load()
            .curseforge_api_key()
            .map(String::from),
        "github" => ResolvedCredentials::load()
            .github_access_token()
            .map(String::from),
        _ => None,
    };

    let platform = match create_platform(platform_name, api_key) {
        Ok(p) => p,
        Err(_) => {
            log::debug!("Failed to create platform '{platform_name}'");
            return false;
        },
    };

    // NOTE(review): `Handle::block_on` panics when invoked from inside an
    // async execution context, and this command is dispatched from the
    // async `run()`; `block_in_place` moves the blocking wait off the async
    // worker first. Requires the multi-thread Tokio runtime — confirm.
    let response = tokio::task::block_in_place(|| {
        handle.block_on(platform.request_project_with_files(
            identifier,
            mc_versions,
            loaders,
        ))
    });

    match response {
        Ok(mut resolved) => {
            // A response with no files, or files that cannot satisfy the
            // requested versions/loaders, counts as a failure.
            if resolved.files.is_empty()
                || resolved.select_file(mc_versions, loaders, None).is_err()
            {
                return false;
            }
            project.files = resolved.files;
            true
        },
        Err(e) => {
            log::debug!("Platform '{platform_name}' failed for '{identifier}': {e}");
            false
        },
    }
}

View file

@ -4,8 +4,8 @@ use yansi::Paint;
use crate::{cli::LsArgs, error::Result, model::LockFile};
/// Truncate a name to fit within `max_len` characters, adding "..." if
/// truncated
const COL_GAP: usize = 3; // spaces between columns
fn truncate_name(name: &str, max_len: usize) -> String {
if name.len() <= max_len {
name.to_string()
@ -16,8 +16,74 @@ fn truncate_name(name: &str, max_len: usize) -> String {
}
}
// Per-column display widths (in bytes, via `str::len`) for the `ls` table;
// populated by `compute_widths` from header labels and project data.
struct ColumnWidths {
    name: usize,     // project name column (capped at 50 in compute_widths)
    file: usize,     // primary file-name column
    r#type: usize,   // project type column (measured only when shown)
    side: usize,     // side column (measured only when shown)
    slug: usize,     // first platform slug column (measured only when shown)
    links: usize,    // dependency-count column (measured only when shown)
    versions: usize, // per-provider versions column (measured only when shown)
}
/// Compute the display width of every `ls` column from the header labels
/// and the lockfile's project data. Optional columns are only measured
/// when the corresponding `show_*` flag is set; widths are byte lengths.
fn compute_widths(
    lockfile: &LockFile,
    show_type: bool,
    show_side: bool,
    show_slug: bool,
    show_links: bool,
    show_versions: bool,
) -> ColumnWidths {
    // Seed each column with its header width so headers never overflow.
    let mut widths = ColumnWidths {
        name: "Name".len(),
        file: "File".len(),
        r#type: "Type".len(),
        side: "Side".len(),
        slug: "Slug".len(),
        links: "Links".len(),
        versions: "Versions".len(),
    };

    for project in &lockfile.projects {
        // Names are capped at 50 to keep the table readable.
        widths.name = widths.name.max(project.get_name().len().min(50));
        widths.file = widths
            .file
            .max(project.files.first().map_or(1, |f| f.file_name.len()));

        if show_type {
            let type_label = format!("{:?}", project.r#type).to_lowercase();
            widths.r#type = widths.r#type.max(type_label.len());
        }
        if show_side {
            let side_label = format!("{:?}", project.side).to_lowercase();
            widths.side = widths.side.max(side_label.len());
        }
        if show_slug {
            widths.slug = widths
                .slug
                .max(project.slug.values().next().map_or(1, String::len));
        }
        if show_links {
            widths.links = widths
                .links
                .max(project.pakku_links.len().to_string().len());
        }
        if show_versions {
            // Multi-file projects render "type: name" pairs joined by ", ";
            // single-file projects render a width-1 placeholder.
            let rendered_len = if project.files.len() > 1 {
                project
                    .files
                    .iter()
                    .map(|f| format!("{}: {}", f.file_type, f.file_name))
                    .collect::<Vec<_>>()
                    .join(", ")
                    .len()
            } else {
                1
            };
            widths.versions = widths.versions.max(rendered_len);
        }
    }

    widths
}
pub fn execute(args: &LsArgs, lockfile_path: &Path) -> Result<()> {
// Load expects directory path, so get parent directory
let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
let lockfile = LockFile::load(lockfile_dir)?;
@ -26,90 +92,155 @@ pub fn execute(args: &LsArgs, lockfile_path: &Path) -> Result<()> {
return Ok(());
}
let project_count = lockfile.projects.len();
let show_type = args.detailed || args.show_type;
let show_side = args.detailed || args.show_side;
let show_slug = args.detailed || args.show_slug;
let show_links = args.detailed || args.show_links;
let show_versions = args.detailed || args.show_versions;
let widths = compute_widths(
&lockfile,
show_type,
show_side,
show_slug,
show_links,
show_versions,
);
// Build header
let mut header_cols: Vec<(&str, usize)> =
vec![("Name", widths.name), ("File", widths.file)];
if show_type {
header_cols.push(("Type", widths.r#type));
}
if show_side {
header_cols.push(("Side", widths.side));
}
if show_slug {
header_cols.push(("Slug", widths.slug));
}
if show_links {
header_cols.push(("Links", widths.links));
}
if show_versions {
header_cols.push(("Versions", widths.versions));
}
println!(
"{} ({})",
"Installed projects".bold(),
lockfile.projects.len().to_string().cyan().bold()
project_count.to_string().cyan().bold()
);
println!();
// Calculate max name length for alignment
let max_name_len = args.name_max_length.unwrap_or_else(|| {
lockfile
.projects
.iter()
.map(|p| p.get_name().len())
.max()
.unwrap_or(20)
.min(50)
});
// Print header
let header_line: Vec<String> = header_cols
.iter()
.map(|(text, width)| {
if text == &header_cols.last().unwrap().0 {
format!("{text}")
} else {
format!("{:<width$}", text, width = *width)
}
})
.collect();
println!("{}", header_line.join(&" ".repeat(COL_GAP)).cyan());
// Underline with dashes
let dash_line: String = header_cols
.iter()
.enumerate()
.map(|(i, (_, width))| {
let dashes = "-".repeat(*width);
if i == header_cols.len() - 1 {
dashes
} else {
format!("{:<width$}", dashes, width = *width)
}
})
.collect::<Vec<_>>()
.join(&" ".repeat(COL_GAP));
println!("{}", dash_line.dim());
for project in &lockfile.projects {
// Check for version mismatch across providers
let version_warning = if project.versions_match_across_providers() {
""
} else {
// Use the detailed check_version_mismatch for logging
let name = truncate_name(&project.get_name(), widths.name.min(50));
let warning_marker = if !project.versions_match_across_providers() {
if let Some(mismatch_detail) = project.check_version_mismatch() {
log::warn!("{mismatch_detail}");
}
" [!] versions do not match across providers"
" [!]"
} else {
""
};
if args.detailed {
let id = project.pakku_id.as_deref().unwrap_or("unknown");
let name = truncate_name(&project.get_name(), max_name_len);
let name_line = format!(" {name} ({id})");
if version_warning.is_empty() {
println!("{}", name_line.bold());
} else {
println!("{}{}", name_line.bold(), version_warning.yellow());
}
println!(" {} {:?}", "Type:".dim(), project.r#type);
println!(" {} {:?}", "Side:".dim(), project.side);
let file_name = project
.files
.first()
.map(|f| f.file_name.as_str())
.unwrap_or("-");
if let Some(file) = project.files.first() {
println!(" {} {}", "File:".dim(), file.file_name);
println!(
" {} {} ({})",
"Version:".dim(),
file.release_type,
file.date_published
);
}
// Show version details if there's a mismatch
if !version_warning.is_empty() {
println!(" {}:", "Provider versions".dim());
for file in &project.files {
println!(" {}: {}", file.file_type, file.file_name);
}
}
if !project.pakku_links.is_empty() {
println!(
" {} {}",
"Dependencies:".dim(),
project.pakku_links.len()
);
}
println!();
let name_display = format!("{name}{warning_marker}");
print!(" ");
if warning_marker.is_empty() {
print!(
"{}",
format!("{:<width$}", name_display, width = widths.name).bold()
);
} else {
let name = truncate_name(&project.get_name(), max_name_len);
let file_info = project
.files
.first()
.map(|f| format!(" ({})", f.file_name))
.unwrap_or_default();
println!(
" {}{}{}",
name.bold(),
file_info.dim(),
version_warning.yellow()
print!(
"{}",
format!("{:<width$}", name_display, width = widths.name).yellow()
);
}
print!("{}", " ".repeat(COL_GAP));
print!(
"{}",
format!("{:<width$}", file_name, width = widths.file).dim()
);
if show_type {
let t = format!("{:?}", project.r#type).to_lowercase();
print!("{}", " ".repeat(COL_GAP));
print!("{:<width$}", t, width = widths.r#type);
}
if show_side {
let s = format!("{:?}", project.side).to_lowercase();
print!("{}", " ".repeat(COL_GAP));
print!("{:<width$}", s, width = widths.side);
}
if show_slug {
let slug = project
.slug
.values()
.next()
.cloned()
.unwrap_or_else(|| "-".to_string());
print!("{}", " ".repeat(COL_GAP));
print!("{:<width$}", slug, width = widths.slug);
}
if show_links {
let links = project.pakku_links.len().to_string();
print!("{}", " ".repeat(COL_GAP));
print!("{:<width$}", links, width = widths.links);
}
if show_versions {
let v = if project.files.len() > 1 {
project
.files
.iter()
.map(|f| format!("{}: {}", f.file_type, f.file_name))
.collect::<Vec<_>>()
.join(", ")
} else {
String::from("-")
};
print!("{}", " ".repeat(COL_GAP));
print!("{v}");
}
println!();
}
Ok(())

View file

@ -13,6 +13,7 @@ pub mod import;
pub mod init;
pub mod inspect;
pub mod link;
pub mod lockfile;
pub mod ls;
pub mod remote;
pub mod remote_update;

View file

@ -255,49 +255,88 @@ async fn check_project_update(
project: &Project,
lockfile: &LockFile,
) -> Result<Option<ProjectUpdate>> {
// Get primary slug
let slug = project
.slug
.values()
.next()
.ok_or_else(|| {
crate::error::PakkerError::InvalidProject("No slug found".to_string())
})?
.clone();
let loaders: Vec<String> = lockfile.loaders.keys().cloned().collect();
let mc_versions = &lockfile.mc_versions;
// Try each platform in project
for platform_name in project.id.keys() {
let mut errors: Vec<String> = Vec::new();
for (platform_name, platform_slug) in &project.slug {
let api_key = get_api_key(platform_name);
let Ok(platform) = create_platform(platform_name, api_key) else {
continue;
};
let loaders: Vec<String> = lockfile.loaders.keys().cloned().collect();
if let Ok(updated_project) = platform
.request_project_with_files(&slug, &lockfile.mc_versions, &loaders)
match platform
.request_project_with_files(platform_slug, mc_versions, &loaders)
.await
{
// Compare files to detect updates
let file_updates = detect_file_updates(project, &updated_project);
Ok(updated_project) => {
let file_updates = detect_file_updates(project, &updated_project);
if !file_updates.is_empty() {
return Ok(Some(ProjectUpdate {
slug: project.slug.clone(),
name: project.name.values().next().cloned().unwrap_or_default(),
project_type: format!("{:?}", project.r#type),
side: format!("{:?}", project.side),
file_updates,
}));
}
if !file_updates.is_empty() {
return Ok(Some(ProjectUpdate {
slug: project.slug.clone(),
name: project.name.values().next().cloned().unwrap_or_default(),
project_type: format!("{:?}", project.r#type),
side: format!("{:?}", project.side),
file_updates,
}));
}
return Ok(None); // No updates
return Ok(None);
},
Err(e) => {
errors.push(format!("{platform_name}: {e}"));
},
}
}
Err(crate::error::PakkerError::PlatformApiError(
"Failed to check for updates on any platform".to_string(),
))
// Also try platforms that have IDs but no slugs (uncommon edge case)
for platform_name in project.id.keys() {
if project.slug.contains_key(platform_name) {
continue;
}
let platform_id = project
.id
.get(platform_name)
.expect("key must exist in id map");
let api_key = get_api_key(platform_name);
let Ok(platform) = create_platform(platform_name, api_key) else {
continue;
};
match platform
.request_project_with_files(platform_id, mc_versions, &loaders)
.await
{
Ok(updated_project) => {
let file_updates = detect_file_updates(project, &updated_project);
if !file_updates.is_empty() {
return Ok(Some(ProjectUpdate {
slug: project.slug.clone(),
name: project.name.values().next().cloned().unwrap_or_default(),
project_type: format!("{:?}", project.r#type),
side: format!("{:?}", project.side),
file_updates,
}));
}
return Ok(None);
},
Err(e) => {
errors.push(format!("{platform_name}(by id): {e}"));
},
}
}
let error_detail = if errors.is_empty() {
"no platform slugs or IDs available".to_string()
} else {
errors.join(" | ")
};
Err(crate::error::PakkerError::PlatformApiError(format!(
"Failed to check for updates on any platform ({error_detail})"
)))
}
fn detect_file_updates(

View file

@ -238,5 +238,9 @@ pub async fn run() -> Result<(), PakkerError> {
cli::commands::fork::execute(&args)?;
Ok(())
},
Commands::Lockfile(args) => {
cli::commands::lockfile::execute(&args)?;
Ok(())
},
}
}