treewide: migrate to multi-crate layout

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: I11a2103f3530f07409177404577b90136a6a6964
This commit is contained in:
raf 2026-05-03 00:33:21 +03:00
commit d445b1814a
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
68 changed files with 247 additions and 72 deletions

View file

@ -0,0 +1,395 @@
use std::{collections::HashMap, time::Duration};
use crate::{
error::{MultiError, PakkerError, Result},
http,
model::{
Config,
LockFile,
PakkerCredentialsFile,
Project,
Target,
credentials::ResolvedCredentials,
set_keyring_secret,
},
platform::create_platform,
resolver::DependencyResolver,
ui_utils::prompt_curseforge_api_key,
};
/// Collect the loader names configured in the lockfile.
fn get_loaders(lockfile: &LockFile) -> Vec<String> {
    // Loader names are the keys of the lockfile's loader map.
    let mut loaders = Vec::with_capacity(lockfile.loaders.len());
    for name in lockfile.loaders.keys() {
        loaders.push(name.clone());
    }
    loaders
}
/// Whether the given pack target requires CurseForge credentials.
fn needs_curseforge(target: Option<&Target>) -> bool {
    // CurseForge-only and multiplatform packs both need CurseForge access.
    matches!(target, Some(Target::CurseForge | Target::Multiplatform))
}
async fn ensure_curseforge_credentials() -> Result<bool> {
let creds = ResolvedCredentials::load();
if creds.curseforge_api_key().is_some() {
return Ok(true);
}
if let Some(key) = prompt_curseforge_api_key(false)? {
// Verify the key before saving
let client = http::create_http_client();
let response = client
.get("https://api.curseforge.com/v1/mods/238222")
.header("x-api-key", &key)
.timeout(Duration::from_secs(10))
.send()
.await;
match response {
Ok(resp) if resp.status().is_success() => {
let mut creds_file = PakkerCredentialsFile::load()?;
set_keyring_secret("curseforge_api_key", &key)?;
creds_file.curseforge_api_key = Some(key.clone());
creds_file.save()?;
println!("CurseForge API key verified and saved.");
Ok(true)
},
Ok(resp) => {
println!(
"Warning: CurseForge API key verification failed (HTTP {}).",
resp.status()
);
if crate::ui_utils::prompt_yes_no(
"Save this key anyway?",
false,
false,
)? {
let mut creds_file = PakkerCredentialsFile::load()?;
set_keyring_secret("curseforge_api_key", &key)?;
creds_file.curseforge_api_key = Some(key);
creds_file.save()?;
Ok(true)
} else {
Ok(false)
}
},
Err(e) => {
println!("Warning: Could not verify CurseForge API key: {e}");
if crate::ui_utils::prompt_yes_no(
"Save this key anyway?",
false,
false,
)? {
let mut creds_file = PakkerCredentialsFile::load()?;
set_keyring_secret("curseforge_api_key", &key)?;
creds_file.curseforge_api_key = Some(key);
creds_file.save()?;
Ok(true)
} else {
Ok(false)
}
},
}
} else {
Ok(false)
}
}
/// Build the platform clients used for project resolution.
///
/// Prefers the combined "multiplatform" client; falls back to a
/// Modrinth-only client when the combined one cannot be created.
pub fn create_all_platforms()
-> HashMap<String, Box<dyn crate::platform::PlatformClient>> {
    let credentials = ResolvedCredentials::load();
    let curseforge_key = credentials.curseforge_api_key().map(String::from);
    let mut platforms = HashMap::new();
    match create_platform("multiplatform", curseforge_key) {
        Ok(platform) => {
            platforms.insert("multiplatform".to_owned(), platform);
        },
        Err(_) => {
            // Combined client unavailable (e.g. no CurseForge key):
            // degrade to Modrinth-only resolution.
            if let Ok(platform) = create_platform("modrinth", None) {
                platforms.insert("modrinth".to_owned(), platform);
            }
        },
    }
    platforms
}
/// Resolve a single user input against every available platform and merge
/// the per-platform results into one project.
///
/// # Errors
///
/// Returns `PakkerError::ProjectNotFound` when no platform could resolve
/// the input.
async fn resolve_input(
    input: &str,
    platforms: &HashMap<String, Box<dyn crate::platform::PlatformClient>>,
    lockfile: &LockFile,
) -> Result<Project> {
    // Loader list is invariant across platforms; compute it once.
    let loaders = get_loaders(lockfile);
    let mut resolved: Vec<Project> = Vec::new();
    for (platform_name, client) in platforms {
        let attempt = client
            .request_project_with_files(input, &lockfile.mc_versions, &loaders)
            .await;
        match attempt {
            Ok(project) => {
                log::debug!("Resolved '{input}' on {platform_name}");
                resolved.push(project);
            },
            Err(e) => {
                // Per-platform misses are expected; keep trying the others.
                log::debug!("Could not resolve '{input}' on {platform_name}: {e}");
            },
        }
    }
    let mut remaining = resolved.into_iter();
    let Some(mut merged) = remaining.next() else {
        return Err(PakkerError::ProjectNotFound(input.to_string()));
    };
    // Fold any additional platform results into the first one.
    for project in remaining {
        merged.merge(project);
    }
    Ok(merged)
}
use std::path::Path;
use crate::{cli::AddArgs, model::fork::LocalConfig};
/// Execute the `add` command.
///
/// Resolves each input against every available platform (merging results when
/// a project exists on several), prompts for confirmation, optionally resolves
/// dependencies, and saves the updated lockfile. When no lockfile exists but a
/// fork parent is configured, a minimal lockfile is bootstrapped from the
/// parent's metadata first.
///
/// # Errors
///
/// Fails when neither a lockfile nor a fork configuration is present, when
/// lockfile I/O fails, or — if every input failed to resolve and nothing was
/// added — with the aggregated per-input resolution errors.
#[expect(
    clippy::future_not_send,
    reason = "not required to be Send; only called from single-threaded context"
)]
pub async fn execute(
    args: AddArgs,
    global_yes: bool,
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<()> {
    let skip_prompts = global_yes;
    log::info!("Adding projects: {:?}", args.inputs);
    // Load lockfile
    // Load expects directory path, so get parent directory
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
    // Check if lockfile exists (try both pakker-lock.json and pakku-lock.json)
    let lockfile_exists =
        lockfile_path.exists() || lockfile_dir.join("pakku-lock.json").exists();
    if !lockfile_exists {
        // Try to load config from both pakker.json and pakku.json
        // NOTE(review): the legacy branch passes the pakku.json *file* path to
        // `LocalConfig::load`, while the first call passes a directory —
        // confirm `load` accepts both forms.
        let local_config = LocalConfig::load(config_dir).or_else(|_| {
            let legacy_config_path = config_dir.join("pakku.json");
            if legacy_config_path.exists() {
                LocalConfig::load(&config_dir.join("pakku.json"))
            } else {
                Err(PakkerError::IoError(std::io::Error::new(
                    std::io::ErrorKind::NotFound,
                    "No pakker.json found",
                )))
            }
        })?;
        if local_config.has_parent() {
            log::info!("Creating minimal fork lockfile with parent metadata...");
            // Check for parent lockfile (try both pakker-lock.json and
            // pakku-lock.json)
            let parent_paths = [
                lockfile_dir.join(".pakku/parent/pakker-lock.json"),
                lockfile_dir.join(".pakku/parent/pakku-lock.json"),
            ];
            let parent_found = parent_paths.iter().any(|path| path.exists());
            if !parent_found {
                return Err(PakkerError::IoError(std::io::Error::new(
                    std::io::ErrorKind::NotFound,
                    "Fork configured but parent lockfile not found at \
                     .pakku/parent/pakker-lock.json or .pakku/parent/pakku-lock.json",
                )));
            }
            // Load parent lockfile to get metadata
            let parent_lock_path = parent_paths
                .iter()
                .find(|path| path.exists())
                .ok_or_else(|| {
                    PakkerError::IoError(std::io::Error::new(
                        std::io::ErrorKind::NotFound,
                        "Parent lockfile not found at expected paths",
                    ))
                })?;
            let parent_lockfile = LockFile::load_with_validation(
                parent_lock_path.parent().ok_or_else(|| {
                    PakkerError::IoError(std::io::Error::new(
                        std::io::ErrorKind::NotFound,
                        "Parent lockfile path has no parent directory",
                    ))
                })?,
                false,
            )?;
            // Inherit target/versions/loaders from the parent; start with an
            // empty project list.
            let minimal_lockfile = LockFile {
                target: parent_lockfile.target,
                mc_versions: parent_lockfile.mc_versions,
                loaders: parent_lockfile.loaders,
                projects: Vec::new(),
                lockfile_version: 1,
            };
            minimal_lockfile.save_without_validation(lockfile_dir)?;
        } else {
            return Err(PakkerError::IoError(std::io::Error::new(
                std::io::ErrorKind::NotFound,
                "pakker-lock.json not found and no fork configured. Run 'pakker init' \
                 first.",
            )));
        }
    }
    let mut lockfile = LockFile::load_with_validation(lockfile_dir, false)?;
    // Prompt for missing CurseForge credentials when needed
    if needs_curseforge(lockfile.target.as_ref()) && !skip_prompts {
        match ensure_curseforge_credentials().await {
            Ok(true) => {},
            Ok(false) => {
                // Not fatal: Modrinth-resolvable projects will still work.
                log::warn!(
                    "CurseForge credentials not configured. CurseForge-only projects \
                     may not resolve."
                );
            },
            Err(e) => {
                log::warn!("Failed to set up CurseForge credentials: {e}");
            },
        }
    }
    // Load config if available
    let _config = Config::load(config_dir).ok();
    // Create platforms
    let platforms = create_all_platforms();
    let mut new_projects = Vec::new();
    let mut errors = MultiError::new();
    // Resolve each input
    for input in &args.inputs {
        let project = match resolve_input(input, &platforms, &lockfile).await {
            Ok(p) => p,
            Err(e) => {
                // Collect error but continue with other inputs
                log::warn!("Failed to resolve '{input}': {e}");
                errors.push(e);
                continue;
            },
        };
        // Check if already exists by matching platform IDs (not pakku_id which is
        // random)
        let already_exists = lockfile.projects.iter().any(|p| {
            // Check if any platform ID matches
            project.id.iter().any(|(platform, id)| {
                p.id
                    .get(platform)
                    .is_some_and(|existing_id| existing_id == id)
            })
        });
        if already_exists {
            if args.update {
                log::info!("Updating existing project: {}", project.get_name());
                // Find and replace the existing project
                if let Some(pos) = lockfile.projects.iter().position(|p| {
                    project.id.iter().any(|(platform, id)| {
                        p.id
                            .get(platform)
                            .is_some_and(|existing_id| existing_id == id)
                    })
                }) {
                    lockfile.projects[pos] = project;
                }
                continue;
            }
            log::info!("Project already exists: {}", project.get_name());
            continue;
        }
        // Prompt for confirmation unless --yes flag is set
        if !skip_prompts {
            let prompt_msg = format!("Add project '{}'?", project.get_name());
            if !crate::ui_utils::prompt_yes_no(&prompt_msg, true, skip_prompts)? {
                log::info!("Skipping project: {}", project.get_name());
                continue;
            }
        }
        new_projects.push(project);
    }
    // Resolve dependencies unless --no-deps is specified
    if !args.no_deps {
        log::info!("Resolving dependencies...");
        let mut resolver = DependencyResolver::new();
        let mut all_new_projects = new_projects.clone();
        for project in &mut new_projects {
            let deps = resolver.resolve(project, &mut lockfile, &platforms).await?;
            for dep in deps {
                // Skip dependencies already in the lockfile or already queued.
                if !lockfile.projects.iter().any(|p| p.pakku_id == dep.pakku_id)
                    && !all_new_projects.iter().any(|p| p.pakku_id == dep.pakku_id)
                {
                    // Prompt user for confirmation unless --yes flag is set
                    if !skip_prompts {
                        let prompt_msg = format!(
                            "Add dependency '{}' required by '{}'?",
                            dep.get_name(),
                            project.get_name()
                        );
                        if !crate::ui_utils::prompt_yes_no(&prompt_msg, true, skip_prompts)?
                        {
                            log::info!("Skipping dependency: {}", dep.get_name());
                            continue;
                        }
                    }
                    log::info!("Adding dependency: {}", dep.get_name());
                    all_new_projects.push(dep);
                }
            }
        }
        new_projects = all_new_projects;
    }
    // Track count before moving
    let added_count = new_projects.len();
    // Add projects to lockfile (updates already handled above)
    for project in new_projects {
        lockfile.add_project(project);
    }
    // Save lockfile
    lockfile.save(lockfile_dir)?;
    log::info!("Successfully added {added_count} project(s)");
    // Return aggregated errors if any occurred
    if !errors.is_empty() {
        let error_count = errors.len();
        log::warn!(
            "{error_count} project(s) failed to resolve (see warnings above)"
        );
        // Return success if at least some projects were added, otherwise return
        // errors
        if added_count == 0 && args.inputs.len() == error_count {
            return errors.into_result(());
        }
    }
    Ok(())
}

View file

@ -0,0 +1,404 @@
use std::{collections::HashMap, path::Path};
use crate::{
error::{PakkerError, Result},
model::{
Config,
LockFile,
Project,
credentials::ResolvedCredentials,
enums::{ProjectSide, ProjectType, UpdateStrategy},
},
platform::create_platform,
resolver::DependencyResolver,
ui_utils::prompt_curseforge_api_key,
};
/// Parse a common project argument (slug or ID with optional file ID)
/// Format: "input" or "`input#file_id`"
fn parse_common_arg(input: &str) -> (String, Option<String>) {
    // Everything after the first '#' (if any) is treated as the file ID.
    match input.split_once('#') {
        Some((project, file)) => (project.to_owned(), Some(file.to_owned())),
        None => (input.to_owned(), None),
    }
}
/// Parse a GitHub argument (owner/repo with optional tag)
/// Format: "owner/repo" or "owner/repo#tag"
fn parse_github_arg(input: &str) -> Result<(String, String, Option<String>)> {
    // Split off an optional "#tag" suffix first.
    let (repo_part, tag) = match input.split_once('#') {
        Some((r, t)) => (r, Some(t.to_string())),
        None => (input, None),
    };
    // The remainder must contain a '/' separating owner and repository.
    let Some((owner, repo)) = repo_part.split_once('/') else {
        return Err(PakkerError::InvalidInput(format!(
            "Invalid GitHub format '{input}'. Expected: owner/repo or owner/repo#tag"
        )));
    };
    Ok((owner.to_string(), repo.to_string(), tag))
}
/// Collect the loader names configured in the lockfile.
fn get_loaders(lockfile: &LockFile) -> Vec<String> {
    lockfile.loaders.keys().map(String::to_owned).collect()
}
/// Execute the `add-prj` command: add a project identified explicitly per
/// platform (`--cf`, `--mr`, `--gh`).
///
/// Fetches the project from each specified platform, merges the results into
/// a single combined project, applies user-specified overrides
/// (type/side/update strategy/redistributable/subpath/aliases/export), and
/// saves the lockfile. Dependencies are resolved unless `no_deps` is set.
///
/// # Errors
///
/// Fails when no platform argument is given, when a requested
/// project/file/tag cannot be found, or when lockfile I/O fails.
#[expect(
    clippy::future_not_send,
    reason = "not required to be Send; only called from single-threaded context"
)]
#[expect(
    clippy::too_many_arguments,
    reason = "CLI command handler maps directly from clap args"
)]
pub async fn execute(
    cf_arg: Option<String>,
    mr_arg: Option<String>,
    gh_arg: Option<String>,
    project_type: Option<ProjectType>,
    project_side: Option<ProjectSide>,
    update_strategy: Option<UpdateStrategy>,
    redistributable: Option<bool>,
    subpath: Option<String>,
    aliases: Vec<String>,
    export: Option<bool>,
    no_deps: bool,
    yes: bool,
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<()> {
    // At least one platform must be specified
    if cf_arg.is_none() && mr_arg.is_none() && gh_arg.is_none() {
        return Err(PakkerError::InvalidInput(
            "At least one platform must be specified (--cf, --mr, or --gh)"
                .to_string(),
        ));
    }
    log::info!("Adding project with explicit platform specification");
    // Load lockfile
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
    let mut lockfile = LockFile::load(lockfile_dir)?;
    // Load config if available
    let _config = Config::load(config_dir).ok();
    // Get MC versions and loaders from lockfile
    let mc_versions = &lockfile.mc_versions;
    let loaders = get_loaders(&lockfile);
    // Fetch projects from each specified platform
    let mut projects_to_merge: Vec<Project> = Vec::new();
    // CurseForge
    if let Some(cf_input) = cf_arg {
        log::info!("Fetching from CurseForge: {cf_input}");
        let (input, file_id) = parse_common_arg(&cf_input);
        let credentials = ResolvedCredentials::load();
        let mut cf_api_key = credentials.curseforge_api_key().map(String::from);
        // Prompt for missing CurseForge credentials
        // NOTE(review): a key entered here is used for this run only — it is
        // not verified or persisted like in the plain `add` command; confirm
        // this is intentional.
        if cf_api_key.is_none() && !yes {
            if let Some(key) = prompt_curseforge_api_key(false)? {
                cf_api_key = Some(key);
            }
        }
        let platform = create_platform("curseforge", cf_api_key)?;
        let mut project = platform
            .request_project_with_files(&input, mc_versions, &loaders)
            .await
            .map_err(|e| {
                PakkerError::ProjectNotFound(format!(
                    "CurseForge project '{input}': {e}"
                ))
            })?;
        // If file_id specified, filter to that file
        if let Some(fid) = file_id {
            project.files.retain(|f| f.id == fid);
            if project.files.is_empty() {
                return Err(PakkerError::FileSelectionError(format!(
                    "File ID '{fid}' not found for CurseForge project '{input}'"
                )));
            }
        }
        projects_to_merge.push(project);
    }
    // Modrinth
    if let Some(mr_input) = mr_arg {
        log::info!("Fetching from Modrinth: {mr_input}");
        let (input, file_id) = parse_common_arg(&mr_input);
        let platform = create_platform("modrinth", None)?;
        let mut project = platform
            .request_project_with_files(&input, mc_versions, &loaders)
            .await
            .map_err(|e| {
                PakkerError::ProjectNotFound(format!("Modrinth project '{input}': {e}"))
            })?;
        // If file_id specified, filter to that file
        if let Some(fid) = file_id {
            project.files.retain(|f| f.id == fid);
            if project.files.is_empty() {
                return Err(PakkerError::FileSelectionError(format!(
                    "File ID '{fid}' not found for Modrinth project '{input}'"
                )));
            }
        }
        projects_to_merge.push(project);
    }
    // GitHub
    if let Some(gh_input) = gh_arg {
        log::info!("Fetching from GitHub: {gh_input}");
        let (owner, repo, tag) = parse_github_arg(&gh_input)?;
        let gh_token = std::env::var("GITHUB_TOKEN").ok();
        let platform = create_platform("github", gh_token)?;
        let repo_path = format!("{owner}/{repo}");
        let mut project = platform
            .request_project_with_files(&repo_path, mc_versions, &loaders)
            .await
            .map_err(|e| {
                PakkerError::ProjectNotFound(format!(
                    "GitHub repository '{owner}/{repo}': {e}"
                ))
            })?;
        // If tag specified, filter to that tag
        if let Some(t) = tag {
            project.files.retain(|f| f.id == t);
            if project.files.is_empty() {
                return Err(PakkerError::FileSelectionError(format!(
                    "Tag '{t}' not found for GitHub repository '{owner}/{repo}'"
                )));
            }
        }
        projects_to_merge.push(project);
    }
    // Merge all fetched projects into one
    if projects_to_merge.is_empty() {
        return Err(PakkerError::ProjectNotFound(
            "No projects could be fetched from specified platforms".to_string(),
        ));
    }
    let mut combined_project = projects_to_merge.remove(0);
    for project in projects_to_merge {
        combined_project.merge(project);
    }
    // Apply user-specified properties
    if let Some(pt) = project_type {
        combined_project.r#type = pt;
    }
    if let Some(ps) = project_side {
        combined_project.side = ps;
    }
    if let Some(us) = update_strategy {
        combined_project.update_strategy = us;
    }
    if let Some(r) = redistributable {
        combined_project.redistributable = r;
    }
    if let Some(sp) = subpath {
        combined_project.subpath = Some(sp);
    }
    if let Some(e) = export {
        combined_project.export = e;
    }
    // Add aliases
    for alias in aliases {
        combined_project.aliases.insert(alias);
    }
    // Check if project already exists
    let existing_pos = lockfile.projects.iter().position(|p| {
        // Check if any platform ID matches
        combined_project.id.iter().any(|(platform, id)| {
            p.id
                .get(platform)
                .is_some_and(|existing_id| existing_id == id)
        })
    });
    let project_name = combined_project.get_name();
    if let Some(pos) = existing_pos {
        let existing_project = &lockfile.projects[pos];
        let existing_name = existing_project.get_name();
        if !yes {
            let prompt_msg = format!(
                "Project '{existing_name}' already exists. Replace with \
                 '{project_name}'?"
            );
            if !crate::ui_utils::prompt_yes_no(&prompt_msg, false, yes)? {
                log::info!("Operation cancelled by user");
                return Ok(());
            }
        }
        log::info!("Replacing existing project: {existing_name}");
        lockfile.projects[pos] = combined_project.clone();
        println!("✓ Replaced '{existing_name}' with '{project_name}'");
    } else {
        if !yes {
            let prompt_msg = format!("Add project '{project_name}'?");
            if !crate::ui_utils::prompt_yes_no(&prompt_msg, true, yes)? {
                log::info!("Operation cancelled by user");
                return Ok(());
            }
        }
        lockfile.add_project(combined_project.clone());
        println!("✓ Added '{project_name}'");
    }
    // Resolve dependencies unless --no-deps is specified
    if !no_deps {
        log::info!("Resolving dependencies...");
        let platforms = create_all_platforms();
        let mut resolver = DependencyResolver::new();
        let deps = resolver
            .resolve(&mut combined_project, &mut lockfile, &platforms)
            .await?;
        for dep in deps {
            // Skip if already in lockfile
            if lockfile.projects.iter().any(|p| {
                dep.id.iter().any(|(platform, id)| {
                    p.id
                        .get(platform)
                        .is_some_and(|existing_id| existing_id == id)
                })
            }) {
                continue;
            }
            let dep_name = dep.get_name();
            // Prompt user for confirmation unless --yes flag is set
            if !yes {
                let prompt_msg =
                    format!("Add dependency '{dep_name}' required by '{project_name}'?");
                if !crate::ui_utils::prompt_yes_no(&prompt_msg, true, yes)? {
                    log::info!("Skipping dependency: {dep_name}");
                    continue;
                }
            }
            log::info!("Adding dependency: {dep_name}");
            lockfile.add_project(dep);
            println!(" ✓ Added dependency '{dep_name}'");
        }
    }
    // Save lockfile
    lockfile.save(lockfile_dir)?;
    log::info!("Successfully completed add-prj operation");
    Ok(())
}
/// Build the platform clients used for dependency resolution.
///
/// Prefers the combined "multiplatform" client (falling back to
/// Modrinth-only) and additionally registers a GitHub client when one
/// can be constructed.
fn create_all_platforms()
-> HashMap<String, Box<dyn crate::platform::PlatformClient>> {
    let credentials = ResolvedCredentials::load();
    let curseforge_key = credentials.curseforge_api_key().map(String::from);
    let github_token = credentials.github_access_token().map(String::from);
    let mut platforms = HashMap::new();
    match create_platform("multiplatform", curseforge_key) {
        Ok(platform) => {
            platforms.insert("multiplatform".to_string(), platform);
        },
        Err(_) => {
            // Combined client unavailable: degrade to Modrinth-only.
            if let Ok(platform) = create_platform("modrinth", None) {
                platforms.insert("modrinth".to_string(), platform);
            }
        },
    }
    if let Ok(platform) = create_platform("github", github_token) {
        platforms.insert("github".to_string(), platform);
    }
    platforms
}
#[cfg(test)]
mod tests {
    use super::*;
    // A plain slug with no '#' yields no file ID.
    #[test]
    fn test_parse_common_arg_without_file_id() {
        let (input, file_id) = parse_common_arg("fabric-api");
        assert_eq!(input, "fabric-api");
        assert_eq!(file_id, None);
    }
    // "slug#id": everything after the first '#' becomes the file ID.
    #[test]
    fn test_parse_common_arg_with_file_id() {
        let (input, file_id) = parse_common_arg("fabric-api#12345");
        assert_eq!(input, "fabric-api");
        assert_eq!(file_id, Some("12345".to_string()));
    }
    // "owner/repo" splits into its two components with no tag.
    #[test]
    fn test_parse_github_arg_owner_repo() {
        let result = parse_github_arg("FabricMC/fabric");
        assert!(result.is_ok());
        let (owner, repo, tag) = result.unwrap();
        assert_eq!(owner, "FabricMC");
        assert_eq!(repo, "fabric");
        assert_eq!(tag, None);
    }
    // "owner/repo#tag": the part after '#' becomes the tag.
    #[test]
    fn test_parse_github_arg_with_tag() {
        let result = parse_github_arg("FabricMC/fabric#v0.15.0");
        assert!(result.is_ok());
        let (owner, repo, tag) = result.unwrap();
        assert_eq!(owner, "FabricMC");
        assert_eq!(repo, "fabric");
        assert_eq!(tag, Some("v0.15.0".to_string()));
    }
    // Input without a '/' separator is rejected with an explanatory error.
    #[test]
    fn test_parse_github_arg_invalid() {
        let result = parse_github_arg("invalid-format");
        assert!(result.is_err());
        assert!(
            result
                .unwrap_err()
                .to_string()
                .contains("Invalid GitHub format")
        );
    }
    // Documents current behavior: a trailing '/' yields an empty repo name
    // rather than an error.
    #[test]
    fn test_parse_github_arg_missing_repo() {
        let result = parse_github_arg("FabricMC/");
        assert!(result.is_ok());
        let (owner, repo, tag) = result.unwrap();
        assert_eq!(owner, "FabricMC");
        assert_eq!(repo, "");
        assert_eq!(tag, None);
    }
}

View file

@ -0,0 +1,147 @@
use std::path::Path;
use yansi::Paint;
use crate::{
error::Result,
model::config::Config,
ui_utils::prompt_input_optional,
};
/// Execute the `cfg` command: update modpack-level configuration.
///
/// Applies any values given on the command line; when none are given, falls
/// back to interactively prompting for the basic fields (name, version,
/// description, author). The config is saved only if something changed.
///
/// # Errors
///
/// Returns an error when the configuration cannot be loaded or saved.
#[expect(
    clippy::too_many_arguments,
    reason = "CLI command handler maps directly from clap args"
)]
pub fn execute(
    config_path: &Path,
    name: Option<String>,
    version: Option<String>,
    description: Option<String>,
    author: Option<String>,
    mods_path: Option<String>,
    resource_packs_path: Option<String>,
    data_packs_path: Option<String>,
    worlds_path: Option<String>,
    shaders_path: Option<String>,
) -> Result<()> {
    // Config::load expects the directory containing the config file.
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
    let mut config = Config::load(config_dir)?;
    // Any CLI-provided value counts as a change; the interactive prompts
    // below may also flip this flag.
    let mut changed = name.is_some()
        || version.is_some()
        || description.is_some()
        || author.is_some()
        || mods_path.is_some()
        || resource_packs_path.is_some()
        || data_packs_path.is_some()
        || worlds_path.is_some()
        || shaders_path.is_some();
    // Modpack properties
    if let Some(new_name) = name {
        config.name.clone_from(&new_name);
        println!("{}", format!("✓ 'name' set to '{new_name}'").green());
    }
    if let Some(new_version) = version {
        config.version.clone_from(&new_version);
        println!("{}", format!("✓ 'version' set to '{new_version}'").green());
    }
    if let Some(new_description) = description {
        config.description = Some(new_description.clone());
        println!(
            "{}",
            format!("✓ 'description' set to '{new_description}'").green()
        );
    }
    if let Some(new_author) = author {
        config.author = Some(new_author.clone());
        println!("{}", format!("✓ 'author' set to '{new_author}'").green());
    }
    // Project type paths
    if let Some(path) = mods_path {
        config.paths.insert("mod".to_string(), path.clone());
        println!("{}", format!("✓ 'paths.mod' set to '{path}'").green());
    }
    if let Some(path) = resource_packs_path {
        config
            .paths
            .insert("resource-pack".to_string(), path.clone());
        println!(
            "{}",
            format!("✓ 'paths.resource-pack' set to '{path}'").green()
        );
    }
    if let Some(path) = data_packs_path {
        config.paths.insert("data-pack".to_string(), path.clone());
        println!("{}", format!("✓ 'paths.data-pack' set to '{path}'").green());
    }
    if let Some(path) = worlds_path {
        config.paths.insert("world".to_string(), path.clone());
        println!("{}", format!("✓ 'paths.world' set to '{path}'").green());
    }
    if let Some(path) = shaders_path {
        config.paths.insert("shader".to_string(), path.clone());
        println!("{}", format!("✓ 'paths.shader' set to '{path}'").green());
    }
    if !changed {
        // Interactive mode: prompt for values if none were specified
        println!(
            "{}",
            "No changes specified. Enter values interactively (press Enter to skip):"
                .yellow()
        );
        println!();
        // Prompt for each configurable field
        // NOTE(review): prompt errors are silently swallowed by the
        // `if let Ok(...)` patterns below — confirm best-effort behavior
        // is intended.
        if let Ok(Some(new_name)) = prompt_input_optional("  Name") {
            config.name.clone_from(&new_name);
            println!("{}", format!("  ✓ 'name' set to '{new_name}'").green());
            changed = true;
        }
        if let Ok(Some(new_version)) = prompt_input_optional("  Version") {
            config.version.clone_from(&new_version);
            println!(
                "{}",
                format!("  ✓ 'version' set to '{new_version}'").green()
            );
            changed = true;
        }
        if let Ok(Some(new_description)) = prompt_input_optional("  Description") {
            config.description = Some(new_description.clone());
            println!(
                "{}",
                format!("  ✓ 'description' set to '{new_description}'").green()
            );
            changed = true;
        }
        if let Ok(Some(new_author)) = prompt_input_optional("  Author") {
            config.author = Some(new_author.clone());
            println!("{}", format!("  ✓ 'author' set to '{new_author}'").green());
            changed = true;
        }
        if !changed {
            println!();
            println!("{}", "No changes made.".dim());
            return Ok(());
        }
    }
    // Config::save expects directory path, not file path
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
    config.save(config_dir)?;
    println!("\n{}", "Configuration updated successfully".green().bold());
    Ok(())
}

View file

@ -0,0 +1,204 @@
use std::path::Path;
use yansi::Paint;
use crate::{
error::{PakkerError, Result},
model::{
config::Config,
enums::{ProjectSide, ProjectType, UpdateStrategy},
lockfile::LockFile,
},
};
/// Execute the `cfg-prj` command: update per-project configuration overrides.
///
/// The `project` argument is looked up in the lockfile by pakku ID first,
/// then by slug, then by display name (both case-insensitive). Parsed
/// overrides are written to the config entry keyed by the project's pakku ID.
///
/// # Errors
///
/// Returns an error when config/lockfile I/O fails, when the project cannot
/// be found, when it has no pakku ID, or when a type/side/update-strategy
/// string is not recognized.
#[expect(
    clippy::too_many_arguments,
    reason = "CLI command handler maps directly from clap args"
)]
pub fn execute(
    config_path: &Path,
    lockfile_path: &Path,
    project: &str,
    r#type: Option<&str>,
    side: Option<&str>,
    update_strategy: Option<&str>,
    redistributable: Option<bool>,
    subpath: Option<String>,
    add_alias: Option<String>,
    remove_alias: Option<String>,
    export: Option<bool>,
) -> Result<()> {
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
    let mut config = Config::load(config_dir)?;
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let lockfile = LockFile::load(lockfile_dir)?;
    // Find the project in lockfile to get its pakku_id
    // Try multiple lookup strategies: pakku_id first, then slug, then name
    let found_project = lockfile
        .get_project(project)
        .or_else(|| {
            // Try to find by slug on any platform
            lockfile
                .projects
                .iter()
                .find(|p| p.slug.values().any(|s| s.eq_ignore_ascii_case(project)))
        })
        .or_else(|| {
            // Try to find by name on any platform
            lockfile
                .projects
                .iter()
                .find(|p| p.name.values().any(|n| n.eq_ignore_ascii_case(project)))
        })
        .ok_or_else(|| PakkerError::ProjectNotFound(project.to_string()))?;
    let pakku_id = found_project.pakku_id.as_ref().ok_or_else(|| {
        PakkerError::InvalidProject("Project has no pakku_id".to_string())
    })?;
    // Get or create project config
    let mut project_config = config
        .get_project_config(pakku_id)
        .cloned()
        .unwrap_or_default();
    // Computed up-front so invalid values can error before any mutation.
    let changed = r#type.is_some()
        || side.is_some()
        || update_strategy.is_some()
        || redistributable.is_some()
        || subpath.is_some()
        || add_alias.is_some()
        || remove_alias.is_some()
        || export.is_some();
    if let Some(type_str) = r#type {
        // Accept both underscore and concatenated spellings, case-insensitive.
        let parsed_type = match type_str.to_uppercase().as_str() {
            "MOD" => ProjectType::Mod,
            "RESOURCE_PACK" | "RESOURCEPACK" => ProjectType::ResourcePack,
            "DATA_PACK" | "DATAPACK" => ProjectType::DataPack,
            "SHADER" => ProjectType::Shader,
            "WORLD" => ProjectType::World,
            _ => {
                return Err(PakkerError::InvalidProject(format!(
                    "Invalid type: {type_str}"
                )));
            },
        };
        project_config.r#type = Some(parsed_type);
        println!(
            "{}",
            format!("✓ 'type' set to '{parsed_type:?}' for '{pakku_id}'").green()
        );
    }
    if let Some(side_str) = side {
        let parsed_side = match side_str.to_uppercase().as_str() {
            "CLIENT" => ProjectSide::Client,
            "SERVER" => ProjectSide::Server,
            "BOTH" => ProjectSide::Both,
            _ => {
                return Err(PakkerError::InvalidProject(format!(
                    "Invalid side: {side_str}"
                )));
            },
        };
        project_config.side = Some(parsed_side);
        println!(
            "{}",
            format!("✓ 'side' set to '{parsed_side:?}' for '{pakku_id}'").green()
        );
    }
    if let Some(strategy_str) = update_strategy {
        let parsed_strategy = match strategy_str.to_uppercase().as_str() {
            "LATEST" => UpdateStrategy::Latest,
            "NONE" => UpdateStrategy::None,
            _ => {
                return Err(PakkerError::InvalidProject(format!(
                    "Invalid update strategy: {strategy_str}"
                )));
            },
        };
        project_config.update_strategy = Some(parsed_strategy);
        println!(
            "{}",
            format!(
                "✓ 'updateStrategy' set to '{parsed_strategy:?}' for '{pakku_id}'"
            )
            .green()
        );
    }
    if let Some(new_redistributable) = redistributable {
        project_config.redistributable = Some(new_redistributable);
        println!(
            "{}",
            format!(
                "✓ 'redistributable' set to '{new_redistributable}' for '{pakku_id}'"
            )
            .green()
        );
    }
    if let Some(new_subpath) = subpath {
        project_config.subpath = Some(new_subpath.clone());
        println!(
            "{}",
            format!("✓ 'subpath' set to '{new_subpath}' for '{pakku_id}'").green()
        );
    }
    if let Some(alias_to_add) = add_alias {
        // De-duplicate: only add (and report) when not already present.
        let mut aliases = project_config.aliases.clone().unwrap_or_default();
        if !aliases.contains(&alias_to_add) {
            aliases.push(alias_to_add.clone());
            project_config.aliases = Some(aliases);
            println!(
                "{}",
                format!("✓ Added alias '{alias_to_add}' for '{pakku_id}'").green()
            );
        }
    }
    if let Some(alias_to_remove) = remove_alias
        && let Some(mut aliases) = project_config.aliases.clone()
    {
        aliases.retain(|a| a != &alias_to_remove);
        project_config.aliases = Some(aliases);
        println!(
            "{}",
            format!("✓ Removed alias '{alias_to_remove}' from '{pakku_id}'").green()
        );
    }
    if let Some(new_export) = export {
        project_config.export = Some(new_export);
        println!(
            "{}",
            format!("✓ 'export' set to '{new_export}' for '{pakku_id}'").green()
        );
    }
    if !changed {
        eprintln!(
            "{}",
            "No changes specified. Use --help for options.".yellow()
        );
        return Ok(());
    }
    config.set_project_config(pakku_id.clone(), project_config);
    // Config::save expects directory path, not file path
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
    config.save(config_dir)?;
    println!(
        "\n{}",
        format!("Project configuration updated for '{pakku_id}'")
            .green()
            .bold()
    );
    Ok(())
}

View file

@ -0,0 +1,112 @@
use yansi::Paint;
use crate::{
error::Result,
model::{
PakkerCredentialsFile,
credentials::{CredentialsSource, ResolvedCredentials},
},
};
/// Execute the `credentials` command: either delete Pakker-managed credential
/// storage or print an overview of the currently resolved credentials.
///
/// # Errors
///
/// Returns an error when deleting credential storage fails or when the
/// credentials file path cannot be determined.
pub fn execute(
    delete: bool,
    delete_file: bool,
    delete_keyring: bool,
) -> Result<()> {
    // Any delete flag switches the command into deletion mode.
    if delete || delete_file || delete_keyring {
        // Pakker must never delete or modify Pakku's credentials file
        // (~/.pakku/credentials). Deletion here only affects Pakker-managed
        // storage (keyring + Pakker-owned file).
        if delete || delete_file {
            PakkerCredentialsFile::delete()?;
        }
        if delete || delete_keyring {
            ResolvedCredentials::delete_keyring()?;
        }
        println!("Credentials deleted.");
        return Ok(());
    }
    let creds = ResolvedCredentials::load();
    let any_stored = [
        creds.curseforge_api_key(),
        creds.modrinth_token(),
        creds.github_access_token(),
    ]
    .iter()
    .any(Option::is_some);
    if !any_stored {
        println!("{}", "No credentials stored".yellow());
        println!("\nUse 'pakker credentials set' to add credentials");
        return Ok(());
    }
    println!("{}", "Stored Credentials:".cyan().bold());
    println!();
    print_credential(
        "CurseForge API Key",
        creds.curseforge_api_key(),
        creds.curseforge_source(),
    );
    print_credential(
        "Modrinth Token",
        creds.modrinth_token(),
        creds.modrinth_source(),
    );
    print_credential(
        "GitHub Access Token",
        creds.github_access_token(),
        creds.github_source(),
    );
    println!();
    println!(
        "{}",
        format!(
            "Credentials file: {}",
            PakkerCredentialsFile::get_path()?.display()
        )
        .cyan()
    );
    Ok(())
}
fn print_credential(
label: &str,
value: Option<&str>,
source: Option<CredentialsSource>,
) {
if let Some(v) = value {
let masked = mask_key(v);
let source = source.map_or("unknown", source_label);
println!(" {} {} ({})", format!("{label}:").yellow(), masked, source);
}
}
/// Short display tag for where a credential was resolved from.
const fn source_label(source: CredentialsSource) -> &'static str {
    match source {
        CredentialsSource::Keyring => "keyring",
        CredentialsSource::PakkerFile => "pakker-file",
        CredentialsSource::Env => "env",
    }
}
/// Mask a secret for display: short keys are fully starred; longer keys keep
/// the first 8 and last 4 bytes visible with stars in between.
///
/// Byte-index slicing panics when an index is not a UTF-8 character boundary,
/// so non-ASCII keys whose boundaries don't line up fall back to a full mask
/// instead of panicking.
fn mask_key(key: &str) -> String {
    // Fully mask short keys (nothing useful to reveal), and any key whose
    // slice points would split a multi-byte character.
    if key.len() <= 12
        || !key.is_char_boundary(8)
        || !key.is_char_boundary(key.len() - 4)
    {
        return "*".repeat(key.len());
    }
    let start = &key[..8];
    let end = &key[key.len() - 4..];
    let middle = "*".repeat(key.len() - 12);
    format!("{start}{middle}{end}")
}

View file

@ -0,0 +1,219 @@
use std::{io::Write, time::Duration};
use crate::{
error::{PakkerError, Result},
http,
model::{PakkerCredentialsFile, set_keyring_secret},
ui_utils::{prompt_secret, prompt_yes_no},
};
/// Saves (and, when possible, verifies) platform credentials.
///
/// With no CLI arguments, each credential is prompted for interactively.
/// Every supplied value is trimmed first and whitespace-only values are
/// discarded, so verification requests use exactly the value that will be
/// stored (the original code verified the untrimmed value and only trimmed
/// at save time). Values that fail verification are kept only if the user
/// confirms.
///
/// # Errors
/// Returns an error when prompting, verification transport, keyring
/// access, or reading/writing the credentials file fails.
pub async fn execute(
    curseforge_api_key: Option<String>,
    modrinth_token: Option<String>,
    github_access_token: Option<String>,
) -> Result<()> {
    let mut cf_key = curseforge_api_key;
    let mut mr_token = modrinth_token;
    let mut gh_token = github_access_token;
    let any_cli_args =
        cf_key.is_some() || mr_token.is_some() || gh_token.is_some();
    // Enter interactive mode when no CLI args provided
    if !any_cli_args {
        println!("No credentials provided via command line.");
        println!();
        if let Some(key) =
            prompt_secret("CurseForge API key (press Enter to skip)")?
        {
            cf_key = Some(key);
        }
        if let Some(token) =
            prompt_secret("Modrinth token (press Enter to skip)")?
        {
            mr_token = Some(token);
        }
        if let Some(token) =
            prompt_secret("GitHub access token (press Enter to skip)")?
        {
            gh_token = Some(token);
        }
    }
    // Normalize up-front: trim whitespace and treat empty-after-trim input
    // as "not provided". This ensures verification sees the stored value
    // and that whitespace-only input reports "No credentials to save."
    cf_key = normalize(cf_key);
    mr_token = normalize(mr_token);
    gh_token = normalize(gh_token);
    let updated_any =
        cf_key.is_some() || mr_token.is_some() || gh_token.is_some();
    if !updated_any {
        println!("No credentials to save.");
        return Ok(());
    }
    // Verify credentials before saving
    let client = http::create_http_client();
    let mut verified = Vec::new();
    if let Some(ref key) = cf_key {
        print!("Verifying CurseForge API key... ");
        std::io::stdout().flush().ok();
        match verify_curseforge(&client, key).await {
            Ok(()) => {
                println!("valid");
                verified.push("CurseForge");
            },
            Err(e) => {
                println!("failed ({e})");
                // Invalid values are discarded unless explicitly kept.
                if !prompt_yes_no(
                    "CurseForge key appears invalid. Save anyway?",
                    false,
                    false,
                )? {
                    cf_key = None;
                }
            },
        }
    }
    if let Some(ref token) = mr_token {
        print!("Verifying Modrinth token... ");
        std::io::stdout().flush().ok();
        match verify_modrinth(&client, token).await {
            Ok(()) => {
                println!("valid");
                verified.push("Modrinth");
            },
            Err(e) => {
                println!("failed ({e})");
                if !prompt_yes_no(
                    "Modrinth token appears invalid. Save anyway?",
                    false,
                    false,
                )? {
                    mr_token = None;
                }
            },
        }
    }
    if let Some(ref token) = gh_token {
        print!("Verifying GitHub access token... ");
        std::io::stdout().flush().ok();
        match verify_github(&client, token).await {
            Ok(()) => {
                println!("valid");
                verified.push("GitHub");
            },
            Err(e) => {
                println!("failed ({e})");
                if !prompt_yes_no(
                    "GitHub token appears invalid. Save anyway?",
                    false,
                    false,
                )? {
                    gh_token = None;
                }
            },
        }
    }
    // Persist: the keyring is written first, then the on-disk file.
    // Values are already trimmed and non-empty at this point.
    let mut creds = PakkerCredentialsFile::load()?;
    if let Some(key) = cf_key {
        set_keyring_secret("curseforge_api_key", &key)?;
        creds.curseforge_api_key = Some(key);
    }
    if let Some(token) = mr_token {
        set_keyring_secret("modrinth_token", &token)?;
        creds.modrinth_token = Some(token);
    }
    if let Some(token) = gh_token {
        set_keyring_secret("github_access_token", &token)?;
        creds.github_access_token = Some(token);
    }
    creds.save()?;
    println!();
    if verified.is_empty() {
        println!("Credentials saved (unverified).");
    } else {
        println!("Credentials saved and verified: {}", verified.join(", "));
    }
    println!(
        "Credentials file: {}",
        PakkerCredentialsFile::get_path()?.display()
    );
    println!("Keyring service: pakker");
    Ok(())
}

/// Trims a supplied secret; whitespace-only values become `None`.
fn normalize(value: Option<String>) -> Option<String> {
    value
        .map(|v| v.trim().to_owned())
        .filter(|v| !v.is_empty())
}
/// Checks that `api_key` is accepted by the CurseForge API by fetching a
/// well-known mod. Any non-2xx status maps to
/// [`PakkerError::PlatformApiError`]; requests time out after 10 seconds.
async fn verify_curseforge(
    client: &reqwest::Client,
    api_key: &str,
) -> Result<()> {
    let status = client
        .get("https://api.curseforge.com/v1/mods/238222")
        .header("x-api-key", api_key)
        .timeout(Duration::from_secs(10))
        .send()
        .await?
        .status();
    if !status.is_success() {
        return Err(PakkerError::PlatformApiError(format!("HTTP {}", status)));
    }
    Ok(())
}
/// Checks that `token` is accepted by the Modrinth API (`GET /v2/user`).
/// Any non-2xx status maps to [`PakkerError::PlatformApiError`]; requests
/// time out after 10 seconds.
async fn verify_modrinth(client: &reqwest::Client, token: &str) -> Result<()> {
    let status = client
        .get("https://api.modrinth.com/v2/user")
        .header("Authorization", token)
        .timeout(Duration::from_secs(10))
        .send()
        .await?
        .status();
    if !status.is_success() {
        return Err(PakkerError::PlatformApiError(format!("HTTP {}", status)));
    }
    Ok(())
}
/// Checks that `token` is accepted by the GitHub API (`GET /user` with a
/// Bearer token). GitHub requires a `User-Agent` header. Any non-2xx
/// status maps to [`PakkerError::PlatformApiError`]; requests time out
/// after 10 seconds.
async fn verify_github(client: &reqwest::Client, token: &str) -> Result<()> {
    let status = client
        .get("https://api.github.com/user")
        .header("Authorization", format!("Bearer {token}"))
        .header("User-Agent", "pakker")
        .timeout(Duration::from_secs(10))
        .send()
        .await?
        .status();
    if !status.is_success() {
        return Err(PakkerError::PlatformApiError(format!("HTTP {}", status)));
    }
    Ok(())
}

// ---- diff-viewer extraction artifact: next file begins (hunk: @ -0,0 +1,151 @@) ----
use std::time::Duration;
use indicatif::{ProgressBar, ProgressStyle};
use yansi::Paint;
use crate::{error::Result, http, model::credentials::ResolvedCredentials};
/// Tests every configured credential against its platform API and prints a
/// per-credential summary.
///
/// Unconfigured credentials are reported as "not configured" and do not
/// count as failures; only a configured credential that is rejected marks
/// the overall run as invalid.
///
/// # Errors
/// Returns an error only on transport-level failures propagated by the
/// individual `test_*` helpers via `?` inside them; API rejections are
/// captured into the results list instead.
pub async fn execute() -> Result<()> {
    let creds = ResolvedCredentials::load();
    let client = http::create_http_client();
    let spinner = ProgressBar::new_spinner();
    spinner.set_style(
        ProgressStyle::default_spinner()
            .template("{spinner:.green} {msg}")
            .expect("spinner template is valid"),
    );
    spinner.enable_steady_tick(Duration::from_millis(80));
    spinner.set_message("Testing credentials...");
    let mut all_valid = true;
    // Entries are (label, is_valid, error):
    //   (_, true, None)     -> valid
    //   (_, false, Some(_)) -> invalid (error message shown)
    //   (_, false, None)    -> not configured
    let mut results = Vec::new();
    // Test CurseForge
    if let Some(key) = creds.curseforge_api_key() {
        spinner.set_message("Testing CurseForge API key...");
        match test_curseforge(&client, key).await {
            Ok(()) => results.push(("CurseForge API Key", true, None)),
            Err(e) => {
                results.push(("CurseForge API Key", false, Some(e.to_string())));
                all_valid = false;
            },
        }
    } else {
        results.push(("CurseForge API Key", false, None));
    }
    // Test Modrinth
    if let Some(token) = creds.modrinth_token() {
        spinner.set_message("Testing Modrinth token...");
        match test_modrinth(&client, token).await {
            Ok(()) => results.push(("Modrinth Token", true, None)),
            Err(e) => {
                results.push(("Modrinth Token", false, Some(e.to_string())));
                all_valid = false;
            },
        }
    } else {
        results.push(("Modrinth Token", false, None));
    }
    // Test GitHub
    if let Some(token) = creds.github_access_token() {
        spinner.set_message("Testing GitHub access token...");
        match test_github(&client, token).await {
            Ok(()) => results.push(("GitHub Access Token", true, None)),
            Err(e) => {
                results.push(("GitHub Access Token", false, Some(e.to_string())));
                all_valid = false;
            },
        }
    } else {
        results.push(("GitHub Access Token", false, None));
    }
    // Clear the spinner before printing the static summary.
    spinner.finish_and_clear();
    println!("{}", "Credential Test Results:".cyan().bold());
    println!();
    for (name, valid, error) in results {
        if let Some(err) = error {
            println!(
                " {} {} ({err})",
                format!("{name}:").yellow(),
                "invalid".red()
            );
        } else if valid {
            println!(" {} {}", format!("{name}:").yellow(), "valid".green());
        } else {
            println!(" {} {}", format!("{name}:").yellow(), "not configured");
        }
    }
    println!();
    // Note: `all_valid` stays true when nothing is configured at all; the
    // message intentionally says "configured credentials".
    if all_valid {
        println!("{}", "All configured credentials are valid.".green());
    } else {
        println!("{}", "Some credentials are invalid or expired.".red());
        println!("Use 'pakker credentials set' to update them.");
    }
    Ok(())
}
async fn test_curseforge(
client: &reqwest::Client,
api_key: &str,
) -> Result<()> {
// Use a well-known mod (JEI) to verify key works for mod lookups
let response = client
.get("https://api.curseforge.com/v1/mods/238222")
.header("x-api-key", api_key)
.send()
.await?;
if response.status().is_success() {
Ok(())
} else {
Err(crate::error::PakkerError::PlatformApiError(format!(
"HTTP {}",
response.status()
)))
}
}
async fn test_modrinth(client: &reqwest::Client, token: &str) -> Result<()> {
let response = client
.get("https://api.modrinth.com/v2/user")
.header("Authorization", token)
.send()
.await?;
if response.status().is_success() {
Ok(())
} else {
Err(crate::error::PakkerError::PlatformApiError(format!(
"HTTP {}",
response.status()
)))
}
}
async fn test_github(client: &reqwest::Client, token: &str) -> Result<()> {
let response = client
.get("https://api.github.com/user")
.header("Authorization", format!("Bearer {token}"))
.header("User-Agent", "pakker")
.send()
.await?;
if response.status().is_success() {
Ok(())
} else {
Err(crate::error::PakkerError::PlatformApiError(format!(
"HTTP {}",
response.status()
)))
}
}

// ---- diff-viewer extraction artifact: next file begins (hunk: @ -0,0 +1,470 @@) ----
use std::{
collections::{HashMap, HashSet},
fmt::Write,
fs,
path::Path,
};
use crate::{cli::DiffArgs, error::Result, model::LockFile};
/// Kind of difference detected for a single project between two lockfiles.
#[derive(Debug)]
enum ChangeType {
    /// Project exists only in the new (current) lockfile.
    Added,
    /// Project exists only in the old lockfile.
    Removed,
    /// Project exists in both lockfiles but its primary file name differs.
    Updated,
}
/// One project-level difference between two lockfiles, with the file names
/// involved when they are known.
#[derive(Debug)]
struct ProjectChange {
    /// Display name (an arbitrary entry from the project's name map —
    /// the first value the map iterator yields).
    name: String,
    /// Whether the project was added, removed, or updated.
    change_type: ChangeType,
    /// First file name in the old lockfile, if the project existed there.
    old_file: Option<String>,
    /// First file name in the new lockfile, if the project exists there.
    new_file: Option<String>,
}
/// Compares two lockfiles and reports Minecraft-version, loader, and
/// per-project (added/removed/updated) changes.
///
/// Output goes to the `--markdown-diff` file, the `--markdown` file, or
/// the terminal — in that order of precedence.
///
/// # Errors
/// Fails when either lockfile cannot be loaded or the output file cannot
/// be written.
pub fn execute(args: &DiffArgs, _lockfile_path: &Path) -> Result<()> {
    log::info!("Comparing lockfiles");
    // Load old lockfile
    // NOTE(review): only the *directory* of --old-lockfile is used below;
    // LockFile::load presumably resolves the lockfile name itself, so a
    // custom file name in the argument appears to be ignored — confirm.
    let old_path = Path::new(&args.old_lockfile);
    let old_dir = old_path.parent().unwrap_or_else(|| Path::new("."));
    let old_lockfile = LockFile::load(old_dir)?;
    // Load current lockfile
    let current_path = args
        .current_lockfile
        .as_ref()
        .map_or_else(|| Path::new("pakku-lock.json"), Path::new);
    let current_dir = current_path.parent().unwrap_or_else(|| Path::new("."));
    let current_lockfile = LockFile::load(current_dir)?;
    // Compare metadata
    let mut changes = Vec::new();
    // Check MC versions (set difference in both directions)
    let old_mc: HashSet<_> = old_lockfile.mc_versions.iter().collect();
    let new_mc: HashSet<_> = current_lockfile.mc_versions.iter().collect();
    let mc_added: Vec<_> = new_mc.difference(&old_mc).collect();
    let mc_removed: Vec<_> = old_mc.difference(&new_mc).collect();
    // Check loaders
    let old_loaders = &old_lockfile.loaders;
    let new_loaders = &current_lockfile.loaders;
    // Compare projects, keyed by pakku_id
    let old_projects: HashMap<_, _> = old_lockfile
        .projects
        .iter()
        .map(|p| (&p.pakku_id, p))
        .collect();
    let new_projects: HashMap<_, _> = current_lockfile
        .projects
        .iter()
        .map(|p| (&p.pakku_id, p))
        .collect();
    // Find added, removed, updated projects. An "update" is detected by a
    // change of the project's first file name.
    for (id, new_proj) in &new_projects {
        if !old_projects.contains_key(id) {
            changes.push(ProjectChange {
                name: new_proj.name.values().next().cloned().unwrap_or_default(),
                change_type: ChangeType::Added,
                old_file: None,
                new_file: new_proj.files.first().map(|f| f.file_name.clone()),
            });
        } else if let Some(old_proj) = old_projects.get(id) {
            let old_file_name = old_proj.files.first().map(|f| &f.file_name);
            let new_file_name = new_proj.files.first().map(|f| &f.file_name);
            if old_file_name != new_file_name {
                changes.push(ProjectChange {
                    name: new_proj
                        .name
                        .values()
                        .next()
                        .cloned()
                        .unwrap_or_default(),
                    change_type: ChangeType::Updated,
                    old_file: old_file_name.cloned(),
                    new_file: new_file_name.cloned(),
                });
            }
        }
    }
    for (id, old_proj) in &old_projects {
        if !new_projects.contains_key(id) {
            changes.push(ProjectChange {
                name: old_proj.name.values().next().cloned().unwrap_or_default(),
                change_type: ChangeType::Removed,
                old_file: old_proj.files.first().map(|f| f.file_name.clone()),
                new_file: None,
            });
        }
    }
    // Output results: diff-style markdown takes precedence over plain
    // markdown, which takes precedence over terminal output.
    if let Some(path) = &args.markdown_diff {
        write_markdown_diff(
            path,
            &old_lockfile,
            &current_lockfile,
            &changes,
            &mc_added,
            &mc_removed,
            old_loaders,
            new_loaders,
            args.verbose,
            args.header_size,
        )?;
    } else if let Some(path) = &args.markdown {
        write_markdown(
            path,
            &old_lockfile,
            &current_lockfile,
            &changes,
            &mc_added,
            &mc_removed,
            old_loaders,
            new_loaders,
            args.verbose,
            args.header_size,
        )?;
    } else {
        print_terminal_diff(
            &old_lockfile,
            &current_lockfile,
            &changes,
            &mc_added,
            &mc_removed,
            old_loaders,
            new_loaders,
            args.verbose,
        );
    }
    Ok(())
}
/// Prints a human-readable comparison of two lockfiles to stdout.
///
/// Each section (target, Minecraft versions, loaders, projects) is only
/// printed when it actually differs; when nothing at all differs, a
/// "no differences" line is printed instead.
///
/// Fix: the "no differences" check now also accounts for a target change —
/// previously a target-only diff printed both the target line AND
/// "✓ No differences found".
#[expect(
    clippy::too_many_arguments,
    reason = "diff formatting requires all display parameters"
)]
fn print_terminal_diff(
    old: &LockFile,
    new: &LockFile,
    changes: &[ProjectChange],
    mc_added: &[&&String],
    mc_removed: &[&&String],
    old_loaders: &HashMap<String, String>,
    new_loaders: &HashMap<String, String>,
    verbose: bool,
) {
    println!("## Lockfile Comparison\n");
    // Target
    let target_changed = old.target != new.target;
    if target_changed {
        println!("Target: {:?} -> {:?}", old.target, new.target);
    }
    // MC versions
    if !mc_removed.is_empty() || !mc_added.is_empty() {
        println!("Minecraft Versions:");
        for v in mc_removed {
            println!(" - {v}");
        }
        for v in mc_added {
            println!(" + {v}");
        }
    }
    // Loaders: the header is printed lazily, on the first actual change.
    let mut loader_changes = false;
    for (name, old_ver) in old_loaders {
        if let Some(new_ver) = new_loaders.get(name) {
            if old_ver != new_ver {
                if !loader_changes {
                    println!("\nLoaders:");
                    loader_changes = true;
                }
                println!(" ~ {name}: {old_ver} -> {new_ver}");
            }
        } else {
            if !loader_changes {
                println!("\nLoaders:");
                loader_changes = true;
            }
            println!(" - {name}: {old_ver}");
        }
    }
    for (name, new_ver) in new_loaders {
        if !old_loaders.contains_key(name) {
            if !loader_changes {
                println!("\nLoaders:");
                loader_changes = true;
            }
            println!(" + {name}: {new_ver}");
        }
    }
    // Projects
    if !changes.is_empty() {
        println!("\nProjects:");
        for change in changes {
            match change.change_type {
                ChangeType::Added => {
                    print!(" + {}", change.name);
                    if verbose && let Some(file) = &change.new_file {
                        print!(" ({file})");
                    }
                    println!();
                },
                ChangeType::Removed => {
                    print!(" - {}", change.name);
                    if verbose && let Some(file) = &change.old_file {
                        print!(" ({file})");
                    }
                    println!();
                },
                ChangeType::Updated => {
                    print!(" ~ {}", change.name);
                    if verbose
                        && let (Some(old), Some(new)) = (&change.old_file, &change.new_file)
                    {
                        print!(" ({old} -> {new})");
                    }
                    println!();
                },
            }
        }
    }
    // Only claim "no differences" when *every* section, including the
    // target, is unchanged.
    if !target_changed
        && mc_removed.is_empty()
        && mc_added.is_empty()
        && !loader_changes
        && changes.is_empty()
    {
        println!("✓ No differences found");
    }
}
/// Writes the lockfile comparison as a fenced ```diff code block.
///
/// `verbose` adds file names to project entries.
///
/// Fix: in verbose mode, an `Updated` entry whose old or new file name was
/// missing used to be silently dropped from the output; it now falls back
/// to the plain `~ name` line.
///
/// # Errors
/// Fails when the output file cannot be written.
#[expect(
    clippy::too_many_arguments,
    reason = "diff markdown writer requires all context parameters"
)]
fn write_markdown_diff(
    path: &str,
    old: &LockFile,
    new: &LockFile,
    changes: &[ProjectChange],
    mc_added: &[&&String],
    mc_removed: &[&&String],
    old_loaders: &HashMap<String, String>,
    new_loaders: &HashMap<String, String>,
    verbose: bool,
    _header_size: usize,
) -> Result<()> {
    let mut content = String::new();
    content.push_str("```diff\n");
    // Metadata changes
    if old.target != new.target {
        let _ = writeln!(content, "- Target: {:?}", old.target);
        let _ = writeln!(content, "+ Target: {:?}", new.target);
    }
    if !mc_removed.is_empty() || !mc_added.is_empty() {
        content.push_str("\nMinecraft Versions:\n");
        for v in mc_removed {
            let _ = writeln!(content, "- {v}");
        }
        for v in mc_added {
            let _ = writeln!(content, "+ {v}");
        }
    }
    // Loaders: version bumps as -/+ pairs, removals/additions on their own.
    for (name, old_ver) in old_loaders {
        if let Some(new_ver) = new_loaders.get(name) {
            if old_ver != new_ver {
                let _ = writeln!(content, "- {name}: {old_ver}");
                let _ = writeln!(content, "+ {name}: {new_ver}");
            }
        } else {
            let _ = writeln!(content, "- {name}: {old_ver}");
        }
    }
    for (name, new_ver) in new_loaders {
        if !old_loaders.contains_key(name) {
            let _ = writeln!(content, "+ {name}: {new_ver}");
        }
    }
    // Projects
    if !changes.is_empty() {
        content.push_str("\nProjects:\n");
        for change in changes {
            match change.change_type {
                ChangeType::Added => {
                    let _ = write!(content, "+ {}", change.name);
                    if verbose && let Some(file) = &change.new_file {
                        let _ = write!(content, " ({file})");
                    }
                    content.push('\n');
                },
                ChangeType::Removed => {
                    let _ = write!(content, "- {}", change.name);
                    if verbose && let Some(file) = &change.old_file {
                        let _ = write!(content, " ({file})");
                    }
                    content.push('\n');
                },
                ChangeType::Updated => {
                    if verbose
                        && let (Some(old), Some(new)) =
                            (&change.old_file, &change.new_file)
                    {
                        let _ = writeln!(content, "- {} ({})", change.name, old);
                        let _ = writeln!(content, "+ {} ({})", change.name, new);
                    } else {
                        // Non-verbose, or a file name is unknown: still
                        // record the update so the entry is never lost.
                        let _ = writeln!(content, "~ {}", change.name);
                    }
                },
            }
        }
    }
    content.push_str("```\n");
    fs::write(path, content)?;
    println!("Diff exported to {path}");
    Ok(())
}
/// Writes the lockfile comparison as a regular Markdown document.
///
/// `header_size` sets the heading level (capped at 5 so the generated
/// sub-headings, which append one extra `#`, stay within Markdown's six
/// levels); `verbose` adds file names to project entries.
///
/// # Errors
/// Fails when the output file cannot be written.
#[expect(
    clippy::too_many_arguments,
    reason = "diff markdown writer requires all context parameters"
)]
fn write_markdown(
    path: &str,
    old: &LockFile,
    new: &LockFile,
    changes: &[ProjectChange],
    mc_added: &[&&String],
    mc_removed: &[&&String],
    old_loaders: &HashMap<String, String>,
    new_loaders: &HashMap<String, String>,
    verbose: bool,
    header_size: usize,
) -> Result<()> {
    let header = "#".repeat(header_size.min(5));
    let mut content = String::new();
    let _ = write!(content, "{header} Lockfile Comparison\n\n");
    // Target
    if old.target != new.target {
        let _ = write!(
            content,
            "**Target:** {:?} → {:?}\n\n",
            old.target, new.target
        );
    }
    // MC versions
    if !mc_removed.is_empty() || !mc_added.is_empty() {
        let _ = write!(content, "{header} Minecraft Versions\n\n");
        for v in mc_removed {
            let _ = writeln!(content, "- ~~{v}~~");
        }
        for v in mc_added {
            let _ = writeln!(content, "- **{v}** (new)");
        }
        content.push('\n');
    }
    // Loaders: buffered separately so the section header is only emitted
    // when there is at least one change.
    let mut has_loader_changes = false;
    let mut loader_content = String::new();
    for (name, old_ver) in old_loaders {
        if let Some(new_ver) = new_loaders.get(name) {
            if old_ver != new_ver {
                has_loader_changes = true;
                let _ = writeln!(loader_content, "- **{name}:** {old_ver} → {new_ver}");
            }
        } else {
            has_loader_changes = true;
            let _ = writeln!(loader_content, "- ~~{name}: {old_ver}~~");
        }
    }
    for (name, new_ver) in new_loaders {
        if !old_loaders.contains_key(name) {
            has_loader_changes = true;
            let _ = writeln!(loader_content, "- **{name}: {new_ver}** (new)");
        }
    }
    if has_loader_changes {
        let _ = write!(content, "{header} Loaders\n\n");
        content.push_str(&loader_content);
        content.push('\n');
    }
    // Projects, grouped into Added / Removed / Updated sub-sections.
    if !changes.is_empty() {
        let _ = write!(content, "{header} Projects\n\n");
        let added: Vec<_> = changes
            .iter()
            .filter(|c| matches!(c.change_type, ChangeType::Added))
            .collect();
        let removed: Vec<_> = changes
            .iter()
            .filter(|c| matches!(c.change_type, ChangeType::Removed))
            .collect();
        let updated: Vec<_> = changes
            .iter()
            .filter(|c| matches!(c.change_type, ChangeType::Updated))
            .collect();
        if !added.is_empty() {
            // "{header}#" produces a one-level-deeper Markdown heading.
            let _ = write!(content, "{}# Added ({})\n\n", header, added.len());
            for change in added {
                let _ = write!(content, "- **{}**", change.name);
                if verbose && let Some(file) = &change.new_file {
                    let _ = write!(content, " ({file})");
                }
                content.push('\n');
            }
            content.push('\n');
        }
        if !removed.is_empty() {
            let _ = write!(content, "{}# Removed ({})\n\n", header, removed.len());
            for change in removed {
                let _ = write!(content, "- ~~{}~~", change.name);
                if verbose && let Some(file) = &change.old_file {
                    let _ = write!(content, " ({file})");
                }
                content.push('\n');
            }
            content.push('\n');
        }
        if !updated.is_empty() {
            let _ = write!(content, "{}# Updated ({})\n\n", header, updated.len());
            for change in updated {
                let _ = write!(content, "- **{}**", change.name);
                if verbose
                    && let (Some(old), Some(new)) = (&change.old_file, &change.new_file)
                {
                    let _ = write!(content, " ({old} → {new})");
                }
                content.push('\n');
            }
            content.push('\n');
        }
    }
    fs::write(path, content)?;
    println!("Diff exported to {path}");
    Ok(())
}

// ---- diff-viewer extraction artifact: next file begins (hunk: @ -0,0 +1,316 @@) ----
use std::path::Path;
use crate::{
cli::ExportArgs,
error::{PakkerError, Result},
export::Exporter,
ipc::{IpcCoordinator, OperationType},
model::{Config, LockFile, fork::LocalConfig},
utils::hash::compute_sha256_bytes,
};
/// Exports the modpack to one or more distributable archives.
///
/// Coordinates with other pakker processes via IPC, resolves which
/// lockfile to use (local, merged fork parent+local, or the parent's as a
/// fallback), then runs either a single-profile or an all-profiles export.
///
/// # Errors
/// Fails when no lockfile can be found, when IPC coordination times out,
/// or when the export itself fails.
#[expect(clippy::future_not_send, reason = "not required to be Send")]
pub async fn execute(
    args: ExportArgs,
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<()> {
    if let Some(ref profile) = args.profile {
        log::info!("Exporting with profile: {profile}");
    } else {
        log::info!("Exporting all profiles");
    }
    // Handle --no-server flag
    if args.no_server {
        log::info!("Server content will be excluded from export");
    }
    // Handle --show-io-errors flag
    // NOTE(review): `show_io_errors` is only logged here; nothing else in
    // this function consumes it — confirm it is honored downstream.
    let show_io_errors = args.show_io_errors;
    if show_io_errors {
        log::info!("IO errors will be shown during export");
    }
    // Load helpers take directories, so strip the file names.
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
    // IPC coordination - prevent concurrent operations on the same modpack
    let ipc = IpcCoordinator::new(config_dir)?;
    let ipc_timeout = std::time::Duration::from_secs(60);
    // Check for conflicting export operations
    let conflicting = ipc.get_running_operations(OperationType::Export);
    if !conflicting.is_empty() {
        log::info!(
            "Waiting for conflicting operations to complete: {:?}",
            conflicting
                .iter()
                .map(|op| (op.id.clone(), op.pid))
                .collect::<Vec<_>>()
        );
        ipc
            .wait_for_conflicts(OperationType::Export, ipc_timeout)
            .await?;
    }
    // Register this export operation (released when the guard drops)
    let _op_guard = ipc.register_operation(OperationType::Export)?;
    // Load config to check for fork configuration
    let config = Config::load(config_dir)?;
    let local_config = LocalConfig::load(config_dir).ok();
    // Resolve the lockfile to export from. Three cases:
    //   1. fork with parent  -> merge parent + local lockfiles
    //   2. no fork           -> local lockfile
    //   3. no local config   -> local lockfile, else parent's as fallback
    let lockfile = if let Some(local_cfg) = &local_config {
        if local_cfg.parent.is_some() {
            log::info!("Fork detected - merging parent and local lockfiles");
            // Locate the parent clone; pakker-lock.json is preferred over
            // the legacy pakku-lock.json in each candidate directory.
            let parent_paths = [".pakku/parent", ".pakker/parent"];
            let mut parent_lockfile_path = None;
            let mut lockfile_name = "pakku-lock.json";
            for parent_dir in &parent_paths {
                // Try pakker-lock.json first
                let check_path = Path::new(parent_dir).join("pakker-lock.json");
                if check_path.exists() {
                    parent_lockfile_path = Some(parent_dir);
                    lockfile_name = "pakker-lock.json";
                    break;
                }
                // Fall back to pakku-lock.json
                let check_path = Path::new(parent_dir).join("pakku-lock.json");
                if check_path.exists() {
                    parent_lockfile_path = Some(parent_dir);
                    lockfile_name = "pakku-lock.json";
                    break;
                }
            }
            if let Some(parent_dir) = parent_lockfile_path {
                // Load parent lockfile
                let parent_lockfile = LockFile::load(Path::new(parent_dir))?;
                // Verify parent lockfile hash for integrity; a mismatch is
                // only warned about, the export still proceeds.
                if let Some(stored_hash) = &local_cfg.parent_lock_hash {
                    let parent_lock_path = Path::new(parent_dir).join(lockfile_name);
                    let parent_lock_content = std::fs::read(&parent_lock_path)?;
                    let computed_hash = compute_sha256_bytes(&parent_lock_content);
                    if &computed_hash != stored_hash {
                        log::warn!(
                            "Parent lockfile hash mismatch - parent may have changed since \
                             last sync"
                        );
                        log::warn!("Expected: {stored_hash}, Got: {computed_hash}");
                    }
                }
                // Load local lockfile if it exists
                if lockfile_path.exists() {
                    log::info!("Merging parent lockfile with local overrides");
                    let local_lockfile =
                        LockFile::load_with_validation(lockfile_dir, false)?;
                    // Merge: start with parent, override with local
                    merge_lockfiles(parent_lockfile, &local_lockfile, local_cfg)
                } else {
                    log::info!("No local lockfile - using parent lockfile");
                    parent_lockfile
                }
            } else {
                return Err(PakkerError::IoError(std::io::Error::new(
                    std::io::ErrorKind::NotFound,
                    "Fork configured but parent lockfile not found",
                )));
            }
        } else {
            // No fork, use local lockfile
            if lockfile_path.exists() {
                LockFile::load(lockfile_dir)?
            } else {
                return Err(PakkerError::IoError(std::io::Error::new(
                    std::io::ErrorKind::NotFound,
                    "No lockfile found",
                )));
            }
        }
    } else {
        // No local config, try local lockfile or fall back to parent
        if lockfile_path.exists() {
            LockFile::load(lockfile_dir)?
        } else {
            // Try parent's lockfile as fallback (same discovery order as
            // the fork branch above)
            let parent_paths = [".pakku/parent", ".pakker/parent"];
            let mut parent_lockfile = None;
            let mut lockfile_name = "pakku-lock.json";
            for parent_dir in &parent_paths {
                // Try pakker-lock.json first
                let lockfile_path_check =
                    Path::new(parent_dir).join("pakker-lock.json");
                if lockfile_path_check.exists() {
                    parent_lockfile = Some(parent_dir);
                    lockfile_name = "pakker-lock.json";
                    break;
                }
                // Fall back to pakku-lock.json
                let lockfile_path_check = Path::new(parent_dir).join("pakku-lock.json");
                if lockfile_path_check.exists() {
                    parent_lockfile = Some(parent_dir);
                    lockfile_name = "pakku-lock.json";
                    break;
                }
            }
            match parent_lockfile {
                Some(parent_dir) => {
                    log::info!(
                        "Using parent's lockfile ({lockfile_name}) from {parent_dir}"
                    );
                    LockFile::load(Path::new(parent_dir))?
                },
                None => {
                    return Err(PakkerError::IoError(std::io::Error::new(
                        std::io::ErrorKind::NotFound,
                        "No lockfile found (neither local nor parent's)",
                    )));
                },
            }
        }
    };
    // Determine output path: --pakker-layout forces "build", otherwise
    // --output or the "exports" default.
    let output_path = if args.pakker_layout {
        "build"
    } else {
        args.output.as_deref().unwrap_or("exports")
    };
    // Create exporter
    let exporter = Exporter::new(".");
    // Export based on profile argument
    if let Some(profile_name) = args.profile {
        // Single profile export (backwards compatible)
        let output_file = exporter
            .export(&profile_name, &lockfile, &config, Path::new(output_path))
            .await?;
        println!("Export complete: {}", output_file.display());
    } else {
        // Multi-profile export (Pakker-compatible default behavior)
        let output_files = exporter
            .export_all_profiles(&lockfile, &config, Path::new(output_path))
            .await?;
        println!("\nExported {} files:", output_files.len());
        for output_file in output_files {
            println!(" - {}", output_file.display());
        }
    }
    Ok(())
}
/// Merges parent lockfile with local lockfile.
///
/// Parent projects are used as the base; local projects override parent
/// projects with the same slug. Target, Minecraft versions, loaders, and
/// the lockfile version are always taken from the parent. Parent projects
/// whose slug or name appears in the local config's `excludes` list are
/// dropped, and per-project attribute overrides from the local config
/// (type, side, update strategy, redistributable, subpath, aliases,
/// export) are applied to the surviving parent entries. All local
/// projects are then appended unconditionally.
fn merge_lockfiles(
    parent: LockFile,
    local: &LockFile,
    local_config: &LocalConfig,
) -> LockFile {
    // Seed the merged lockfile with the parent's metadata.
    let mut merged = LockFile {
        target: parent.target, // Use parent target
        mc_versions: parent.mc_versions, // Use parent MC versions
        loaders: parent.loaders, // Use parent loaders
        projects: Vec::new(),
        lockfile_version: parent.lockfile_version,
    };
    // Collect local project slugs for override detection
    let mut local_slugs = std::collections::HashSet::new();
    for project in &local.projects {
        for slug in project.slug.values() {
            local_slugs.insert(slug.clone());
        }
    }
    // Collect excluded slugs from local config
    let excluded: std::collections::HashSet<_> =
        local_config.excludes.iter().collect();
    // Add parent projects that are NOT overridden by local and NOT excluded
    let mut parent_kept = 0usize;
    for parent_project in &parent.projects {
        let is_overridden = parent_project
            .slug
            .values()
            .any(|slug| local_slugs.contains(slug));
        // Exclusion matches either a slug or a display name.
        let is_excluded = parent_project
            .slug
            .values()
            .any(|slug| excluded.contains(slug))
            || parent_project
                .name
                .values()
                .any(|name| excluded.contains(name));
        if is_overridden || is_excluded {
            continue;
        }
        let mut project = parent_project.clone();
        // Apply local config attribute overrides (side, type, etc.).
        // An override entry matches by slug, display name, or pakku_id;
        // only the first matching entry is applied (note the `break`).
        for (key, local_proj_cfg) in &local_config.projects {
            let matches = project.slug.values().any(|s| s == key)
                || project.name.values().any(|n| n == key)
                || project.pakku_id.as_ref() == Some(key);
            if matches {
                if let Some(t) = local_proj_cfg.r#type {
                    project.r#type = t;
                }
                if let Some(s) = local_proj_cfg.side {
                    project.side = s;
                }
                if let Some(us) = local_proj_cfg.update_strategy {
                    project.update_strategy = us;
                }
                if let Some(r) = local_proj_cfg.redistributable {
                    project.redistributable = r;
                }
                if let Some(ref sp) = local_proj_cfg.subpath {
                    project.subpath = Some(sp.clone());
                }
                if let Some(ref aliases) = local_proj_cfg.aliases {
                    project.aliases = aliases.iter().cloned().collect();
                }
                if let Some(e) = local_proj_cfg.export {
                    project.export = e;
                }
                break;
            }
        }
        merged.projects.push(project);
        parent_kept += 1;
    }
    // Add all local projects
    merged.projects.extend(local.projects.clone());
    println!(
        "Merged fork: {} parent + {} local = {} total projects",
        parent_kept,
        local.projects.len(),
        merged.projects.len()
    );
    merged
}

// ---- diff-viewer extraction artifact: next file begins (hunk: @ -0,0 +1,51 @@) ----
use std::path::{Path, PathBuf};
use crate::{
cli::FetchArgs,
error::Result,
fetch::Fetcher,
ipc::{IpcCoordinator, OperationGuard, OperationType},
model::{Config, LockFile},
};
/// Downloads all project files referenced by the lockfile.
///
/// Waits for any conflicting fetch already running on this modpack, then
/// registers its own fetch operation (released by the guard on drop)
/// before delegating the actual downloads to [`Fetcher`].
///
/// # Errors
/// Fails when the lockfile/config cannot be loaded, IPC coordination
/// times out, or the fetch itself fails.
pub async fn execute(
    args: FetchArgs,
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<()> {
    // `load` takes a directory, so strip the file-name component.
    let lockfile =
        LockFile::load(lockfile_path.parent().unwrap_or_else(|| Path::new(".")))?;
    let config =
        Config::load(config_path.parent().unwrap_or_else(|| Path::new(".")))?;

    // Coordinate with other pakker processes working on this modpack.
    let working_dir = PathBuf::from(".");
    let coordinator = IpcCoordinator::new(&working_dir)?;
    if coordinator.has_running_operation(OperationType::Fetch) {
        // Block until conflicting fetches finish (default 300 s timeout).
        let timeout = std::time::Duration::from_secs(args.timeout.unwrap_or(300));
        coordinator
            .wait_for_conflicts(OperationType::Fetch, timeout)
            .await?;
    }
    let operation_id = coordinator.register_operation(OperationType::Fetch)?;
    let _guard = OperationGuard::new(coordinator, operation_id);

    // Progress indicators are handled inside the fetcher itself.
    Fetcher::new(".")
        .with_shelve(args.shelve)
        .with_retry(args.retry)
        .fetch_all(&lockfile, &config)
        .await?;
    println!("Fetch complete");
    Ok(())
}

// ---- diff-viewer extraction artifact: next file begins (hunk: @ -0,0 +1,899 @@) ----
use std::{
collections::{HashMap, HashSet},
fs,
io::Write,
path::Path,
time::Duration,
};
use indicatif::{ProgressBar, ProgressStyle};
use crate::{
cli::ForkArgs,
error::PakkerError,
git::{self, VcsType},
model::{
LockFile,
fork::{ForkIntegrity, LocalConfig, ParentConfig, RefType, hash_content},
},
};
/// Name of the hidden pakku metadata directory.
const PAKKU_DIR: &str = ".pakku";
/// Subdirectory (under [`PAKKU_DIR`]) where the parent pack clone lives.
const PARENT_DIR_NAME: &str = "parent";

/// Returns the relative path of the cloned parent modpack
/// (".pakku/parent").
fn parent_dir() -> String {
    [PAKKU_DIR, PARENT_DIR_NAME].join("/")
}
/// Main entry point for fork commands.
///
/// Dispatches each `pakker fork` subcommand to its handler. All handlers
/// operate relative to the current working directory.
///
/// # Errors
/// Propagates whatever the selected subcommand handler returns.
pub fn execute(args: &ForkArgs) -> Result<(), PakkerError> {
    match &args.subcommand {
        crate::cli::ForkSubcommand::Init {
            git_url,
            from_current,
            from_path,
            ref_name,
            ref_type,
            remote,
        } => {
            execute_init(
                git_url.clone(),
                *from_current,
                from_path.clone(),
                ref_name.clone(),
                *ref_type,
                remote.clone(),
            )
        },
        crate::cli::ForkSubcommand::Set {
            git_url,
            ref_name,
            ref_type,
            remote,
        } => {
            execute_set(git_url.clone(), ref_name.clone(), *ref_type, remote.clone())
        },
        crate::cli::ForkSubcommand::Show => execute_show(),
        crate::cli::ForkSubcommand::Unset => execute_unset(),
        crate::cli::ForkSubcommand::Sync => execute_sync(),
        crate::cli::ForkSubcommand::Promote { projects } => {
            execute_promote(projects)
        },
        crate::cli::ForkSubcommand::Exclude { projects } => {
            execute_exclude(projects)
        },
        crate::cli::ForkSubcommand::Include { projects } => {
            execute_include(projects)
        },
    }
}
/// Validates a parent repository location.
///
/// Accepts network git URLs (`https://`, `git@`, `ssh://`), `file://`
/// URLs, or absolute filesystem paths (tests use local bare repos);
/// anything else is rejected with [`PakkerError::Fork`].
fn validate_git_url(url: &str) -> Result<(), PakkerError> {
    const ACCEPTED_PREFIXES: [&str; 4] =
        ["https://", "git@", "ssh://", "file://"];
    let accepted = url.starts_with('/')
        || ACCEPTED_PREFIXES.iter().any(|p| url.starts_with(p));
    if !accepted {
        return Err(PakkerError::Fork(format!(
            "Invalid git URL: {url}. Expected https://, git@, ssh://, file://, or \
             absolute filesystem path."
        )));
    }
    Ok(())
}
fn execute_init(
git_url: Option<String>,
from_current: bool,
from_path: Option<String>,
ref_name: Option<String>,
ref_type: Option<RefType>,
remote: Option<String>,
) -> Result<(), PakkerError> {
let config_dir = Path::new(".");
// Validate that pakker.json exists for fork operations
let pakker_json_path = config_dir.join("pakker.json");
let pakku_json_path = config_dir.join("pakku.json");
if !pakker_json_path.exists() && pakku_json_path.exists() {
return Err(PakkerError::Fork(
"Forking is a pakker-specific feature and requires pakker.json. \nFound \
pakku.json but not pakker.json. Please migrate to pakker.json to use \
fork functionality.\nYou can convert your pakku.json to pakker.json by \
renaming the file."
.to_string(),
));
}
let mut local_config = LocalConfig::load(config_dir).unwrap_or_default();
// Check if parent already configured
if local_config.parent.is_some()
&& let Some(parent) = &local_config.parent
{
return Err(PakkerError::Fork(format!(
"Parent already configured: {}",
parent.id
)));
}
// Resolve defaults early to avoid shadowing/confusion
let resolved_remote = remote.unwrap_or_else(|| "origin".to_string());
let resolved_ref = ref_name.unwrap_or_else(|| "main".to_string());
// Parent path (where we keep the cloned parent)
let parent_path_str = parent_dir();
// Branch: from_current, from_path, or git_url
let mut cloned_from_local = false;
let url = if from_current {
// Detect git URL from current directory
if !git::is_git_repository(config_dir) {
return Err(PakkerError::Fork(
"Not a git repository. Use --git-url or run 'git init' first."
.to_string(),
));
}
git::get_remote_url(config_dir, &resolved_remote)?
} else if let Some(fp) = from_path {
// Use provided local path as source; infer upstream remote from it
let path = Path::new(&fp);
if !git::is_git_repository(path) {
return Err(PakkerError::Fork(format!(
"Provided path is not a git repository: {}",
path.display()
)));
}
// Infer upstream remote URL from the existing local clone
let upstream_url = git::get_primary_remote_url(path)?;
// Reject file:// or non-network remotes
validate_git_url(&upstream_url)?;
// Ensure working tree is clean
let vcs_type = git::detect_vcs_type(path);
if git::repo_has_uncommitted_changes(path)? {
let error_msg = match vcs_type {
VcsType::Git => {
"Local repository at --from-path has uncommitted changes. Commit or \
stash them before proceeding."
},
VcsType::Jujutsu => {
"Local repository at --from-path has uncommitted changes. Run 'jj \
commit' to save changes before proceeding."
},
VcsType::None => {
"Local repository at --from-path has uncommitted changes. Please \
clean the directory before proceeding."
},
};
return Err(PakkerError::Fork(error_msg.to_string()));
}
// VCS-specific validation
match vcs_type {
VcsType::Git => {
// Attempt lightweight fetch of remote refs to refresh remote tracking
match git::fetch_remote_light(path, &resolved_remote, &resolved_ref) {
Ok(()) => println!("Fetched remote refs for verification"),
Err(e) => {
log::warn!("Lightweight fetch from upstream failed: {e}");
println!(
"Warning: could not perform lightweight fetch from upstream. \
Proceeding with local clone; subsequent sync may require \
network."
);
},
}
// Compare local ref vs remote ref
let remote_ref = format!("{resolved_remote}/{resolved_ref}");
match git::ahead_behind(path, &resolved_ref, &remote_ref) {
Ok((ahead, _behind)) => {
if ahead > 0 {
return Err(PakkerError::Fork(format!(
"Local repository at {} has {} commits not present on \
upstream {}. Push or use --git-url if you intend to use an \
upstream that contains these commits.",
path.display(),
ahead,
upstream_url
)));
}
},
Err(e) => {
log::warn!("Could not compute ahead/behind: {e}");
},
}
},
VcsType::Jujutsu => {
// For jujutsu, we skip git-specific remote validation since jj has
// different synchronization patterns
println!(
"Warning: Skipping remote validation for jujutsu repository. Ensure \
your jj repo is in sync with remote before proceeding."
);
// Check if there are any changes that haven't been pushed to the remote
if let Ok(output) = std::process::Command::new("jj")
.args(["log", "--limit", "1", "--template", ""])
.current_dir(path)
.output()
&& !output.stdout.is_empty()
{
println!(
"Note: Jujutsu repository detected. Make sure to run 'jj git \
push' to sync changes with remote if needed."
);
}
},
VcsType::None => {
// No VCS-specific validation needed
},
}
// Compute parent lock/config hashes for reproducibility
let parent_lock_path = if path.join("pakker-lock.json").exists() {
path.join("pakker-lock.json")
} else {
path.join("pakku-lock.json")
};
if parent_lock_path.exists() {
let lock_content =
fs::read_to_string(&parent_lock_path).map_err(|e| {
PakkerError::Fork(format!("Failed to read parent lock file: {e}"))
})?;
let lock_hash = hash_content(&lock_content);
local_config.parent_lock_hash = Some(lock_hash);
}
let parent_config_path = if path.join("pakker.json").exists() {
path.join("pakker.json")
} else {
path.join("pakku.json")
};
if parent_config_path.exists() {
let config_content =
fs::read_to_string(&parent_config_path).map_err(|e| {
PakkerError::Fork(format!("Failed to read parent config: {e}"))
})?;
let config_hash = hash_content(&config_content);
local_config.parent_config_hash = Some(config_hash);
}
// Now clone from the local path into .pakku/parent, this avoids
// re-downloading objects
let parent_path = Path::new(&parent_path_str);
let spinner = ProgressBar::new_spinner();
spinner.set_style(
ProgressStyle::default_spinner()
.template("{spinner:.green} {msg}")
.expect("spinner template is valid"),
);
spinner.enable_steady_tick(Duration::from_millis(80));
spinner.set_message(format!(
"Cloning parent repository from local path {}...",
path.display()
));
git::clone_repository(&fp, parent_path, &resolved_ref, None)?;
spinner.finish_and_clear();
// Ensure the cloned repo's origin is set to the upstream URL (not the local
// path)
git::set_remote_url(parent_path, &resolved_remote, &upstream_url)?;
// Mark that we've already cloned from local
cloned_from_local = true;
// We will persist upstream_url as the canonical parent id
upstream_url
} else if let Some(url) = git_url {
url
} else {
return Err(PakkerError::Fork(
"Either --git-url, --from-current or --from-path must be specified"
.to_string(),
));
};
let parent_path = Path::new(&parent_path_str);
// If we did not already clone from local, perform network clone and checks
if cloned_from_local {
println!(
"Parent repository was cloned from local path; skipping network clone."
);
} else {
// Check if parent directory already exists and is not empty
if parent_path.exists() {
let is_empty = parent_path
.read_dir()
.map(|mut entries| entries.next().is_none())
.unwrap_or(false);
if !is_empty {
return Err(PakkerError::Fork(format!(
"Directory not empty: {}",
parent_path.display()
)));
}
}
let spinner = ProgressBar::new_spinner();
spinner.set_style(
ProgressStyle::default_spinner()
.template("{spinner:.green} {msg}")
.expect("spinner template is valid"),
);
spinner.enable_steady_tick(Duration::from_millis(80));
spinner.set_message(format!(
"Cloning parent repository: {url} ({resolved_ref})"
));
git::clone_repository(&url, parent_path, &resolved_ref, None)?;
spinner.finish_and_clear();
}
let commit_sha = git::get_commit_sha(parent_path, &resolved_ref)?;
// Detect ref type if not specified
let resolved_ref_type = if let Some(rt) = ref_type {
rt
} else {
git::resolve_ref_type(parent_path, &resolved_ref)?
};
let parent_config = ParentConfig {
type_: "git".to_string(),
id: url.clone(),
version: Some(commit_sha[..8].to_string()),
ref_: resolved_ref.clone(),
ref_type: resolved_ref_type,
remote_name: resolved_remote,
};
local_config.parent = Some(parent_config);
local_config.save(config_dir)?;
// Add .pakku/parent to .gitignore
add_to_gitignore()?;
println!();
println!("✓ Fork initialized successfully");
println!(" Parent: {url}");
println!(" Ref: {} ({})", resolved_ref, match resolved_ref_type {
RefType::Branch => "branch",
RefType::Tag => "tag",
RefType::Commit => "commit",
});
println!(" Commit: {}", &commit_sha[..8]);
println!();
println!("Run 'pakku fork sync' to sync with parent.");
Ok(())
}
/// Update the stored fork-parent configuration in place.
///
/// The ref name is always replaced; URL, ref type and remote name are only
/// changed when the corresponding option is supplied. Fails if no parent
/// has been configured yet.
fn execute_set(
    git_url: Option<String>,
    ref_name: String,
    ref_type: Option<RefType>,
    remote: Option<String>,
) -> Result<(), PakkerError> {
    let config_dir = Path::new(".");
    let mut local_config = LocalConfig::load(config_dir)?;

    // Bail out unless a parent has already been configured via `fork init`.
    let Some(mut parent) = local_config.parent else {
        return Err(PakkerError::Fork(
            "No parent configured. Run 'pakku fork init' first.".to_string(),
        ));
    };

    // Apply each optional override; a new URL must pass validation first.
    if let Some(url) = git_url {
        validate_git_url(&url)?;
        parent.id = url;
    }
    parent.ref_ = ref_name;
    if let Some(rt) = ref_type {
        parent.ref_type = rt;
    }
    if let Some(remote_name) = remote {
        parent.remote_name = remote_name;
    }

    local_config.parent = Some(parent.clone());
    local_config.save(config_dir)?;

    // Human-readable label for the configured ref kind.
    let ref_kind = match parent.ref_type {
        RefType::Branch => "branch",
        RefType::Tag => "tag",
        RefType::Commit => "commit",
    };
    println!("✓ Fork configuration updated");
    println!(" Parent: {}", parent.id);
    println!(" Ref: {} ({})", parent.ref_, ref_kind);
    println!();
    println!("Run 'pakku fork sync' to sync with new configuration.");
    Ok(())
}
/// Print the current fork configuration and any per-project overrides.
///
/// Shows a short notice instead when no parent is configured.
fn execute_show() -> Result<(), PakkerError> {
    let config_dir = Path::new(".");
    let local_config = LocalConfig::load(config_dir)?;

    // Nothing to display without a configured parent.
    let Some(parent) = local_config.parent else {
        println!("No fork configured.");
        println!("Run 'pakku fork init' to initialize a fork.");
        return Ok(());
    };

    let ref_kind = match parent.ref_type {
        RefType::Branch => "branch",
        RefType::Tag => "tag",
        RefType::Commit => "commit",
    };
    println!("Fork Configuration:");
    println!(" Parent URL: {}", parent.id);
    println!(" Type: {ref_kind}");
    println!(" Ref: {}", parent.ref_);
    println!(" Remote: {}", parent.remote_name);
    match parent.version {
        Some(version) => println!(" Last synced commit: {version}"),
        None => println!(" Last synced commit: never synced"),
    }

    // List project-level overrides, each with its non-default settings.
    if !local_config.projects.is_empty() {
        println!();
        println!("Project Overrides ({}):", local_config.projects.len());
        for (slug, proj_config) in &local_config.projects {
            print!(" - {slug}");
            let mut details = Vec::new();
            if let Some(version) = &proj_config.version {
                details.push(format!("version={version}"));
            }
            if let Some(side) = &proj_config.side {
                details.push(format!("side={side}"));
            }
            if let Some(strategy) = &proj_config.update_strategy {
                details.push(format!("updateStrategy={strategy}"));
            }
            if !details.is_empty() {
                print!(" ({})", details.join(", "));
            }
            println!();
        }
    }
    Ok(())
}
/// Remove the fork configuration and delete the cached parent clone.
///
/// Prompts for confirmation before clearing the parent entry and the
/// stored parent lock/config hashes. Accepts "y" or "yes"
/// (case-insensitive); any other input — including EOF — cancels.
/// Previously only the single letter "y" was accepted, which silently
/// rejected a typed "yes" despite the `[y/N]` prompt.
fn execute_unset() -> Result<(), PakkerError> {
    let config_dir = Path::new(".");
    let mut local_config = LocalConfig::load(config_dir)?;
    if local_config.parent.is_none() {
        println!("No fork configured.");
        return Ok(());
    }
    // Prompt for confirmation
    print!("Are you sure you want to remove fork configuration? [y/N] ");
    std::io::stdout().flush().map_err(PakkerError::IoError)?;
    let mut input = String::new();
    std::io::stdin()
        .read_line(&mut input)
        .map_err(PakkerError::IoError)?;
    let answer = input.trim();
    if !(answer.eq_ignore_ascii_case("y") || answer.eq_ignore_ascii_case("yes")) {
        println!("Cancelled.");
        return Ok(());
    }
    // Remove parent directory (the cached clone), if present
    let parent_path_str = parent_dir();
    let parent_path = Path::new(&parent_path_str);
    if parent_path.exists() {
        fs::remove_dir_all(parent_path).map_err(|e| {
            PakkerError::Fork(format!("Failed to remove parent directory: {e}"))
        })?;
    }
    // Clear parent configuration and the now-stale integrity hashes
    local_config.parent = None;
    local_config.parent_lock_hash = None;
    local_config.parent_config_hash = None;
    local_config.save(config_dir)?;
    println!("✓ Fork configuration removed");
    Ok(())
}
/// Snapshot parent lockfile as slug → first file name map
///
/// Returns an empty map when the parent has no lockfile (neither
/// `pakker-lock.json` nor `pakku-lock.json`) or when loading fails.
fn snapshot_parent_projects(
    parent_path: &Path,
) -> HashMap<String, Option<String>> {
    // Prefer the pakker-named lockfile, falling back to the pakku one.
    let pakker_lock = parent_path.join("pakker-lock.json");
    let lockfile_path = if pakker_lock.exists() {
        pakker_lock
    } else {
        parent_path.join("pakku-lock.json")
    };
    if !lockfile_path.exists() {
        return HashMap::new();
    }
    // Best-effort load: any validation/parse failure yields an empty map.
    let Ok(lockfile) = LockFile::load_with_validation(parent_path, false) else {
        return HashMap::new();
    };
    let mut snapshot = HashMap::new();
    for project in &lockfile.projects {
        // Use the first slug, then the first name, then an empty string.
        let slug = project
            .slug
            .values()
            .next()
            .cloned()
            .or_else(|| project.name.values().next().cloned())
            .unwrap_or_default();
        let file = project.files.first().map(|f| f.file_name.clone());
        snapshot.insert(slug, file);
    }
    snapshot
}
/// Synchronize the cached parent clone with its configured upstream ref.
///
/// Fetches and hard-resets the clone (or clones it fresh when missing),
/// refreshes the stored parent lock/config content hashes, records the
/// synced commit as the parent version, and prints a diff of parent
/// project additions/removals/updates based on each project's first
/// file name.
fn execute_sync() -> Result<(), PakkerError> {
    let config_dir = Path::new(".");
    let mut local_config = LocalConfig::load(config_dir)?;
    // A configured parent is required; borrow it immutably for the git work.
    let parent = local_config.parent.as_ref().ok_or_else(|| {
        PakkerError::Fork(
            "No parent configured. Run 'pakku fork init' first.".to_string(),
        )
    })?;
    let parent_path_str = parent_dir();
    let parent_path = Path::new(&parent_path_str);
    // Snapshot before update (empty map if the clone does not exist yet)
    let before = snapshot_parent_projects(parent_path);
    if parent_path.exists() {
        println!("Fetching parent updates...");
        git::fetch_updates(parent_path, &parent.remote_name, &parent.ref_, None)?;
        // Reset so the clone exactly matches the remote ref state.
        git::reset_to_ref(parent_path, &parent.remote_name, &parent.ref_)?;
    } else {
        println!("Parent repository not found. Cloning...");
        git::clone_repository(&parent.id, parent_path, &parent.ref_, None)?;
    }
    let commit_sha = git::get_commit_sha(parent_path, &parent.ref_)?;
    // Snapshot after update
    let after = snapshot_parent_projects(parent_path);
    let mut integrity = None;
    // Try pakker files first, fall back to pakku files
    let parent_lock_path = if parent_path.join("pakker-lock.json").exists() {
        parent_path.join("pakker-lock.json")
    } else {
        parent_path.join("pakku-lock.json")
    };
    let parent_config_path = if parent_path.join("pakker.json").exists() {
        parent_path.join("pakker.json")
    } else {
        parent_path.join("pakku.json")
    };
    // Hashes are only refreshed when the parent actually ships a lockfile;
    // a missing config hashes to the empty string's hash.
    if parent_lock_path.exists() {
        let lock_content = fs::read_to_string(&parent_lock_path).map_err(|e| {
            PakkerError::Fork(format!("Failed to read parent lock file: {e}"))
        })?;
        let lock_hash = hash_content(&lock_content);
        local_config.parent_lock_hash = Some(lock_hash);
        let config_content = if parent_config_path.exists() {
            fs::read_to_string(&parent_config_path).map_err(|e| {
                PakkerError::Fork(format!("Failed to read parent config: {e}"))
            })?
        } else {
            String::new()
        };
        let config_hash = hash_content(&config_content);
        local_config.parent_config_hash = Some(config_hash);
        integrity = Some(ForkIntegrity::new(
            local_config.parent_lock_hash.clone().unwrap_or_default(),
            commit_sha.clone(),
            local_config.parent_config_hash.clone().unwrap_or_default(),
        ));
    }
    // The integrity record is currently only logged, not persisted here.
    if let Some(ref integrity_data) = integrity {
        log::info!(
            "Parent integrity verified at timestamp {}",
            integrity_data.verified_at
        );
    }
    // Record the short SHA as the last-synced parent version.
    if let Some(parent) = local_config.parent.as_mut() {
        parent.version = Some(commit_sha[..8].to_string());
    }
    local_config.save(config_dir)?;
    println!();
    println!("✓ Parent sync complete");
    println!(" Commit: {}", &commit_sha[..8]);
    // Print diff of parent changes
    let before_keys: HashSet<_> = before.keys().collect();
    let after_keys: HashSet<_> = after.keys().collect();
    let added: Vec<_> = after_keys.difference(&before_keys).collect();
    let removed: Vec<_> = before_keys.difference(&after_keys).collect();
    // Projects present in both snapshots whose first file name changed.
    // NOTE(review): this only detects a change in the *first* file of a
    // project — confirm that is sufficient for the sync summary.
    let mut updated: Vec<(&String, &Option<String>, &Option<String>)> =
        Vec::new();
    for slug in before_keys.intersection(&after_keys) {
        if before[*slug] != after[*slug] {
            updated.push((slug, &before[*slug], &after[*slug]));
        }
    }
    if added.is_empty() && removed.is_empty() && updated.is_empty() {
        println!(" No changes in parent projects.");
    } else {
        println!();
        println!(" Parent project changes:");
        // Sort each bucket for deterministic output.
        let mut added: Vec<_> = added;
        added.sort();
        for slug in added {
            let file = after[*slug].as_deref().unwrap_or("?");
            println!(" + {slug} ({file})");
        }
        let mut removed: Vec<_> = removed;
        removed.sort();
        for slug in removed {
            let file = before[*slug].as_deref().unwrap_or("?");
            println!(" - {slug} ({file})");
        }
        updated.sort_by_key(|(slug, ..)| *slug);
        for (slug, old_file, new_file) in updated {
            let old = old_file.as_deref().unwrap_or("?");
            let new = new_file.as_deref().unwrap_or("?");
            println!(" ~ {slug}: {old} → {new}");
        }
    }
    println!();
    println!("Run 'pakku export' to merge changes from parent.");
    Ok(())
}
/// Copy selected projects from the parent lockfile into the local lockfile.
///
/// Each argument may match a project's slug, name, or pakku id. Promoted
/// projects become locally managed and override the parent's copy.
/// Promotion is all-or-nothing: if any requested project is missing from
/// the parent lockfile, the function errors out before saving.
fn execute_promote(projects: &[String]) -> Result<(), PakkerError> {
    let config_dir = Path::new(".");
    let local_config = LocalConfig::load(config_dir)?;
    if local_config.parent.is_none() {
        return Err(PakkerError::Fork(
            "No parent configured. Run 'pakku fork init' first.".to_string(),
        ));
    }
    if projects.is_empty() {
        return Err(PakkerError::Fork(
            "No projects specified. Usage: pakku fork promote <project>..."
                .to_string(),
        ));
    }
    // Load parent lockfile
    let parent_path_str = parent_dir();
    let parent_path = Path::new(&parent_path_str);
    if !parent_path.exists() {
        return Err(PakkerError::Fork(
            "Parent directory not found. Run 'pakku fork sync' first.".to_string(),
        ));
    }
    let parent_lockfile = LockFile::load_with_validation(parent_path, false)
        .map_err(|e| {
            PakkerError::Fork(format!("Failed to load parent lockfile: {e}"))
        })?;
    // Load or create local lockfile (pakker naming preferred over pakku)
    let lockfile_path = if config_dir.join("pakker-lock.json").exists() {
        config_dir.join("pakker-lock.json")
    } else {
        config_dir.join("pakku-lock.json")
    };
    let mut local_lockfile = if lockfile_path.exists() {
        LockFile::load_with_validation(config_dir, false).map_err(|e| {
            PakkerError::Fork(format!("Failed to load local lockfile: {e}"))
        })?
    } else {
        // Bootstrap from parent metadata
        LockFile {
            target: parent_lockfile.target,
            mc_versions: parent_lockfile.mc_versions.clone(),
            loaders: parent_lockfile.loaders.clone(),
            projects: Vec::new(),
            lockfile_version: parent_lockfile.lockfile_version,
        }
    };
    // Track which requested projects we found
    let mut promoted = Vec::new();
    let mut not_found = Vec::new();
    for project_arg in projects {
        // Match the argument against any slug, any display name, or the
        // project's pakku id.
        let found = parent_lockfile.projects.iter().find(|p| {
            p.slug.values().any(|s| s == project_arg)
                || p.name.values().any(|n| n == project_arg)
                || p.pakku_id.as_deref() == Some(project_arg)
        });
        if let Some(project) = found {
            // Skip if already in local lockfile
            let already_local = local_lockfile.projects.iter().any(|lp| {
                lp.slug
                    .values()
                    .any(|s| project.slug.values().any(|ps| s == ps))
            });
            if already_local {
                println!(" ~ {project_arg}: already in local lockfile, skipping");
                continue;
            }
            local_lockfile.add_project(project.clone());
            promoted.push(project_arg);
        } else {
            not_found.push(project_arg);
        }
    }
    // Any miss aborts before saving, so partial promotions are discarded.
    if !not_found.is_empty() {
        for name in &not_found {
            eprintln!(" ! {name}: not found in parent lockfile");
        }
        return Err(PakkerError::Fork(format!(
            "{} project(s) not found in parent lockfile",
            not_found.len()
        )));
    }
    if promoted.is_empty() {
        println!("No projects promoted (all already in local lockfile).");
        return Ok(());
    }
    local_lockfile.save(config_dir)?;
    println!("Promoted {} project(s) to local lockfile:", promoted.len());
    for name in &promoted {
        println!(" + {name}");
    }
    println!();
    println!(
        "These projects are now locally managed and will override the parent."
    );
    Ok(())
}
/// Add the given slugs to the fork's exclude list.
///
/// Already-excluded slugs are reported and skipped. The list is kept
/// sorted and the config is saved even when nothing new was added.
fn execute_exclude(projects: &[String]) -> Result<(), PakkerError> {
    let config_dir = Path::new(".");
    let mut local_config = LocalConfig::load(config_dir)?;
    if local_config.parent.is_none() {
        return Err(PakkerError::Fork(
            "No parent configured. Run 'pakku fork init' first.".to_string(),
        ));
    }
    // Collect only the slugs that were newly added.
    let mut added = Vec::new();
    for slug in projects {
        if local_config.excludes.contains(slug) {
            println!(" ~ {slug}: already excluded");
            continue;
        }
        local_config.excludes.push(slug.clone());
        added.push(slug);
    }
    // Keep the excludes list sorted for stable serialization.
    local_config.excludes.sort();
    local_config.save(config_dir)?;
    if added.is_empty() {
        return Ok(());
    }
    println!("Excluded {} project(s) from parent:", added.len());
    for slug in &added {
        println!(" - {slug}");
    }
    println!();
    println!("These parent projects will be omitted from exports.");
    Ok(())
}
/// Remove the given slugs from the fork's exclude list.
///
/// Slugs not present in the list are reported and skipped; the config is
/// saved regardless.
fn execute_include(projects: &[String]) -> Result<(), PakkerError> {
    let config_dir = Path::new(".");
    let mut local_config = LocalConfig::load(config_dir)?;
    if local_config.parent.is_none() {
        return Err(PakkerError::Fork(
            "No parent configured. Run 'pakku fork init' first.".to_string(),
        ));
    }
    // Drop each requested slug from the exclude list, tracking what changed.
    let mut removed = Vec::new();
    for slug in projects {
        match local_config.excludes.iter().position(|s| s == slug) {
            Some(pos) => {
                local_config.excludes.remove(pos);
                removed.push(slug);
            },
            None => println!(" ~ {slug}: not in excludes list"),
        }
    }
    local_config.save(config_dir)?;
    if !removed.is_empty() {
        println!("Re-included {} project(s) from parent:", removed.len());
        for slug in &removed {
            println!(" + {slug}");
        }
        println!();
        println!("These parent projects will be included in exports again.");
    }
    Ok(())
}
/// Ensure the parent clone directory is listed in `.gitignore`.
///
/// No-op when an equivalent entry is already present; creates the file
/// when missing. If the existing file does not end in a newline, one is
/// written first — previously the appended entry would fuse with the
/// file's last line.
fn add_to_gitignore() -> Result<(), PakkerError> {
    let gitignore_path = Path::new(".gitignore");
    let parent_dir = parent_dir();
    // Track whether the existing file ends mid-line so we can repair it.
    let mut needs_leading_newline = false;
    // Check if .gitignore exists and already contains the entry
    if gitignore_path.exists() {
        let content = fs::read_to_string(gitignore_path).map_err(|e| {
            PakkerError::Fork(format!("Failed to read .gitignore: {e}"))
        })?;
        if content.lines().any(|line| line.trim() == parent_dir) {
            return Ok(());
        }
        needs_leading_newline = !content.is_empty() && !content.ends_with('\n');
    }
    // Append to .gitignore
    let mut file = fs::OpenOptions::new()
        .create(true)
        .append(true)
        .open(gitignore_path)
        .map_err(|e| {
            PakkerError::Fork(format!("Failed to open .gitignore: {e}"))
        })?;
    if needs_leading_newline {
        // Terminate the existing last line before adding our entry.
        writeln!(file).map_err(|e| {
            PakkerError::Fork(format!("Failed to write to .gitignore: {e}"))
        })?;
    }
    writeln!(file, "{parent_dir}").map_err(|e| {
        PakkerError::Fork(format!("Failed to write to .gitignore: {e}"))
    })?;
    Ok(())
}

View file

@ -0,0 +1,415 @@
use std::{collections::HashMap, path::Path};
use crate::{
cli::ImportArgs,
error::{PakkerError, Result},
model::{Config, LockFile, Target},
ui_utils::prompt_yes_no,
};
/// Import a modpack archive, detecting its format from the archive contents.
///
/// Supports Modrinth packs (containing `modrinth.index.json`) and
/// CurseForge packs (containing `manifest.json`). Prompts before
/// overwriting existing pakku files unless `global_yes` is set.
pub async fn execute(
    args: ImportArgs,
    global_yes: bool,
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<()> {
    let skip_prompts = global_yes;
    log::info!("Importing modpack from {}", args.file);
    log::info!(
        "Dependency resolution: {}",
        if args.deps { "enabled" } else { "disabled" }
    );
    let path = Path::new(&args.file);
    if !path.exists() {
        return Err(PakkerError::FileNotFound(
            path.to_string_lossy().to_string(),
        ));
    }
    // Warn before clobbering an existing lockfile and/or config.
    let lockfile_exists = lockfile_path.exists();
    let config_exists = config_path.exists();
    if (lockfile_exists || config_exists) && !skip_prompts {
        let msg = match (lockfile_exists, config_exists) {
            (true, true) => {
                "Both pakku-lock.json and pakku.json exist. Importing will overwrite them. Continue?"
            },
            (true, false) => {
                "pakku-lock.json exists. Importing will overwrite it. Continue?"
            },
            _ => "pakku.json exists. Importing will overwrite it. Continue?",
        };
        if !prompt_yes_no(msg, false, skip_prompts)? {
            log::info!("Import cancelled by user");
            return Ok(());
        }
    }
    // Detect format by probing for each pack's index file inside the zip.
    let file = std::fs::File::open(path)?;
    let mut archive = zip::ZipArchive::new(file)?;
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
    if archive.by_name("modrinth.index.json").is_ok() {
        drop(archive);
        import_modrinth(path, lockfile_dir, config_dir).await
    } else if archive.by_name("manifest.json").is_ok() {
        drop(archive);
        import_curseforge(path, lockfile_dir, config_dir).await
    } else {
        Err(PakkerError::InvalidImportFile(
            "Unknown pack format".to_string(),
        ))
    }
}
/// Import a Modrinth `.mrpack` archive into a fresh lockfile and config.
///
/// Reads `modrinth.index.json`, resolves each listed file back to its
/// project on Modrinth, writes the lockfile/config into the given
/// directories, and extracts the pack's `overrides/` tree into the
/// working directory.
///
/// Changes from the previous version: loader detection now recognizes all
/// four loader keys defined by the mrpack format (`fabric-loader`,
/// `forge`, `neoforge`, `quilt-loader`) instead of only the first two,
/// with the same precedence and fabric/latest fallback; and a bare
/// `overrides/` directory entry (which strips to an empty path and would
/// make `create_dir_all("")` fail) is now skipped.
async fn import_modrinth(
    path: &Path,
    lockfile_dir: &Path,
    config_dir: &Path,
) -> Result<()> {
    use std::{fs::File, io::Read};
    use zip::ZipArchive;
    use crate::platform::create_platform;
    let file = File::open(path)?;
    let mut archive = ZipArchive::new(file)?;
    // Read the index out of the archive before iterating entries.
    let index_content = {
        let mut index_file = archive.by_name("modrinth.index.json")?;
        let mut content = String::new();
        index_file.read_to_string(&mut content)?;
        content
    };
    let index: serde_json::Value = serde_json::from_str(&index_content)?;
    // Minecraft version, defaulting when the index omits it.
    let mc_version = index["dependencies"]["minecraft"]
        .as_str()
        .unwrap_or("1.20.1")
        .to_string();
    // Detect the loader from the index dependencies. The mrpack format
    // defines fabric-loader, forge, neoforge and quilt-loader keys; fall
    // back to fabric/latest when none is present (previous behavior).
    let loader = ["fabric-loader", "forge", "neoforge", "quilt-loader"]
        .iter()
        .find_map(|key| {
            index["dependencies"][*key].as_str().map(|version| {
                let name = match *key {
                    "fabric-loader" => "fabric",
                    "quilt-loader" => "quilt",
                    other => other,
                };
                (name.to_string(), version.to_string())
            })
        })
        .unwrap_or_else(|| ("fabric".to_string(), "latest".to_string()));
    let mut loaders = std::collections::HashMap::new();
    loaders.insert(loader.0.clone(), loader.1);
    let mut lockfile = LockFile {
        target: Some(Target::Modrinth),
        mc_versions: vec![mc_version],
        loaders,
        projects: Vec::new(),
        lockfile_version: 1,
    };
    // Import projects from files list
    if let Some(files) = index["files"].as_array() {
        log::info!("Importing {} projects from modpack", files.len());
        // Create platform client using any stored Modrinth credentials.
        let creds = crate::model::credentials::ResolvedCredentials::load();
        let platform = create_platform(
            "modrinth",
            creds.modrinth_token().map(std::string::ToString::to_string),
        )?;
        for file_entry in files {
            // Take the second-to-last path segment of the first download URL.
            // NOTE(review): for cdn.modrinth.com URLs of the form
            // .../data/<project>/versions/<version>/<file> this segment is the
            // version id, not the project id — confirm the platform API
            // resolves it correctly.
            if let Some(project_id) = file_entry["downloads"]
                .as_array()
                .and_then(|downloads| downloads.first())
                .and_then(|url| url.as_str())
                .and_then(|url| url.split('/').rev().nth(1))
            {
                log::info!("Fetching project: {project_id}");
                match platform
                    .request_project_with_files(
                        project_id,
                        &lockfile.mc_versions,
                        std::slice::from_ref(&loader.0),
                    )
                    .await
                {
                    Ok(mut project) => {
                        // Select best file; a failure skips the project.
                        if let Err(e) = project.select_file(
                            &lockfile.mc_versions,
                            std::slice::from_ref(&loader.0),
                            None, // Use default (1 file) during import
                        ) {
                            log::warn!(
                                "Failed to select file for {}: {}",
                                project.get_name(),
                                e
                            );
                            continue;
                        }
                        lockfile.add_project(project);
                    },
                    Err(e) => {
                        log::warn!("Failed to fetch project {project_id}: {e}");
                    },
                }
            }
        }
    }
    // Create config from the index's pack metadata, with safe defaults.
    let config = Config {
        name: index["name"]
            .as_str()
            .unwrap_or("Imported Pack")
            .to_string(),
        version: index["versionId"]
            .as_str()
            .unwrap_or("1.0.0")
            .to_string(),
        description: index["summary"]
            .as_str()
            .map(std::string::ToString::to_string),
        author: None,
        overrides: vec!["overrides".to_string()],
        server_overrides: None,
        client_overrides: None,
        paths: HashMap::default(),
        projects: None,
        export_profiles: None,
        export_server_side_projects_to_client: None,
        file_count_preference: None,
    };
    // Save files using provided paths
    lockfile.save(lockfile_dir)?;
    config.save(config_dir)?;
    log::info!("Imported {} projects", lockfile.projects.len());
    // Extract overrides into the working directory.
    for i in 0..archive.len() {
        let mut file = archive.by_index(i)?;
        // enclosed_name rejects entries that would escape the extraction root.
        let outpath = file.enclosed_name().ok_or_else(|| {
            PakkerError::InternalError("Invalid file path in archive".to_string())
        })?;
        if outpath.starts_with("overrides/") {
            let Ok(target) = outpath.strip_prefix("overrides/") else {
                continue;
            };
            // The "overrides/" directory entry itself strips to an empty
            // path; creating "" would fail, so skip it.
            if target.as_os_str().is_empty() {
                continue;
            }
            if file.is_dir() {
                std::fs::create_dir_all(target)?;
            } else {
                if let Some(parent) = target.parent() {
                    std::fs::create_dir_all(parent)?;
                }
                let mut outfile = File::create(target)?;
                std::io::copy(&mut file, &mut outfile)?;
            }
        }
    }
    Ok(())
}
/// Import a CurseForge modpack zip into a fresh lockfile and config.
///
/// Reads `manifest.json`, fetches each listed project from CurseForge
/// (pinning the exact file when the manifest's `fileID` matches a known
/// file, otherwise selecting the best match), writes the lockfile/config
/// into the given directories, and extracts the pack's overrides folder
/// into the working directory.
async fn import_curseforge(
    path: &Path,
    lockfile_dir: &Path,
    config_dir: &Path,
) -> Result<()> {
    use std::{fs::File, io::Read};
    use zip::ZipArchive;
    use crate::platform::create_platform;
    let file = File::open(path)?;
    let mut archive = ZipArchive::new(file)?;
    // Read the manifest out of the archive before iterating entries.
    let manifest_content = {
        let mut manifest_file = archive.by_name("manifest.json")?;
        let mut content = String::new();
        manifest_file.read_to_string(&mut content)?;
        content
    };
    let manifest: serde_json::Value = serde_json::from_str(&manifest_content)?;
    // Create lockfile; default the MC version when the manifest omits it.
    let mc_version = manifest["minecraft"]["version"]
        .as_str()
        .unwrap_or("1.20.1")
        .to_string();
    let mod_loaders =
        manifest["minecraft"]["modLoaders"]
            .as_array()
            .ok_or_else(|| {
                PakkerError::InvalidImportFile("Missing modLoaders".to_string())
            })?;
    // Only the first declared loader is used.
    let loader_info = mod_loaders
        .first()
        .and_then(|l| l["id"].as_str())
        .ok_or_else(|| {
            PakkerError::InvalidImportFile("Missing loader id".to_string())
        })?;
    // Loader ids look like "<name>-<version>"; splitn(2, '-') keeps any
    // further dashes inside the version part. Missing pieces fall back to
    // fabric / latest.
    let parts: Vec<&str> = loader_info.split('-').collect();
    let loader_name = (*parts.first().unwrap_or(&"fabric")).to_string();
    let loader_version = (*parts.get(1).unwrap_or(&"latest")).to_string();
    let mut loaders = std::collections::HashMap::new();
    loaders.insert(loader_name, loader_version);
    let mut lockfile = LockFile {
        target: Some(Target::CurseForge),
        mc_versions: vec![mc_version.clone()],
        loaders: loaders.clone(),
        projects: Vec::new(),
        lockfile_version: 1,
    };
    // Import projects from files list
    if let Some(files) = manifest["files"].as_array() {
        log::info!("Importing {} projects from modpack", files.len());
        // Create platform client.
        // NOTE(review): this reads CURSEFORGE_TOKEN from the environment,
        // while the Modrinth import path uses ResolvedCredentials — confirm
        // the inconsistency is intended.
        let curseforge_token = std::env::var("CURSEFORGE_TOKEN").ok();
        let platform = create_platform("curseforge", curseforge_token)?;
        for file_entry in files {
            if let Some(project_id) = file_entry["projectID"].as_u64() {
                let project_id_str = project_id.to_string();
                log::info!("Fetching project: {project_id_str}");
                match platform
                    .request_project_with_files(
                        &project_id_str,
                        &lockfile.mc_versions,
                        &loaders.keys().cloned().collect::<Vec<_>>(),
                    )
                    .await
                {
                    Ok(mut project) => {
                        // Try to select the specific file if fileID is provided
                        if let Some(file_id) = file_entry["fileID"].as_u64() {
                            let file_id_str = file_id.to_string();
                            // Try to find the file with matching ID; pin it
                            // as the project's only file when found.
                            if let Some(file) =
                                project.files.iter().find(|f| f.id == file_id_str).cloned()
                            {
                                project.files = vec![file];
                            } else {
                                log::warn!(
                                    "Could not find file {} for project {}, selecting best match",
                                    file_id,
                                    project.get_name()
                                );
                                if let Err(e) = project.select_file(
                                    &lockfile.mc_versions,
                                    &loaders.keys().cloned().collect::<Vec<_>>(),
                                    None, // Use default (1 file) during import
                                ) {
                                    log::warn!(
                                        "Failed to select file for {}: {}",
                                        project.get_name(),
                                        e
                                    );
                                    continue;
                                }
                            }
                        } else {
                            // No specific file ID, select best match
                            if let Err(e) = project.select_file(
                                &lockfile.mc_versions,
                                &loaders.keys().cloned().collect::<Vec<_>>(),
                                None, // Use default (1 file) during import
                            ) {
                                log::warn!(
                                    "Failed to select file for {}: {}",
                                    project.get_name(),
                                    e
                                );
                                continue;
                            }
                        }
                        lockfile.add_project(project);
                    },
                    Err(e) => {
                        log::warn!("Failed to fetch project {project_id_str}: {e}");
                    },
                }
            }
        }
    }
    // Create config from the manifest metadata, with safe defaults.
    let config = Config {
        name: manifest["name"]
            .as_str()
            .unwrap_or("Imported Pack")
            .to_string(),
        version: manifest["version"]
            .as_str()
            .unwrap_or("1.0.0")
            .to_string(),
        description: None,
        author: manifest["author"]
            .as_str()
            .map(std::string::ToString::to_string),
        overrides: vec!["overrides".to_string()],
        server_overrides: None,
        client_overrides: None,
        paths: HashMap::default(),
        projects: None,
        export_profiles: None,
        export_server_side_projects_to_client: None,
        file_count_preference: None,
    };
    // Save files using provided paths
    lockfile.save(lockfile_dir)?;
    config.save(config_dir)?;
    log::info!("Imported {} projects", lockfile.projects.len());
    // Extract overrides; the manifest may rename the overrides folder.
    let overrides_prefix = manifest["overrides"].as_str().unwrap_or("overrides");
    for i in 0..archive.len() {
        let mut file = archive.by_index(i)?;
        // enclosed_name rejects entries that would escape the extraction root.
        let outpath = file.enclosed_name().ok_or_else(|| {
            PakkerError::InternalError("Invalid file path in archive".to_string())
        })?;
        if outpath.starts_with(overrides_prefix) {
            let Some(target) = outpath.strip_prefix(overrides_prefix).ok() else {
                continue;
            };
            // NOTE(review): a bare "<prefix>/" directory entry strips to an
            // empty path; create_dir_all("") would error — confirm imported
            // archives always list files under the prefix.
            if file.is_dir() {
                std::fs::create_dir_all(target)?;
            } else {
                if let Some(parent) = target.parent() {
                    std::fs::create_dir_all(parent)?;
                }
                let mut outfile = File::create(target)?;
                std::io::copy(&mut file, &mut outfile)?;
            }
        }
    }
    Ok(())
}

View file

@ -0,0 +1,196 @@
use std::{collections::HashMap, path::Path};
use crate::{
cli::InitArgs,
error::PakkerError,
model::{Config, LockFile, ResolvedCredentials, Target},
ui_utils::{
prompt_curseforge_api_key,
prompt_input,
prompt_select,
prompt_yes_no,
},
};
/// Initialize a new modpack: write a fresh lockfile and config.
///
/// Values not supplied via CLI arguments are prompted for interactively,
/// unless `--yes` (`global_yes`) was given or a name was passed — in which
/// case defaults are used. Fails if a lockfile already exists. For
/// CurseForge-capable targets, optionally prompts to set up an API key.
pub fn execute(
    args: InitArgs,
    global_yes: bool,
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<(), PakkerError> {
    let skip_prompts = global_yes;
    if lockfile_path.exists() {
        return Err(PakkerError::AlreadyExists(
            "Lock file already exists".into(),
        ));
    }
    // Interactive mode: prompt for values not provided via CLI and --yes not set
    let is_interactive = !skip_prompts && args.name.is_none();
    // Get modpack name
    let name = if let Some(name) = args.name.clone() {
        name
    } else if is_interactive {
        prompt_input("Modpack name", Some("My Modpack"))
            .map_err(|e| PakkerError::InvalidInput(e.to_string()))?
    } else {
        "My Modpack".to_string()
    };
    // Get modpack version
    let version = if let Some(version) = args.version.clone() {
        version
    } else if is_interactive {
        prompt_input("Version", Some("1.0.0"))
            .map_err(|e| PakkerError::InvalidInput(e.to_string()))?
    } else {
        "1.0.0".to_string()
    };
    // Get target platform
    let target = if let Some(target) = args.target.clone() {
        target
    } else if is_interactive {
        let targets = ["multiplatform", "curseforge", "modrinth"];
        let idx = prompt_select("Target platform", &targets)
            .map_err(|e| PakkerError::InvalidInput(e.to_string()))?;
        targets[idx].to_string()
    } else {
        "multiplatform".to_string()
    };
    // Map the target string to the enum; reject anything unrecognized
    // (only reachable via an invalid --target argument).
    let target_enum = match target.as_str() {
        "curseforge" => Target::CurseForge,
        "modrinth" => Target::Modrinth,
        "multiplatform" => Target::Multiplatform,
        _ => {
            return Err(PakkerError::InvalidInput(format!(
                "Invalid target: {target}"
            )));
        },
    };
    // Get Minecraft versions (supports multiple)
    let mc_versions = if let Some(versions) = args.mc_versions.clone() {
        versions
    } else if is_interactive {
        let input =
            prompt_input("Minecraft versions (space-separated)", Some("1.20.1"))
                .map_err(|e| PakkerError::InvalidInput(e.to_string()))?;
        input.split_whitespace().map(String::from).collect()
    } else {
        vec!["1.20.1".to_string()]
    };
    // Get mod loaders (supports multiple in name=version format)
    let loaders: HashMap<String, String> = if let Some(loader_strs) = args.loaders
    {
        let mut map = HashMap::new();
        for loader_str in loader_strs {
            // Split "name=version"; splitn keeps any '=' inside the version.
            let parts: Vec<&str> = loader_str.splitn(2, '=').collect();
            if parts.len() == 2 {
                map.insert(parts[0].to_string(), parts[1].to_string());
            } else {
                // If no version specified, use "latest"
                map.insert(loader_str, "latest".to_string());
            }
        }
        map
    } else if is_interactive {
        let loader_options = ["fabric", "forge", "neoforge", "quilt"];
        let idx = prompt_select("Mod loader", &loader_options)
            .map_err(|e| PakkerError::InvalidInput(e.to_string()))?;
        let loader = loader_options[idx].to_string();
        let loader_version = prompt_input("Loader version", Some("latest"))
            .map_err(|e| PakkerError::InvalidInput(e.to_string()))?;
        let mut map = HashMap::new();
        map.insert(loader, loader_version);
        map
    } else {
        let mut map = HashMap::new();
        map.insert("fabric".to_string(), "latest".to_string());
        map
    };
    // NOTE(review): init writes lockfile_version 2, while the import paths
    // write 1 — confirm which schema version is current.
    let lockfile = LockFile {
        target: Some(target_enum),
        mc_versions,
        loaders,
        projects: Vec::new(),
        lockfile_version: 2,
    };
    // Save expects directory path, so get parent directory
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    lockfile.save(lockfile_dir)?;
    let config = Config {
        name: name.clone(),
        version: version.clone(),
        description: None,
        author: None,
        overrides: vec!["overrides".to_string()],
        server_overrides: None,
        client_overrides: None,
        paths: HashMap::new(),
        projects: None,
        export_profiles: None,
        export_server_side_projects_to_client: None,
        file_count_preference: None,
    };
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
    config.save(config_dir)?;
    // Summarize what was written.
    println!("Initialized new modpack '{name}' v{version}");
    println!(" Target: {target}");
    println!(" Minecraft: {}", lockfile.mc_versions.join(", "));
    println!(
        " Loaders: {}",
        lockfile
            .loaders
            .iter()
            .map(|(k, v)| format!("{k}={v}"))
            .collect::<Vec<_>>()
            .join(", ")
    );
    // Check if CurseForge API key is needed and prompt if interactive
    if is_interactive && (target == "curseforge" || target == "multiplatform") {
        let credentials = ResolvedCredentials::load();
        let has_cf_key = credentials.curseforge_api_key().is_some();
        if !has_cf_key {
            println!();
            if prompt_yes_no(
                "Would you like to set up CurseForge API key now?",
                true,
                skip_prompts,
            )
            .map_err(|e| PakkerError::InvalidInput(e.to_string()))?
                && let Ok(Some(api_key)) = prompt_curseforge_api_key(skip_prompts)
            {
                // Save to credentials file under $HOME/.pakku (or ./.pakku
                // when HOME is unset). Failures here are silently ignored.
                // NOTE(review): this stores the key in plaintext, while other
                // code paths use the keyring — confirm intended.
                let creds_path = std::env::var("HOME").map_or_else(
                    |_| Path::new(".pakku").to_path_buf(),
                    |h| Path::new(&h).join(".pakku"),
                );
                std::fs::create_dir_all(&creds_path).ok();
                let creds_file = creds_path.join("credentials");
                let content =
                    format!("# Pakku/Pakker credentials\nCURSEFORGE_API_KEY={api_key}\n");
                if std::fs::write(&creds_file, content).is_ok() {
                    println!("CurseForge API key saved to ~/.pakku/credentials");
                }
            }
        }
    }
    Ok(())
}

View file

@ -0,0 +1,603 @@
use std::{collections::HashSet, path::Path};
use comfy_table::{Cell, Color, ContentArrangement, Table, presets};
use strsim::levenshtein;
use yansi::Paint;
use crate::{
error::Result,
model::{Config, LockFile, Project, ProjectFile},
};
/// Inspect one or more projects from the lockfile, printing a detailed
/// report for each and suggesting close matches for unknown names.
///
/// # Errors
///
/// Returns `ProjectNotFound` when none of the requested projects exist,
/// and propagates lockfile/config load errors.
pub fn execute(
    projects: &[String],
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<()> {
    // Load expects a directory path, so resolve the parent of each file.
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
    let lockfile = LockFile::load(lockfile_dir)?;
    // Loaded only to validate that the config is present and parseable.
    let _config = Config::load(config_dir)?;
    let mut found_any = false;
    let total_projects = projects.len();
    for (idx, project_input) in projects.iter().enumerate() {
        if let Some(project) = find_project(&lockfile, project_input) {
            display_project_inspection(project, &lockfile)?;
            found_any = true;
            // Add separator between projects (but not after the last one)
            if idx < total_projects - 1 {
                let width = 80; // Default terminal width
                // BUG FIX: was `"".repeat(width)` — repeating the empty
                // string prints nothing. Use a horizontal rule so the
                // separator is actually visible.
                println!("{}", "─".repeat(width));
                println!();
            }
        } else {
            eprintln!(
                "{}: {}",
                "Error".red(),
                format!("Project '{project_input}' not found in lockfile.").red()
            );
            // Suggest similar projects
            if let Some(suggestions) =
                find_similar_projects(&lockfile, project_input, 5)
            {
                eprintln!();
                eprintln!("{}", "Did you mean one of these?".yellow());
                for suggestion in suggestions {
                    eprintln!("  - {}", suggestion.cyan());
                }
            }
            eprintln!();
        }
    }
    if !found_any && !projects.is_empty() {
        return Err(crate::error::PakkerError::ProjectNotFound(
            "No projects found".to_string(),
        ));
    }
    Ok(())
}
/// Look up the first project in the lockfile matching `query` by slug,
/// name, pakku id, or alias.
fn find_project<'a>(
    lockfile: &'a LockFile,
    query: &str,
) -> Option<&'a Project> {
    for candidate in &lockfile.projects {
        if project_matches(candidate, query) {
            return Some(candidate);
        }
    }
    None
}
fn project_matches(project: &Project, query: &str) -> bool {
// Check slugs
for slug in project.slug.values() {
if slug.eq_ignore_ascii_case(query) {
return true;
}
}
// Check names
for name in project.name.values() {
if name.eq_ignore_ascii_case(query) {
return true;
}
}
// Check pakku_id
if let Some(ref pakku_id) = project.pakku_id
&& pakku_id.eq_ignore_ascii_case(query)
{
return true;
}
// Check aliases
for alias in &project.aliases {
if alias.eq_ignore_ascii_case(query) {
return true;
}
}
false
}
/// Suggest up to `max_results` project names within a Levenshtein distance
/// of 3 from `query`, searching slugs, names (case-insensitively), and
/// aliases. Returns `None` when nothing is close enough.
fn find_similar_projects(
    lockfile: &LockFile,
    query: &str,
    max_results: usize,
) -> Option<Vec<String>> {
    // Hoisted out of the per-project loop: previously `query.to_lowercase()`
    // was recomputed for every name of every project.
    let query_lower = query.to_lowercase();
    // Calculate similarity scores for all projects
    let mut candidates: Vec<(String, usize)> = lockfile
        .projects
        .iter()
        .flat_map(|p| {
            let mut scores = Vec::new();
            // Check slug similarity
            for slug in p.slug.values() {
                let distance = levenshtein(slug, query);
                if distance <= 3 {
                    scores.push((slug.clone(), distance));
                }
            }
            // Check name similarity (case-insensitive)
            for name in p.name.values() {
                let distance = levenshtein(&name.to_lowercase(), &query_lower);
                if distance <= 3 {
                    scores.push((name.clone(), distance));
                }
            }
            // Check aliases
            for alias in &p.aliases {
                let distance = levenshtein(alias, query);
                if distance <= 3 {
                    scores.push((alias.clone(), distance));
                }
            }
            scores
        })
        .collect();
    if candidates.is_empty() {
        return None;
    }
    // Sort by distance (closest first)
    candidates.sort_by_key(|(_, dist)| *dist);
    // Deduplicate and take top N
    let mut seen = HashSet::new();
    let suggestions: Vec<String> = candidates
        .into_iter()
        .filter_map(|(name, _)| {
            if seen.insert(name.clone()) {
                Some(name)
            } else {
                None
            }
        })
        .take(max_results)
        .collect();
    Some(suggestions)
}
/// Print the full inspection report for a single project: header panel,
/// file list, properties, and dependency tree, each section separated by
/// a blank line.
fn display_project_inspection(
    project: &Project,
    lockfile: &LockFile,
) -> Result<()> {
    // Display project header panel
    display_project_header(project);
    // Display project files
    println!();
    display_project_files(&project.files, project);
    // Display properties
    println!();
    display_properties(project);
    // Display dependency tree (needs the lockfile to resolve dep ids)
    println!();
    display_dependencies(project, lockfile)?;
    println!();
    Ok(())
}
/// Render the two-row header panel: project title, then a metadata line
/// with slug, platform, type, and side.
fn display_project_header(project: &Project) {
    let title = get_project_name(project);
    let fallback = String::from("N/A");
    let slug = project.slug.values().next().unwrap_or(&fallback);
    let provider = project.id.keys().next().map_or("unknown", String::as_str);
    let kind = format!("{:?}", project.r#type).to_lowercase();
    let side = format!("{:?}", project.side).to_lowercase();
    let metadata = format!("{slug} ({provider}) • {kind} • {side}");

    let mut header = Table::new();
    header
        .load_preset(presets::UTF8_FULL)
        .set_content_arrangement(ContentArrangement::Dynamic);
    // Row 1: project name.
    header.add_row(vec![
        Cell::new(title)
            .fg(Color::Cyan)
            .set_alignment(comfy_table::CellAlignment::Left),
    ]);
    // Row 2: slug/platform/type/side summary.
    header.add_row(vec![
        Cell::new(metadata)
            .fg(Color::DarkGrey)
            .set_alignment(comfy_table::CellAlignment::Left),
    ]);
    println!("{header}");
}
/// List every known file of the project, one table per file, marking the
/// first entry as "current" and showing date, URL, and truncated hashes.
fn display_project_files(files: &[ProjectFile], project: &Project) {
    if files.is_empty() {
        println!("{}", "No files available".yellow());
        return;
    }
    println!("{}", "Project Files".cyan().bold());
    for (idx, file) in files.iter().enumerate() {
        // The first file is treated as the currently-installed one.
        let is_current = idx == 0;
        let status_text = if is_current { " current" } else { "" };

        let mut table = Table::new();
        table
            .load_preset(presets::UTF8_FULL)
            .set_content_arrangement(ContentArrangement::Dynamic);

        let file_path = format!("{}={}", file.file_type, file.file_name);
        // Resolve the project-page URL once; reused below for both the
        // hyperlink and the plain-text URL row.
        let site_url = file.get_site_url(project);
        let file_display = match &site_url {
            Some(url) => {
                // Create hyperlink for the file
                let hyperlink = crate::ui_utils::hyperlink(url, &file_path);
                format!("{hyperlink}:{status_text}")
            },
            None => format!("{file_path}:{status_text}"),
        };
        let row_color = if is_current { Color::Green } else { Color::White };
        table.add_row(vec![Cell::new(file_display).fg(row_color)]);
        // Date published
        table.add_row(vec![Cell::new(&file.date_published).fg(Color::DarkGrey)]);
        // Show site URL if available (for non-hyperlink terminals)
        if let Some(url) = &site_url {
            table.add_row(vec![Cell::new(format!("URL: {url}")).fg(Color::Blue)]);
        }
        // Empty line
        table.add_row(vec![Cell::new("")]);
        // Hashes (truncated to first/last 16 characters when long)
        for (hash_type, hash_value) in &file.hashes {
            let display_hash = if hash_value.len() > 32 {
                format!(
                    "{}...{}",
                    &hash_value[..16],
                    &hash_value[hash_value.len() - 16..]
                )
            } else {
                hash_value.clone()
            };
            table.add_row(vec![
                Cell::new(format!("{hash_type}={display_hash}")).fg(Color::DarkGrey),
            ]);
        }
        println!("{table}");
        println!();
    }
}
/// Print the key/value property list (type, side, update strategy,
/// redistributability, optional subpath and aliases) for a project.
fn display_properties(project: &Project) {
    println!("{}", "Properties".cyan().bold());
    let type_value = format!("{:?}", project.r#type).to_lowercase();
    let side_value = format!("{:?}", project.side).to_lowercase();
    let strategy_value = format!("{:?}", project.update_strategy).to_lowercase();
    println!(" {}={}", "type".yellow(), type_value);
    println!(" {}={}", "side".yellow(), side_value);
    println!(" {}={}", "update_strategy".yellow(), strategy_value);
    println!(
        " {}={}",
        "redistributable".yellow(),
        project.redistributable
    );
    if let Some(subpath) = &project.subpath {
        println!(" {}={}", "subpath".yellow(), subpath);
    }
    if !project.aliases.is_empty() {
        let joined = project
            .aliases
            .iter()
            .cloned()
            .collect::<Vec<_>>()
            .join(", ");
        println!(" {}={}", "aliases".yellow(), joined);
    }
}
/// Print the dependency tree for a project, unioning the required
/// dependencies of all of its files.
fn display_dependencies(project: &Project, lockfile: &LockFile) -> Result<()> {
    println!("{}", "Dependencies".cyan().bold());
    // Union of required dependencies across every file of the project.
    let all_deps: HashSet<String> = project
        .files
        .iter()
        .flat_map(|file| file.required_dependencies.iter().cloned())
        .collect();
    if all_deps.is_empty() {
        println!(" {}", "No dependencies".dim());
        return Ok(());
    }
    // Walk each root dependency; `visited` is shared so circular chains
    // across roots are reported instead of recursed into.
    let mut visited = HashSet::new();
    for dep_id in all_deps {
        display_dependency_tree(&dep_id, lockfile, 1, &mut visited)?;
    }
    Ok(())
}
/// Recursively print the dependency tree rooted at `dep_id`.
///
/// `visited` tracks project names already printed so circular dependency
/// chains are reported rather than recursed into; recursion is also capped
/// at depth 5 as a safety net.
fn display_dependency_tree(
    dep_id: &str,
    lockfile: &LockFile,
    depth: usize,
    visited: &mut HashSet<String>,
) -> Result<()> {
    let indent = " ".repeat(depth);
    let tree_char = if depth == 1 { "└─" } else { "├─" };
    // Find the project in the lockfile by platform id, slug, or pakku id.
    // `as_deref` compares borrowed strings directly; the previous
    // `Some(&dep_id.to_string())` allocated a String per project checked.
    let project = lockfile.projects.iter().find(|p| {
        p.id.values().any(|id| id == dep_id)
            || p.slug.values().any(|slug| slug == dep_id)
            || p.pakku_id.as_deref() == Some(dep_id)
    });
    if let Some(proj) = project {
        let name = get_project_name(proj);
        // Check for circular dependency
        if visited.contains(&name) {
            println!("{}{} {} {}", indent, tree_char, name, "(circular)".red());
            return Ok(());
        }
        println!("{}{} {} (required)", indent, tree_char, name.green());
        visited.insert(name);
        // Recursively display nested dependencies (limit depth to avoid
        // runaway recursion on deep graphs)
        if depth < 5 {
            for file in &proj.files {
                for nested_dep in &file.required_dependencies {
                    display_dependency_tree(nested_dep, lockfile, depth + 1, visited)?;
                }
            }
        }
    } else {
        // Dependency not found in lockfile
        println!(
            "{}{} {} {}",
            indent,
            tree_char,
            dep_id,
            "(not in lockfile)".yellow()
        );
    }
    Ok(())
}
/// Best-effort display name for a project: the first name entry, falling
/// back to the first slug, then to the literal "Unknown".
fn get_project_name(project: &Project) -> String {
    if let Some(name) = project.name.values().next() {
        return name.clone();
    }
    if let Some(slug) = project.slug.values().next() {
        return slug.clone();
    }
    String::from("Unknown")
}
#[cfg(test)]
mod tests {
    use std::collections::HashMap;
    use super::*;
    use crate::model::enums::{
        ProjectSide,
        ProjectType,
        ReleaseType,
        UpdateStrategy,
    };
    /// Build a minimal `Project` with its slug/name/id keyed under the
    /// "modrinth" platform and no files.
    fn create_test_project(pakku_id: &str, slug: &str, name: &str) -> Project {
        let mut slug_map = HashMap::new();
        slug_map.insert("modrinth".to_string(), slug.to_string());
        let mut name_map = HashMap::new();
        name_map.insert("modrinth".to_string(), name.to_string());
        let mut id_map = HashMap::new();
        id_map.insert("modrinth".to_string(), pakku_id.to_string());
        Project {
            pakku_id: Some(pakku_id.to_string()),
            pakku_links: HashSet::new(),
            r#type: ProjectType::Mod,
            side: ProjectSide::Both,
            slug: slug_map,
            name: name_map,
            id: id_map,
            update_strategy: UpdateStrategy::Latest,
            redistributable: true,
            subpath: None,
            aliases: HashSet::new(),
            export: true,
            files: vec![],
        }
    }
    /// Build a Modrinth-targeted 1.20.1/fabric lockfile containing the
    /// given projects (added via `add_project`).
    fn create_test_lockfile(projects: Vec<Project>) -> LockFile {
        use crate::model::enums::Target;
        let mut loaders = HashMap::new();
        loaders.insert("fabric".to_string(), "0.15.0".to_string());
        let mut lockfile = LockFile {
            target: Some(Target::Modrinth),
            mc_versions: vec!["1.20.1".to_string()],
            loaders,
            projects: Vec::new(),
            lockfile_version: 1,
        };
        for project in projects {
            lockfile.add_project(project);
        }
        lockfile
    }
    // Lookup by exact slug.
    #[test]
    fn test_find_project_by_slug() {
        let project = create_test_project("test-id", "test-slug", "Test Mod");
        let lockfile = create_test_lockfile(vec![project]);
        let found = find_project(&lockfile, "test-slug");
        assert!(found.is_some());
        assert_eq!(found.unwrap().pakku_id, Some("test-id".to_string()));
    }
    // Lookup by name is case-insensitive.
    #[test]
    fn test_find_project_by_name() {
        let project = create_test_project("test-id", "test-slug", "Test Mod");
        let lockfile = create_test_lockfile(vec![project]);
        let found = find_project(&lockfile, "test mod"); // Case-insensitive
        assert!(found.is_some());
        assert_eq!(found.unwrap().pakku_id, Some("test-id".to_string()));
    }
    // Lookup by the internal pakku id.
    #[test]
    fn test_find_project_by_pakku_id() {
        let project = create_test_project("test-id", "test-slug", "Test Mod");
        let lockfile = create_test_lockfile(vec![project]);
        let found = find_project(&lockfile, "test-id");
        assert!(found.is_some());
        assert_eq!(found.unwrap().pakku_id, Some("test-id".to_string()));
    }
    // Unknown queries return None.
    #[test]
    fn test_find_project_not_found() {
        let project = create_test_project("test-id", "test-slug", "Test Mod");
        let lockfile = create_test_lockfile(vec![project]);
        let found = find_project(&lockfile, "nonexistent");
        assert!(found.is_none());
    }
    // A one-character typo stays within Levenshtein distance 3 and is
    // suggested.
    #[test]
    fn test_fuzzy_matching_close_match() {
        let project1 = create_test_project("id1", "fabric-api", "Fabric API");
        let project2 = create_test_project("id2", "sodium", "Sodium");
        let lockfile = create_test_lockfile(vec![project1, project2]);
        // Typo: "fabrc-api" should suggest "fabric-api"
        let suggestions = find_similar_projects(&lockfile, "fabrc-api", 5);
        assert!(suggestions.is_some());
        let suggestions = suggestions.unwrap();
        assert!(!suggestions.is_empty());
        assert!(suggestions.contains(&"fabric-api".to_string()));
    }
    // Distant queries produce no suggestions.
    #[test]
    fn test_fuzzy_matching_no_match() {
        let project = create_test_project("test-id", "test-slug", "Test Mod");
        let lockfile = create_test_lockfile(vec![project]);
        // Very different query, should have no suggestions (distance > 3)
        let suggestions =
            find_similar_projects(&lockfile, "completely-different-xyz", 5);
        assert!(suggestions.is_none() || suggestions.unwrap().is_empty());
    }
    // Aliases participate in matching.
    #[test]
    fn test_project_matches_alias() {
        let mut project = create_test_project("test-id", "test-slug", "Test Mod");
        project.aliases.insert("test-alias".to_string());
        assert!(project_matches(&project, "test-alias"));
    }
    // Two projects that require each other must not cause infinite
    // recursion in the tree printer.
    #[test]
    fn test_circular_dependency_detection() {
        // This is a conceptual test - in practice, we'd need to set up files with
        // dependencies
        let mut project1 = create_test_project("dep1", "dep1-slug", "Dependency 1");
        let mut project2 = create_test_project("dep2", "dep2-slug", "Dependency 2");
        // Create files with circular dependencies
        let file1 = ProjectFile {
            file_type: "modrinth".to_string(),
            file_name: "dep1.jar".to_string(),
            mc_versions: vec!["1.20.1".to_string()],
            loaders: vec!["fabric".to_string()],
            release_type: ReleaseType::Release,
            url: "https://example.com/dep1.jar".to_string(),
            id: "file1".to_string(),
            parent_id: "dep1".to_string(),
            hashes: HashMap::new(),
            required_dependencies: vec!["dep2".to_string()],
            size: 1000,
            date_published: "2024-01-01T00:00:00Z".to_string(),
        };
        let file2 = ProjectFile {
            file_type: "modrinth".to_string(),
            file_name: "dep2.jar".to_string(),
            mc_versions: vec!["1.20.1".to_string()],
            loaders: vec!["fabric".to_string()],
            release_type: ReleaseType::Release,
            url: "https://example.com/dep2.jar".to_string(),
            id: "file2".to_string(),
            parent_id: "dep2".to_string(),
            hashes: HashMap::new(),
            required_dependencies: vec!["dep1".to_string()],
            size: 1000,
            date_published: "2024-01-01T00:00:00Z".to_string(),
        };
        project1.files.push(file1);
        project2.files.push(file2);
        let lockfile = create_test_lockfile(vec![project1, project2]);
        // Test that display_dependency_tree handles circular deps gracefully
        let mut visited = HashSet::new();
        let result = display_dependency_tree("dep1", &lockfile, 1, &mut visited);
        assert!(result.is_ok());
    }
}

View file

@ -0,0 +1,47 @@
use std::path::Path;
use crate::{
cli::LinkArgs,
error::{PakkerError, Result},
model::LockFile,
};
/// Record a dependency link from one lockfile project (`args.from`) onto
/// another (`args.to`), persisting the lockfile only when a new link was
/// actually added.
pub fn execute(args: &LinkArgs, lockfile_path: &Path) -> Result<()> {
    log::info!("Linking {} -> {}", args.from, args.to);
    // Load expects directory path, so get parent directory
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let mut lockfile = LockFile::load(lockfile_dir)?;
    // Resolve the source project first; only its pakku id is needed, so the
    // immutable borrow ends before the mutable lookup below.
    let from_id = lockfile
        .projects
        .iter()
        .find(|p| p.matches_input(&args.from))
        .ok_or_else(|| PakkerError::ProjectNotFound(args.from.clone()))?
        .pakku_id
        .clone()
        .ok_or_else(|| {
            PakkerError::InvalidProject("From project has no pakku_id".to_string())
        })?;
    let to_project = lockfile
        .projects
        .iter_mut()
        .find(|p| p.matches_input(&args.to))
        .ok_or_else(|| PakkerError::ProjectNotFound(args.to.clone()))?;
    // A duplicate link would be a no-op; skip the save entirely.
    if to_project.pakku_links.contains(&from_id) {
        log::info!("Link already exists");
        return Ok(());
    }
    to_project.pakku_links.insert(from_id);
    lockfile.save(lockfile_dir)?;
    log::info!("Successfully linked projects");
    Ok(())
}

View file

@ -0,0 +1,94 @@
use std::path::Path;
use crate::{cli::LsArgs, error::Result, model::LockFile};
/// Truncate a name to fit within `max_len` characters, adding "..." if
/// truncated.
///
/// Operates on `char` boundaries, so names containing multi-byte UTF-8
/// characters can no longer panic (the previous version sliced by byte
/// offset, which panics mid-codepoint). For ASCII input the behavior is
/// unchanged.
fn truncate_name(name: &str, max_len: usize) -> String {
    // Count characters, not bytes, so the limit matches display width for
    // typical names.
    if name.chars().count() <= max_len {
        name.to_string()
    } else if max_len > 3 {
        // Reserve three characters for the ellipsis.
        let head: String = name.chars().take(max_len - 3).collect();
        format!("{head}...")
    } else {
        name.chars().take(max_len).collect()
    }
}
/// List all projects in the lockfile, one line each (or a multi-line
/// summary with `--detailed`), flagging projects whose versions differ
/// between providers.
///
/// # Errors
///
/// Propagates lockfile load errors.
pub fn execute(args: &LsArgs, lockfile_path: &Path) -> Result<()> {
    // Load expects directory path, so get parent directory
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let lockfile = LockFile::load(lockfile_dir)?;
    if lockfile.projects.is_empty() {
        println!("No projects installed");
        return Ok(());
    }
    println!("Installed projects ({}):", lockfile.projects.len());
    println!();
    // Calculate max name length for alignment; an explicit CLI override
    // wins, otherwise the longest name capped at 50 (default 20 if empty).
    let max_name_len = args.name_max_length.unwrap_or_else(|| {
        lockfile
            .projects
            .iter()
            .map(|p| p.get_name().len())
            .max()
            .unwrap_or(20)
            .min(50)
    });
    for project in &lockfile.projects {
        // Check for version mismatch across providers
        let version_warning = if project.versions_match_across_providers() {
            ""
        } else {
            // Use the detailed check_version_mismatch for logging
            if let Some(mismatch_detail) = project.check_version_mismatch() {
                log::warn!("{mismatch_detail}");
            }
            " [!] versions do not match across providers"
        };
        if args.detailed {
            let id = project.pakku_id.as_deref().unwrap_or("unknown");
            let name = truncate_name(&project.get_name(), max_name_len);
            println!(" {name} ({id}){version_warning}");
            println!(" Type: {:?}", project.r#type);
            println!(" Side: {:?}", project.side);
            // The first file is treated as the installed one.
            if let Some(file) = project.files.first() {
                println!(" File: {}", file.file_name);
                println!(
                    " Version: {} ({})",
                    file.release_type, file.date_published
                );
            }
            // Show version details if there's a mismatch
            if !version_warning.is_empty() {
                println!(" Provider versions:");
                for file in &project.files {
                    println!(" {}: {}", file.file_type, file.file_name);
                }
            }
            if !project.pakku_links.is_empty() {
                println!(" Dependencies: {}", project.pakku_links.len());
            }
            println!();
        } else {
            let name = truncate_name(&project.get_name(), max_name_len);
            let file_info = project
                .files
                .first()
                .map(|f| format!(" ({})", f.file_name))
                .unwrap_or_default();
            println!(" {name}{file_info}{version_warning}");
        }
    }
    Ok(())
}

View file

@ -0,0 +1,24 @@
//! Subcommand implementations for the CLI.
//!
//! Each module exposes an `execute` entry point invoked by the command
//! dispatcher.
pub mod add;
pub mod add_prj;
pub mod cfg;
pub mod cfg_prj;
pub mod credentials;
pub mod credentials_set;
pub mod credentials_test;
pub mod diff;
pub mod export;
pub mod fetch;
pub mod fork;
pub mod import;
pub mod init;
pub mod inspect;
pub mod link;
pub mod ls;
pub mod remote;
pub mod remote_update;
pub mod rm;
pub mod set;
pub mod status;
pub mod sync;
pub mod unlink;
pub mod update;

View file

@ -0,0 +1,151 @@
use std::{
fs,
path::{Path, PathBuf},
};
use crate::{
cli::RemoteArgs,
error::{PakkerError, Result},
fetch::Fetcher,
git,
model::{config::Config, lockfile::LockFile},
};
const REMOTE_DIR: &str = ".pakku-remote";
/// Install or manage a modpack from a remote Git repository.
///
/// With `--remove`, deletes the cloned remote; with no URL, prints the
/// current remote status; otherwise clones (or fetch+resets) the
/// repository, copies its lockfile/config into the working directory,
/// downloads all project files, and syncs overrides.
///
/// # Errors
///
/// Fails on git or filesystem errors, or when the remote repository does
/// not contain a `pakku-lock.json`.
pub async fn execute(args: RemoteArgs) -> Result<()> {
    let remote_path = PathBuf::from(REMOTE_DIR);
    // Handle --remove flag
    if args.remove {
        if remote_path.exists() {
            fs::remove_dir_all(&remote_path)?;
            log::info!("Removed remote from modpack");
        } else {
            log::warn!("No remote configured");
        }
        return Ok(());
    }
    // If no URL provided, show status
    if args.url.is_none() {
        show_remote_status(&remote_path);
        return Ok(());
    }
    let url = args
        .url
        .ok_or_else(|| PakkerError::InvalidInput("URL is required".to_string()))?;
    log::info!("Installing modpack from: {url}");
    // Clone or update repository (fetch + hard reset when already cloned)
    if remote_path.exists() {
        log::info!("Remote directory exists, updating...");
        let remote_name = "origin";
        let ref_name = args.branch.as_deref().unwrap_or("HEAD");
        git::fetch_updates(&remote_path, remote_name, ref_name, None)?;
        git::reset_to_ref(&remote_path, remote_name, ref_name)?;
    } else {
        log::info!("Cloning repository...");
        let ref_name = args.branch.as_deref().unwrap_or("HEAD");
        git::clone_repository(&url, &remote_path, ref_name, None)?;
    }
    // Load lockfile and config from remote
    let remote_lockfile_path = remote_path.join("pakku-lock.json");
    if !remote_lockfile_path.exists() {
        return Err(PakkerError::ConfigError(
            "Remote repository does not contain pakku-lock.json".to_string(),
        ));
    }
    let remote_lockfile = LockFile::load(&remote_path)?;
    // Config is optional on the remote; a load failure is treated as absent.
    let remote_config = Config::load(&remote_path).ok();
    // Copy lockfile to current directory
    let current_lockfile_path = PathBuf::from("pakku-lock.json");
    fs::copy(&remote_lockfile_path, &current_lockfile_path)?;
    log::info!("Copied lockfile from remote");
    // Copy config if exists
    if remote_config.is_some() {
        let remote_config_path = remote_path.join("pakku.json");
        let current_config_path = PathBuf::from("pakku.json");
        if remote_config_path.exists() {
            fs::copy(&remote_config_path, &current_config_path)?;
            log::info!("Copied config from remote");
        }
    }
    // Fetch project files
    log::info!("Fetching project files...");
    let fetcher = Fetcher::new(&remote_path);
    fetcher
        .fetch_all(&remote_lockfile, &remote_config.unwrap_or_default())
        .await?;
    // Sync overrides
    sync_overrides(&remote_path, args.server_pack)?;
    log::info!("Successfully installed modpack from remote");
    Ok(())
}
/// Print the status of the configured remote (directory, origin URL,
/// current commit), or a notice when no remote is set up.
fn show_remote_status(remote_path: &Path) {
    if !remote_path.exists() {
        println!("No remote configured");
        return;
    }
    println!("Remote status:");
    println!(" Directory: {}", remote_path.display());
    if git::is_git_repository(remote_path) {
        if let Ok(url) = git::get_remote_url(remote_path, "origin") {
            println!(" URL: {url}");
        }
        if let Ok(sha) = git::get_current_commit_sha(remote_path, None) {
            // Checked slice: `&sha[..8]` would panic if the SHA string were
            // ever shorter than eight bytes; fall back to the full string.
            let short = sha.get(..8).unwrap_or(&sha);
            println!(" Commit: {short}");
        }
    }
}
/// Copy the relevant override directories from the cloned remote into the
/// current working directory. Server packs take `server_overrides`,
/// client installs take `client_overrides`; plain `overrides` always apply.
fn sync_overrides(remote_path: &Path, server_pack: bool) -> Result<()> {
    let side_specific = if server_pack {
        "server_overrides"
    } else {
        "client_overrides"
    };
    for dir_name in ["overrides", side_specific] {
        let src_dir = remote_path.join(dir_name);
        // Only existing directories are synced; missing ones are skipped.
        if src_dir.is_dir() {
            log::info!("Syncing {dir_name} directory...");
            copy_dir_recursive(&src_dir, Path::new("."))?;
        }
    }
    Ok(())
}
/// Recursively copy the contents of `src` into `dst`, creating `dst` as
/// needed; files are overwritten and the directory structure mirrored.
fn copy_dir_recursive(src: &Path, dst: &Path) -> Result<()> {
    // create_dir_all is a no-op on an existing directory.
    fs::create_dir_all(dst)?;
    for entry in fs::read_dir(src)? {
        let entry = entry?;
        let from = entry.path();
        let to = dst.join(entry.file_name());
        if from.is_dir() {
            copy_dir_recursive(&from, &to)?;
        } else {
            fs::copy(&from, &to)?;
        }
    }
    Ok(())
}

View file

@ -0,0 +1,121 @@
use std::path::{Path, PathBuf};
use crate::{cli::RemoteUpdateArgs, error::PakkerError, git, model::Config};
/// Update modpack from remote Git repository
///
/// This command updates the current modpack from its remote Git repository.
/// It fetches the latest changes from the remote and syncs overrides.
///
/// # Errors
///
/// Fails when run inside a modpack directory, when no remote has been
/// installed, or when the remote lockfile is missing.
pub fn execute(args: &RemoteUpdateArgs) -> Result<(), PakkerError> {
    // Check if lockfile exists in current directory - if it does, we're in a
    // modpack directory and should not update remote (use regular update
    // instead)
    let lockfile_path = PathBuf::from("pakku-lock.json");
    if lockfile_path.exists() {
        return Err(PakkerError::InvalidInput(
            "Cannot update remote from a modpack directory. Use 'update' command \
             instead."
                .to_string(),
        ));
    }
    // Remote directory for the cloned modpack
    let remote_dir = PathBuf::from(".pakku-remote");
    // Check if remote directory exists
    if !remote_dir.exists() {
        return Err(PakkerError::RemoteNotFound(
            "No remote found. Use 'remote' command to install a modpack first."
                .to_string(),
        ));
    }
    // Fetch updates from remote repository
    println!("Updating remote repository...");
    let remote_name = "origin";
    let ref_name = args.branch.as_deref().unwrap_or("HEAD");
    git::fetch_updates(&remote_dir, remote_name, ref_name, None)?;
    // NOTE(review): unlike the `remote` command, this only fetches and never
    // resets the working tree to the fetched ref — confirm whether a
    // `reset_to_ref` is intended here.
    // Read remote lockfile
    let remote_lockfile_path = remote_dir.join("pakku-lock.json");
    if !remote_lockfile_path.exists() {
        return Err(PakkerError::FileNotFound(
            "Remote lockfile not found".to_string(),
        ));
    }
    // Read remote config if it exists
    let remote_config_path = remote_dir.join("pakku.json");
    let _remote_config = if remote_config_path.exists() {
        // BUG FIX: `Config::load` takes the *directory* containing
        // pakku.json (as every other call site does — see remote.rs and the
        // "Load expects directory path" comments), not the file path itself;
        // passing the file path made this load always fail.
        match Config::load(&remote_dir) {
            Ok(config) => Some(config),
            Err(e) => {
                eprintln!("Warning: Could not read remote config: {e}");
                None
            },
        }
    } else {
        None
    };
    // Sync overrides from remote directory
    println!("Syncing overrides...");
    sync_overrides(&remote_dir)?;
    // Clean up remote directory
    std::fs::remove_dir_all(&remote_dir)?;
    println!("Remote modpack updated successfully.");
    Ok(())
}
/// Sync override files from remote directory to current directory
///
/// Reads the override directory list from the remote's config and copies
/// each existing one into the working directory. Silently succeeds when
/// the remote has no config or declares no overrides.
fn sync_overrides(remote_dir: &Path) -> Result<(), PakkerError> {
    let remote_config_path = remote_dir.join("pakku.json");
    if !remote_config_path.exists() {
        return Ok(());
    }
    // BUG FIX: `Config::load` expects the directory containing pakku.json,
    // not the pakku.json path itself (matches every other call site).
    let config = Config::load(remote_dir)?;
    // Get override directories from config
    let overrides = config.overrides;
    if overrides.is_empty() {
        return Ok(());
    }
    for override_path in overrides {
        let source = remote_dir.join(&override_path);
        let dest = PathBuf::from(&override_path);
        if source.exists() {
            // Copy override directory
            copy_directory(&source, &dest)?;
            println!(" Synced: {override_path}");
        }
    }
    Ok(())
}
/// Recursively copy a directory
fn copy_directory(src: &PathBuf, dest: &PathBuf) -> Result<(), PakkerError> {
if !dest.exists() {
std::fs::create_dir_all(dest)?;
}
for entry in std::fs::read_dir(src)? {
let entry = entry?;
let path = entry.path();
let dest_path = dest.join(entry.file_name());
if path.is_dir() {
copy_directory(&path, &dest_path)?;
} else {
std::fs::copy(&path, &dest_path)?;
}
}
Ok(())
}

View file

@ -0,0 +1,160 @@
use std::path::Path;
use crate::{
cli::RmArgs,
error::{PakkerError, Result},
model::LockFile,
ui_utils::{prompt_typo_suggestion, prompt_yes_no},
};
/// Remove one or more projects from the lockfile (or all of them with
/// `--all`), with typo suggestions for unknown names, a confirmation
/// prompt, and cleanup of dangling `pakku_links` afterwards.
///
/// # Errors
///
/// Returns `ProjectNotFound` when no removable project was identified;
/// propagates lockfile load/save and prompt errors.
pub fn execute(
    args: &RmArgs,
    global_yes: bool,
    lockfile_path: &Path,
    _config_path: &Path,
) -> Result<()> {
    let skip_prompts = global_yes;
    // Load expects directory path, so get parent directory
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let mut lockfile = LockFile::load(lockfile_dir)?;
    // Determine which projects to remove: every project for --all
    // (identified by pakku id or first slug), else the CLI arguments.
    let inputs: Vec<String> = if args.all {
        log::info!("Removing all projects from lockfile");
        lockfile
            .projects
            .iter()
            .filter_map(|p| {
                p.pakku_id
                    .clone()
                    .or_else(|| p.slug.values().next().cloned())
            })
            .collect()
    } else {
        args.inputs.clone()
    };
    if inputs.is_empty() {
        return if args.all {
            Err(PakkerError::ProjectNotFound(
                "No projects found in lockfile".to_string(),
            ))
        } else {
            Err(PakkerError::ProjectNotFound(
                "No projects specified".to_string(),
            ))
        };
    }
    log::info!("Removing projects: {inputs:?}");
    let mut removed_count = 0;
    let mut removed_ids = Vec::new();
    let mut projects_to_remove = Vec::new();
    // Collect all known project identifiers for typo suggestions
    let all_slugs: Vec<String> = lockfile
        .projects
        .iter()
        .flat_map(|p| {
            let mut ids = Vec::new();
            if let Some(ref pakku_id) = p.pakku_id {
                ids.push(pakku_id.clone());
            }
            ids.extend(p.slug.values().cloned());
            ids.extend(p.name.values().cloned());
            ids.extend(p.aliases.iter().cloned());
            ids
        })
        .collect();
    // First, identify all projects to remove
    let mut resolved_inputs = Vec::new();
    for input in &inputs {
        // Find project by various identifiers (id, slug, name, alias)
        if lockfile.projects.iter().any(|p| {
            p.pakku_id.as_deref() == Some(input)
                || p.slug.values().any(|s| s == input)
                || p.name.values().any(|n| n.eq_ignore_ascii_case(input))
                || p.aliases.contains(input)
        }) {
            resolved_inputs.push(input.clone());
        } else if !args.all {
            // Try typo suggestion (skipped under --all: those inputs came
            // straight from the lockfile and always resolve)
            if let Ok(Some(suggestion)) =
                prompt_typo_suggestion(input, &all_slugs, skip_prompts)
            {
                log::info!("Using suggested project: {suggestion}");
                resolved_inputs.push(suggestion);
            } else {
                log::warn!("Project not found: {input}");
            }
        }
    }
    // Now find the actual projects from resolved inputs (names collected
    // only for the confirmation listing below)
    for input in &resolved_inputs {
        if let Some(project) = lockfile.projects.iter().find(|p| {
            p.pakku_id.as_deref() == Some(input)
                || p.slug.values().any(|s| s == input)
                || p.name.values().any(|n| n.eq_ignore_ascii_case(input))
                || p.aliases.contains(input)
        }) {
            projects_to_remove.push(project.get_name());
        }
    }
    // Replace inputs with resolved_inputs for actual removal
    let inputs = resolved_inputs;
    if projects_to_remove.is_empty() {
        return Err(PakkerError::ProjectNotFound(
            "None of the specified projects found".to_string(),
        ));
    }
    // Ask for confirmation unless --yes flag is provided or --all with no
    // projects
    if !skip_prompts {
        println!("The following projects will be removed:");
        for name in &projects_to_remove {
            println!(" - {name}");
        }
        if !prompt_yes_no("Do you want to continue?", false, skip_prompts)? {
            println!("Removal cancelled.");
            return Ok(());
        }
    }
    // Now actually remove the projects
    for input in &inputs {
        if let Some(pos) = lockfile.projects.iter().position(|p| {
            p.pakku_id.as_deref() == Some(input)
                || p.slug.values().any(|s| s == input)
                || p.name.values().any(|n| n.eq_ignore_ascii_case(input))
                || p.aliases.contains(input)
        }) {
            let project = lockfile.projects.remove(pos);
            log::info!("Removed: {}", project.get_name());
            if let Some(pakku_id) = project.pakku_id.clone() {
                removed_ids.push(pakku_id);
            }
            removed_count += 1;
        }
    }
    // Clean up pakku_links from all remaining projects so no project keeps
    // a link to an id that no longer exists
    for project in &mut lockfile.projects {
        project
            .pakku_links
            .retain(|link| !removed_ids.contains(link));
    }
    // Save lockfile
    lockfile.save(lockfile_dir)?;
    log::info!("Successfully removed {removed_count} project(s)");
    Ok(())
}

View file

@ -0,0 +1,156 @@
use std::{collections::HashMap, path::Path, str::FromStr};
use crate::{
cli::SetArgs,
error::PakkerError,
model::{Config, LockFile, ProjectSide, ProjectType, Target, UpdateStrategy},
};
/// Set lockfile-level properties (target, MC versions, loaders) or
/// per-project properties (type, side, update strategy, redistributable).
///
/// Lockfile-level flags take precedence: if any of them is given, the
/// project input is ignored for this invocation.
///
/// # Errors
///
/// Returns `InvalidInput` for malformed values or when neither a project
/// nor a lockfile property was supplied; `ProjectNotFound` for unknown
/// project identifiers.
pub fn execute(
    args: &SetArgs,
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<(), PakkerError> {
    // Load expects directory path, so get parent directory
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
    let mut lockfile = LockFile::load(lockfile_dir)?;
    let config = Config::load(config_dir)?;
    // Check if we're modifying lockfile properties or project properties
    let is_lockfile_modification = args.target.is_some()
        || args.mc_versions.is_some()
        || args.loaders.is_some();
    if is_lockfile_modification {
        // Modify lockfile properties
        if let Some(target_str) = &args.target {
            let target = Target::from_str(target_str).map_err(|e| {
                PakkerError::InvalidInput(format!("Invalid target: {e}"))
            })?;
            lockfile.target = Some(target);
            println!("Set target to: {target:?}");
        }
        if let Some(mc_versions_str) = &args.mc_versions {
            // Comma-separated list; blank segments are dropped.
            let mc_versions: Vec<String> = mc_versions_str
                .split(',')
                .map(|s| s.trim().to_string())
                .filter(|s| !s.is_empty())
                .collect();
            if mc_versions.is_empty() {
                return Err(PakkerError::InvalidInput(
                    "At least one Minecraft version is required".to_string(),
                ));
            }
            // Validate that all projects are compatible with new MC versions
            // (warn only — the change is still applied)
            for project in &lockfile.projects {
                let compatible = project
                    .files
                    .iter()
                    .any(|file| file.mc_versions.iter().any(|v| mc_versions.contains(v)));
                if !compatible {
                    eprintln!(
                        "Warning: Project '{}' has no files compatible with new MC \
                         versions",
                        project.get_name()
                    );
                }
            }
            lockfile.mc_versions.clone_from(&mc_versions);
            println!("Set Minecraft versions to: {mc_versions:?}");
        }
        if let Some(loaders_str) = &args.loaders {
            // Comma-separated "name=version" pairs.
            let mut loaders: HashMap<String, String> = HashMap::new();
            for pair in loaders_str.split(',') {
                let parts: Vec<&str> = pair.split('=').collect();
                if parts.len() != 2 {
                    return Err(PakkerError::InvalidInput(format!(
                        "Invalid loader format '{pair}'. Expected 'name=version'"
                    )));
                }
                loaders
                    .insert(parts[0].trim().to_string(), parts[1].trim().to_string());
            }
            if loaders.is_empty() {
                return Err(PakkerError::InvalidInput(
                    "At least one loader is required".to_string(),
                ));
            }
            let loader_names: Vec<String> = loaders.keys().cloned().collect();
            // Validate that all projects are compatible with new loaders
            // (files with no declared loaders are treated as compatible)
            for project in &lockfile.projects {
                let compatible = project.files.iter().any(|file| {
                    file.loaders.is_empty()
                        || file.loaders.iter().any(|l| loader_names.contains(l))
                });
                if !compatible {
                    eprintln!(
                        "Warning: Project '{}' has no files compatible with new loaders",
                        project.get_name()
                    );
                }
            }
            lockfile.loaders.clone_from(&loaders);
            println!("Set loaders to: {loaders:?}");
        }
        lockfile.save(lockfile_dir)?;
        println!("Lockfile properties updated successfully");
    } else if let Some(input) = &args.input {
        // Modify project properties; the block scopes the mutable borrow so
        // the lockfile can be saved afterwards.
        let project_name = {
            let project = lockfile
                .projects
                .iter_mut()
                .find(|p| p.matches_input(input))
                .ok_or_else(|| PakkerError::ProjectNotFound(input.clone()))?;
            if let Some(type_str) = &args.r#type {
                let project_type =
                    ProjectType::from_str(type_str).map_err(PakkerError::InvalidInput)?;
                project.r#type = project_type;
            }
            if let Some(side_str) = &args.side {
                let side =
                    ProjectSide::from_str(side_str).map_err(PakkerError::InvalidInput)?;
                project.side = side;
            }
            if let Some(strategy_str) = &args.strategy {
                let strategy = UpdateStrategy::from_str(strategy_str)
                    .map_err(PakkerError::InvalidInput)?;
                project.update_strategy = strategy;
            }
            if let Some(redistributable) = args.redistributable {
                project.redistributable = redistributable;
            }
            project.get_name()
        };
        lockfile.save(lockfile_dir)?;
        // NOTE(review): config is saved unmodified here — presumably to
        // keep the two files' timestamps in sync; confirm this is intended.
        config.save(config_dir)?;
        println!("Updated project: {project_name}");
    } else {
        return Err(PakkerError::InvalidInput(
            "Either provide a project identifier or lockfile properties to modify"
                .to_string(),
        ));
    }
    Ok(())
}

View file

@ -0,0 +1,400 @@
use std::{collections::HashMap, path::Path, sync::Arc};
use futures::stream::{FuturesUnordered, StreamExt};
use indicatif::{ProgressBar, ProgressStyle};
use tokio::sync::Semaphore;
use yansi::Paint;
use crate::{
error::{ErrorSeverity, Result},
model::{Config, LockFile, Project},
platform::create_platform,
};
/// Entry point for the `status` command.
///
/// Shows modpack metadata, checks every project in the lockfile for
/// available updates (concurrently when `parallel` is set), prints the
/// results plus any per-project warnings/errors, and finally offers to
/// run the `update` command when updates were found.
///
/// # Errors
/// Fails when the lockfile or config cannot be loaded, or when the
/// delegated update run fails; individual per-project check failures are
/// only reported, never fatal.
pub async fn execute(
    parallel: bool,
    skip_prompts: bool,
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<()> {
    // `load` expects a directory, so use the parent of each file path and
    // fall back to the current directory.
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
    let lockfile = LockFile::load(lockfile_dir)?;
    let config = Config::load(config_dir)?;
    // Display modpack metadata
    display_modpack_info(&lockfile, &config);
    println!();
    // Check for updates (sequential or parallel)
    let (updates, errors) = if parallel {
        check_updates_parallel(&lockfile).await?
    } else {
        check_updates_sequential(&lockfile).await?
    };
    // Display results
    display_update_results(&updates);
    // Display errors if any, categorized by severity
    if !errors.is_empty() {
        println!();
        // NOTE(review): severity is inferred by substring-matching the
        // rendered message, which silently breaks if error wording changes
        // — consider carrying `ErrorSeverity` through instead.
        let (warnings, errors_only): (Vec<_>, Vec<_>) =
            errors.iter().partition(|(_, err)| {
                // Network errors and "not found" are warnings (non-fatal)
                err.contains("Failed to check") || err.contains("not found")
            });
        // Display warnings (ErrorSeverity::Warning)
        if !warnings.is_empty() {
            let severity = ErrorSeverity::Warning;
            println!("{}", format_severity_header(severity, "Warnings"));
            for (project, error) in &warnings {
                println!(" - {}: {}", project.yellow(), error.dim());
            }
        }
        // Display errors (ErrorSeverity::Error)
        if !errors_only.is_empty() {
            let severity = ErrorSeverity::Error;
            println!("{}", format_severity_header(severity, "Errors"));
            for (project, error) in &errors_only {
                println!(" - {}: {}", project.yellow(), error.red());
            }
        }
        // Log info level summary
        log::info!(
            "Update check completed with {} warning(s) and {} error(s)",
            warnings.len(),
            errors_only.len()
        );
    }
    // Prompt to update if there are updates available
    if !updates.is_empty() {
        println!();
        if crate::ui_utils::prompt_yes_no("Update now?", false, skip_prompts)? {
            // Delegate to the update command with "update everything"
            // semantics (no explicit inputs, `all: true`).
            let update_args = crate::cli::UpdateArgs {
                inputs: vec![],
                all: true,
            };
            crate::cli::commands::update::execute(
                update_args,
                true, // Auto-yes for status command
                lockfile_path,
                config_path,
            )
            .await?;
        }
    }
    Ok(())
}
/// Print a short human-readable summary of the modpack being managed:
/// name, version and author from the config, plus the Minecraft versions,
/// loaders and target platform recorded in the lockfile.
fn display_modpack_info(lockfile: &LockFile, config: &Config) {
    println!(
        "Managing {} modpack, version {}, by {}",
        config.name.cyan(),
        config.version.cyan(),
        config.author.as_deref().unwrap_or("Unknown").cyan()
    );
    // Render loaders as "name-version" pairs joined by commas.
    let loaders_str = lockfile
        .loaders
        .iter()
        .map(|(loader, version)| format!("{loader}-{version}"))
        .collect::<Vec<String>>()
        .join(", ");
    let mc_versions = lockfile.mc_versions.join(", ");
    println!(
        "on Minecraft version {}, loader {}, targeting platform {:?}.",
        mc_versions.cyan(),
        loaders_str.cyan(),
        lockfile.target
    );
}
/// A project for which at least one newer file was found on a platform.
#[derive(Debug)]
struct ProjectUpdate {
    /// Platform name -> project slug, copied from the lockfile entry.
    slug: HashMap<String, String>,
    /// Display name (first entry of the project's name map; may be empty).
    name: String,
    /// `Debug`-formatted project type, used only for display.
    project_type: String,
    /// `Debug`-formatted project side, used only for display.
    side: String,
    /// One entry per locked file that has a newer counterpart.
    file_updates: Vec<FileUpdate>,
}
/// A single detected change between a locked file and the latest file a
/// platform serves.
#[derive(Debug)]
struct FileUpdate {
    /// NOTE(review): populated from the file's `file_type`, not from a
    /// platform name — confirm the field name matches its contents.
    platform: String,
    /// File name currently recorded in the lockfile.
    old_filename: String,
    /// File name of the newer release.
    new_filename: String,
}
/// Check every project in the lockfile for updates, one at a time, while
/// driving a progress bar.
///
/// Returns the detected updates plus a `(project name, error message)`
/// pair for each project whose check failed; per-project failures do not
/// abort the scan.
#[expect(
    clippy::expect_used,
    reason = "progress bar template is a string literal and is always valid"
)]
async fn check_updates_sequential(
    lockfile: &LockFile,
) -> Result<(Vec<ProjectUpdate>, Vec<(String, String)>)> {
    let total = lockfile.projects.len();
    let mut updates = Vec::new();
    let mut errors = Vec::new();
    // Create progress bar
    let pb = ProgressBar::new(total as u64);
    pb.set_style(
        ProgressStyle::default_bar()
            .template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}")
            .expect("progress bar template is valid")
            .progress_chars("#>-"),
    );
    pb.set_message("Checking for updates...");
    for project in &lockfile.projects {
        // NOTE(review): other code paths use `Project::get_name()`; this
        // reads the first entry of `name` directly — confirm both agree.
        let project_name = project
            .name
            .values()
            .next()
            .cloned()
            .unwrap_or_else(|| "Unknown".to_string());
        pb.set_message(format!("Checking {project_name}..."));
        match check_project_update(project, lockfile).await {
            Ok(update_opt) => {
                if let Some(update) = update_opt {
                    updates.push(update);
                }
            },
            Err(e) => {
                // Record the failure but keep scanning the rest.
                errors.push((project_name.clone(), e.to_string()));
            },
        }
        pb.inc(1);
    }
    pb.finish_with_message(format!("Checked {total} projects"));
    println!(); // Add blank line after progress bar
    Ok((updates, errors))
}
/// Check every project in the lockfile for updates concurrently, with at
/// most 10 requests in flight at once (bounded by a semaphore).
///
/// Returns the detected updates plus a `(project name, error message)`
/// pair for each project whose check failed; per-project failures do not
/// abort the scan.
#[expect(
    clippy::expect_used,
    reason = "progress bar template and semaphore acquire are infallible in \
              this context"
)]
async fn check_updates_parallel(
    lockfile: &LockFile,
) -> Result<(Vec<ProjectUpdate>, Vec<(String, String)>)> {
    let total = lockfile.projects.len();
    let semaphore = Arc::new(Semaphore::new(10));
    let mut futures = FuturesUnordered::new();
    // Progress bar shared by every in-flight future.
    let pb = Arc::new(ProgressBar::new(total as u64));
    pb.set_style(
        ProgressStyle::default_bar()
            .template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}")
            .expect("progress bar template is valid")
            .progress_chars("#>-"),
    );
    pb.set_message("Checking for updates (parallel)...");
    for project in &lockfile.projects {
        let project = project.clone();
        let sem = semaphore.clone();
        let pb_clone = pb.clone();
        // The futures are all drained before this function returns, so the
        // lockfile can be shared by reference instead of being cloned in
        // its entirety once per project (as the previous version did).
        futures.push(async move {
            let _permit = sem.acquire().await.expect("semaphore closed unexpectedly");
            let result = check_project_update(&project, lockfile).await;
            pb_clone.inc(1);
            (project, result)
        });
    }
    let mut updates = Vec::new();
    let mut errors = Vec::new();
    while let Some((project, result)) = futures.next().await {
        match result {
            Ok(update_opt) => {
                if let Some(update) = update_opt {
                    updates.push(update);
                }
            },
            Err(e) => {
                let project_name = project
                    .name
                    .values()
                    .next()
                    .cloned()
                    .unwrap_or_else(|| "Unknown".to_string());
                errors.push((project_name, e.to_string()));
            },
        }
    }
    pb.finish_with_message(format!("Checked {total} projects"));
    println!(); // Add blank line after progress bar
    Ok((updates, errors))
}
async fn check_project_update(
project: &Project,
lockfile: &LockFile,
) -> Result<Option<ProjectUpdate>> {
// Get primary slug
let slug = project
.slug
.values()
.next()
.ok_or_else(|| {
crate::error::PakkerError::InvalidProject("No slug found".to_string())
})?
.clone();
// Try each platform in project
for platform_name in project.id.keys() {
let api_key = get_api_key(platform_name);
let Ok(platform) = create_platform(platform_name, api_key) else {
continue;
};
let loaders: Vec<String> = lockfile.loaders.keys().cloned().collect();
if let Ok(updated_project) = platform
.request_project_with_files(&slug, &lockfile.mc_versions, &loaders)
.await
{
// Compare files to detect updates
let file_updates = detect_file_updates(project, &updated_project);
if !file_updates.is_empty() {
return Ok(Some(ProjectUpdate {
slug: project.slug.clone(),
name: project.name.values().next().cloned().unwrap_or_default(),
project_type: format!("{:?}", project.r#type),
side: format!("{:?}", project.side),
file_updates,
}));
}
return Ok(None); // No updates
}
}
Err(crate::error::PakkerError::PlatformApiError(
"Failed to check for updates on any platform".to_string(),
))
}
/// Compare the locked files against freshly fetched ones and collect every
/// file whose platform id changed, which signals that a newer release is
/// available.
///
/// Files are paired by their `file_type`; a locked file without a
/// counterpart in `updated` is ignored.
fn detect_file_updates(
    current: &Project,
    updated: &Project,
) -> Vec<FileUpdate> {
    current
        .files
        .iter()
        .filter_map(|old_file| {
            let new_file = updated
                .files
                .iter()
                .find(|f| f.file_type == old_file.file_type)?;
            // An unchanged id means the platform still serves the same file.
            (new_file.id != old_file.id).then(|| FileUpdate {
                platform: old_file.file_type.clone(),
                old_filename: old_file.file_name.clone(),
                new_filename: new_file.file_name.clone(),
            })
        })
        .collect()
}
/// Pretty-print the collected updates, or a success message when there is
/// nothing to do. Project names are rendered as terminal hyperlinks when a
/// platform URL can be derived from the slug.
fn display_update_results(updates: &[ProjectUpdate]) {
    if updates.is_empty() {
        println!("{}", "✓ All projects are up to date".green());
        return;
    }
    println!();
    println!("{}", "📦 Updates Available:".cyan().bold());
    println!();
    for update in updates {
        // Derive a project URL from the first (platform, slug) pair, if any.
        let project_url = update
            .slug
            .iter()
            .next()
            .map(|(platform, slug)| match platform.as_str() {
                "modrinth" => crate::ui_utils::modrinth_project_url(slug),
                "curseforge" => crate::ui_utils::curseforge_project_url(slug),
                _ => String::new(),
            })
            .unwrap_or_default();
        // Fall back to the plain (non-linked) name when no URL is known.
        let rendered_name = if project_url.is_empty() {
            update.name.yellow().to_string()
        } else {
            crate::ui_utils::hyperlink(
                &project_url,
                &update.name.yellow().to_string(),
            )
        };
        println!(
            "{} ({}, {})",
            rendered_name, update.project_type, update.side
        );
        for file_update in &update.file_updates {
            println!(
                " • {}: {} → {}",
                file_update.platform.cyan(),
                file_update.old_filename.dim(),
                file_update.new_filename.green()
            );
        }
        println!();
    }
    println!(
        "{}",
        format!("{} project(s) need updates", updates.len()).yellow()
    );
}
/// Look up the API token for a platform from the environment.
///
/// Returns `None` for unknown platforms or when the corresponding
/// environment variable is unset.
fn get_api_key(platform: &str) -> Option<String> {
    let var_name = match platform {
        "modrinth" => "MODRINTH_TOKEN",
        "curseforge" => "CURSEFORGE_API_KEY",
        _ => return None,
    };
    std::env::var(var_name).ok()
}
/// Format severity header with appropriate color
fn format_severity_header(severity: ErrorSeverity, label: &str) -> String {
match severity {
ErrorSeverity::Error => format!("{label}:").red().to_string(),
ErrorSeverity::Warning => format!("{label}:").yellow().to_string(),
ErrorSeverity::Info => format!("{label}:").cyan().to_string(),
}
}

View file

@ -0,0 +1,309 @@
use std::{
collections::{HashMap, HashSet},
fs,
path::{Path, PathBuf},
};
use indicatif::{ProgressBar, ProgressStyle};
use crate::{
cli::SyncArgs,
error::{PakkerError, Result},
fetch::Fetcher,
model::{Config, LockFile},
platform::{CurseForgePlatform, ModrinthPlatform, PlatformClient},
};
/// A difference detected between the lockfile and the mods directory.
enum SyncChange {
    /// A file present on disk but not tracked by the lockfile:
    /// (path to the file, display name — the file name itself).
    Addition(PathBuf, String),
    /// A tracked project whose file is missing from disk; carries the
    /// project's pakku id.
    Removal(String),
}
/// Entry point for the `sync` command.
///
/// Diffs the lockfile against the mods directory on disk, then — after
/// per-item confirmation — adds untracked jar files to the lockfile (via
/// a batched Modrinth hash lookup with a per-file fallback) and removes
/// lockfile entries whose files are gone. Finally re-fetches anything
/// missing so disk and lockfile agree.
///
/// # Errors
/// Fails when the lockfile/config cannot be loaded or saved, when a
/// confirmed removal cannot be applied, or when the final fetch fails.
#[expect(
    clippy::expect_used,
    reason = "spinner template is a string literal and is always valid"
)]
pub async fn execute(
    args: SyncArgs,
    global_yes: bool,
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<()> {
    log::info!("Synchronizing with lockfile");
    // `load` expects a directory path, so use the parent of each file.
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
    let mut lockfile = LockFile::load(lockfile_dir)?;
    let config = Config::load(config_dir)?;
    // Detect changes
    let changes = detect_changes(&lockfile, &config);
    if changes.is_empty() {
        println!("✓ Everything is in sync");
        return Ok(());
    }
    // Split the change list so the --additions / --removals flags can
    // select a subset below.
    let mut additions = Vec::new();
    let mut removals = Vec::new();
    for change in changes {
        match change {
            SyncChange::Addition(path, name) => additions.push((path, name)),
            SyncChange::Removal(id) => removals.push(id),
        }
    }
    // With neither flag given, both kinds of change are processed.
    let no_filter = !args.additions && !args.removals;
    let spinner = ProgressBar::new_spinner();
    spinner.set_style(
        ProgressStyle::default_spinner()
            .template("{spinner:.green} {msg}")
            .expect("spinner template is valid"),
    );
    if no_filter || args.additions {
        // Hash each confirmed file up front so additions can be resolved
        // with a single batched platform request.
        let mut file_hashes = Vec::new();
        for (file_path, _) in &additions {
            spinner
                .set_message(format!("Processing addition: {}", file_path.display()));
            if crate::ui_utils::prompt_yes_no(
                &format!("Add {} to lockfile?", file_path.display()),
                false,
                global_yes,
            )? && let Ok(file_data) = fs::read(file_path)
            {
                use sha1::Digest;
                let mut hasher = sha1::Sha1::new();
                hasher.update(&file_data);
                let hash =
                    crate::utils::hash::hash_to_hex(hasher.finalize().as_slice());
                file_hashes.push(FileHash {
                    path: file_path.clone(),
                    hash,
                });
            }
        }
        if !file_hashes.is_empty() {
            // Keep a copy for the per-file fallback path below.
            let fallback_hashes = file_hashes.clone();
            let result = add_files_batch(&mut lockfile, file_hashes).await;
            if let Err(e) = result {
                log::warn!(
                    "Batch lookup failed, falling back to individual lookups: {e}"
                );
                for fh in fallback_hashes {
                    if let Err(e) =
                        add_file_to_lockfile(&mut lockfile, &fh.path, &config).await
                    {
                        log::warn!("Failed to add {}: {}", fh.path.display(), e);
                    }
                }
            }
        }
    }
    if no_filter || args.removals {
        for pakku_id in &removals {
            if let Some(project) = lockfile
                .projects
                .iter()
                .find(|p| p.pakku_id.as_ref() == Some(pakku_id))
            {
                // Prefer the display name; fall back to the pakku id.
                let name = project
                    .name
                    .values()
                    .next()
                    .map(std::string::String::as_str)
                    .or(project.pakku_id.as_deref())
                    .unwrap_or("unknown");
                spinner.set_message(format!("Processing removal: {name}"));
                if crate::ui_utils::prompt_yes_no(
                    &format!("Remove {name} from lockfile?"),
                    false,
                    global_yes,
                )? {
                    lockfile
                        .remove_project(pakku_id)
                        .ok_or_else(|| PakkerError::ProjectNotFound(pakku_id.clone()))?;
                }
            }
        }
    }
    spinner.finish_and_clear();
    // Save changes
    lockfile.save(lockfile_dir)?;
    // NOTE(review): the fetcher root is hard-coded to "." rather than
    // derived from the lockfile/config location — confirm this is intended
    // when the command is run outside the pack directory.
    let fetcher = Fetcher::new(".");
    fetcher.sync(&lockfile, &config).await?;
    println!("✓ Sync complete");
    Ok(())
}
/// Diff the lockfile against the mods directory on disk.
///
/// Produces an `Addition` for every untracked `.jar` file found in the
/// configured mods path and a `Removal` for every tracked file that no
/// longer exists. The directory is read only once (the previous version
/// scanned it twice); an unreadable or missing directory is treated as
/// empty, so every tracked file becomes a removal candidate.
fn detect_changes(lockfile: &LockFile, config: &Config) -> Vec<SyncChange> {
    let mut changes = Vec::new();
    // Resolve the mods directory, defaulting to "mods".
    let mods_path = config
        .paths
        .get("mods")
        .map_or("mods", std::string::String::as_str);
    // Map every tracked file path to the owning project's pakku id.
    let mut lockfile_files: HashMap<PathBuf, String> = HashMap::new();
    for project in &lockfile.projects {
        if let Some(ref pakku_id) = project.pakku_id {
            for file in &project.files {
                let file_path = PathBuf::from(mods_path).join(&file.file_name);
                lockfile_files.insert(file_path, pakku_id.clone());
            }
        }
    }
    // Scan the filesystem once; keep directory order for additions and a
    // set view for O(1) removal lookups.
    let disk_files: Vec<PathBuf> = fs::read_dir(mods_path).map_or_else(
        |_| Vec::new(),
        |entries| {
            entries
                .flatten()
                .map(|e| e.path())
                .filter(|p| p.is_file())
                .collect()
        },
    );
    let filesystem_files: HashSet<&PathBuf> = disk_files.iter().collect();
    // Untracked .jar files are additions.
    for path in &disk_files {
        if path.extension().is_some_and(|ext| ext == "jar")
            && !lockfile_files.contains_key(path)
        {
            let name = path
                .file_name()
                .map(|n| n.to_string_lossy().to_string())
                .unwrap_or_default();
            changes.push(SyncChange::Addition(path.clone(), name));
        }
    }
    // Tracked files that vanished from disk are removals.
    for (lockfile_path, pakku_id) in &lockfile_files {
        if !filesystem_files.contains(lockfile_path) {
            changes.push(SyncChange::Removal(pakku_id.clone()));
        }
    }
    changes
}
/// Identify a single local file by content hash and, if a platform
/// recognizes it, add the corresponding project to the lockfile.
///
/// Lookup order: Modrinth first, then CurseForge. A file that neither
/// platform recognizes is reported and skipped — that is not an error.
///
/// # Errors
/// Fails only when the file itself cannot be read.
async fn add_file_to_lockfile(
    lockfile: &mut LockFile,
    file_path: &Path,
    _config: &Config,
) -> Result<()> {
    use sha1::Digest;
    // Try to identify the file by hash lookup
    let modrinth = ModrinthPlatform::new();
    let curseforge = CurseForgePlatform::new(None);
    // Compute the file's SHA-1 digest as a hex string.
    let file_data = fs::read(file_path)?;
    let mut hasher = sha1::Sha1::new();
    hasher.update(&file_data);
    let hash = crate::utils::hash::hash_to_hex(hasher.finalize().as_slice());
    // Try Modrinth first (SHA-1 hash)
    if let Ok(Some(project)) = modrinth.lookup_by_hash(&hash).await {
        lockfile.add_project(project);
        println!("✓ Added {} (from Modrinth)", file_path.display());
        return Ok(());
    }
    // NOTE(review): CurseForge fingerprints are Murmur2-based, but the
    // SHA-1 hex computed above is passed here — confirm that
    // `lookup_by_hash` really accepts SHA-1, otherwise this lookup can
    // never match.
    if let Ok(Some(project)) = curseforge.lookup_by_hash(&hash).await {
        lockfile.add_project(project);
        println!("✓ Added {} (from CurseForge)", file_path.display());
        return Ok(());
    }
    println!("⚠ Could not identify {}, skipping", file_path.display());
    Ok(())
}
/// A local file path paired with its SHA-1 digest (hex), used for the
/// batched platform lookup during sync.
#[derive(Clone)]
struct FileHash {
    path: PathBuf,
    hash: String,
}
async fn add_files_batch(
lockfile: &mut LockFile,
file_hashes: Vec<FileHash>,
) -> Result<()> {
if file_hashes.is_empty() {
return Ok(());
}
let modrinth = ModrinthPlatform::new();
let hashes: Vec<String> =
file_hashes.iter().map(|fh| fh.hash.clone()).collect();
let projects = modrinth
.request_projects_from_hashes(&hashes, "sha1")
.await?;
let mut matched_indices: std::collections::HashSet<usize> =
std::collections::HashSet::new();
let mut added_pakku_ids: std::collections::HashSet<String> =
std::collections::HashSet::new();
for project in &projects {
let pakku_id = match &project.pakku_id {
Some(id) => id.clone(),
None => continue,
};
if added_pakku_ids.contains(&pakku_id) {
continue;
}
for file_info in &project.files {
for (idx, fh) in file_hashes.iter().enumerate() {
if !matched_indices.contains(&idx)
&& file_info
.hashes
.get("sha1")
.map(std::string::String::as_str)
== Some(&fh.hash)
{
lockfile.add_project(project.clone());
added_pakku_ids.insert(pakku_id.clone());
matched_indices.insert(idx);
println!("✓ Added {} (from Modrinth)", fh.path.display());
break;
}
}
}
}
for (idx, fh) in file_hashes.iter().enumerate() {
if matched_indices.contains(&idx) {
continue;
}
println!("⚠ Could not identify {}, skipping", fh.path.display());
}
Ok(())
}

View file

@ -0,0 +1,41 @@
use std::path::Path;
use crate::{
cli::UnlinkArgs,
error::{PakkerError, Result},
model::LockFile,
};
/// Remove the dependency link `from -> to` between two lockfile projects.
///
/// # Errors
/// Fails when either project cannot be found, when the source project has
/// no pakku id, or when the lockfile cannot be loaded or saved.
pub fn execute(args: &UnlinkArgs, lockfile_path: &Path) -> Result<()> {
    log::info!("Unlinking {} -> {}", args.from, args.to);
    // `LockFile::load` expects the containing directory, not the file.
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let mut lockfile = LockFile::load(lockfile_dir)?;
    // Resolve the source project's id first so its immutable borrow ends
    // before the mutable lookup of the target project.
    let from_id = lockfile
        .projects
        .iter()
        .find(|p| p.matches_input(&args.from))
        .ok_or_else(|| PakkerError::ProjectNotFound(args.from.clone()))?
        .pakku_id
        .clone()
        .ok_or_else(|| {
            PakkerError::InvalidProject("From project has no pakku_id".to_string())
        })?;
    let to_project = lockfile
        .projects
        .iter_mut()
        .find(|p| p.matches_input(&args.to))
        .ok_or_else(|| PakkerError::ProjectNotFound(args.to.clone()))?;
    // Drop the link and persist the result.
    to_project.pakku_links.remove(&from_id);
    lockfile.save(lockfile_dir)?;
    log::info!("Successfully unlinked projects");
    Ok(())
}

View file

@ -0,0 +1,265 @@
use std::path::Path;
use indicatif::{ProgressBar, ProgressStyle};
use crate::{
cli::UpdateArgs,
error::{MultiError, PakkerError},
model::{Config, LockFile, UpdateStrategy},
ui_utils::{prompt_select, prompt_typo_suggestion, prompt_yes_no},
utils::FlexVer,
};
/// Entry point for the `update` command.
///
/// Updates either the projects named in `args.inputs` or, when none are
/// given, every project in the lockfile. Projects pinned with
/// `UpdateStrategy::None` are skipped. Unless `--yes`/`global_yes` or
/// `args.all` is set, each update is confirmed interactively and, when
/// several candidate files exist, the user may pick a version.
///
/// # Errors
/// Fails when the lockfile/config cannot be loaded or saved, when a named
/// project cannot be resolved (even after typo suggestions), or when the
/// update check failed for *every* selected project.
#[expect(
    clippy::expect_used,
    reason = "progress bar template is a string literal and is always valid"
)]
pub async fn execute(
    args: UpdateArgs,
    global_yes: bool,
    lockfile_path: &Path,
    config_path: &Path,
) -> Result<(), PakkerError> {
    let skip_prompts = global_yes;
    // Load expects directory path, so get parent directory
    let lockfile_dir = lockfile_path.parent().unwrap_or_else(|| Path::new("."));
    let config_dir = config_path.parent().unwrap_or_else(|| Path::new("."));
    let mut lockfile = LockFile::load(lockfile_dir)?;
    let _config = Config::load(config_dir)?;
    // Create platforms
    let platforms = super::add::create_all_platforms();
    // Collect every known identifier (pakku ids, slugs, names, aliases) so
    // a mistyped input can be matched against them for suggestions.
    let all_slugs: Vec<String> = lockfile
        .projects
        .iter()
        .flat_map(|p| {
            let mut ids = Vec::new();
            if let Some(ref pakku_id) = p.pakku_id {
                ids.push(pakku_id.clone());
            }
            ids.extend(p.slug.values().cloned());
            ids.extend(p.name.values().cloned());
            ids.extend(p.aliases.iter().cloned());
            ids
        })
        .collect();
    // No inputs means "update everything".
    let project_indices: Vec<_> = if args.inputs.is_empty() {
        (0..lockfile.projects.len()).collect()
    } else {
        let mut indices = Vec::new();
        for input in &args.inputs {
            if let Some((idx, _)) = lockfile
                .projects
                .iter()
                .enumerate()
                .find(|(_, p)| p.matches_input(input))
            {
                indices.push(idx);
            } else {
                // Try typo suggestion
                if let Ok(Some(suggestion)) =
                    prompt_typo_suggestion(input, &all_slugs, skip_prompts)
                    && let Some((idx, _)) = lockfile
                        .projects
                        .iter()
                        .enumerate()
                        .find(|(_, p)| p.matches_input(&suggestion))
                {
                    log::info!("Using suggested project: {suggestion}");
                    indices.push(idx);
                    continue;
                }
                return Err(PakkerError::ProjectNotFound(input.clone()));
            }
        }
        indices
    };
    // Capture count before consuming the iterator
    let total_projects = project_indices.len();
    // Create progress bar
    let pb = ProgressBar::new(total_projects as u64);
    pb.set_style(
        ProgressStyle::default_bar()
            .template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}")
            .expect("progress bar template is valid")
            .progress_chars("#>-"),
    );
    let mut skipped_pinned = 0;
    let mut update_errors = MultiError::new();
    for idx in project_indices {
        let old_project = &lockfile.projects[idx];
        // Skip projects with UpdateStrategy::None (pinned)
        if old_project.update_strategy == UpdateStrategy::None {
            pb.println(format!(
                " {} - Skipped (update strategy: NONE)",
                old_project.get_name()
            ));
            skipped_pinned += 1;
            pb.inc(1);
            continue;
        }
        pb.set_message(format!("Updating {}...", old_project.get_name()));
        let slug = old_project
            .slug
            .values()
            .next()
            .ok_or_else(|| PakkerError::InvalidProject("No slug found".into()))?;
        // Find updated project from one of the platforms; the first
        // platform that answers successfully wins.
        let mut updated_project = None;
        for platform in platforms.values() {
            if let Ok(project) = platform
                .request_project_with_files(
                    slug,
                    &lockfile.mc_versions,
                    &lockfile.loaders.keys().cloned().collect::<Vec<_>>(),
                )
                .await
            {
                updated_project = Some(project);
                break;
            }
        }
        if updated_project.is_none() {
            // Failed to fetch update info from any platform — record the
            // error but keep processing the remaining projects.
            update_errors.push(PakkerError::PlatformApiError(format!(
                "Failed to check updates for '{}'",
                old_project.get_name()
            )));
            pb.inc(1);
            continue;
        }
        if let Some(mut updated_project) = updated_project
            && !updated_project.files.is_empty()
            && let Some(old_file) = lockfile.projects[idx].files.first()
        {
            // Sort files by FlexVer if that strategy is set
            if old_project.update_strategy == UpdateStrategy::FlexVer {
                updated_project.files.sort_by(|a, b| {
                    // Use FlexVer for comparison - b.cmp(a) gives descending order
                    // (newest first)
                    FlexVer(&b.file_name).cmp(&FlexVer(&a.file_name))
                });
            }
            // Clone data needed for comparisons to avoid borrow issues
            let first_file = updated_project
                .files
                .first()
                .ok_or_else(|| PakkerError::InvalidProject("No files found".into()))?;
            let new_file_id = first_file.id.clone();
            let new_file_name = first_file.file_name.clone();
            let old_file_name = old_file.file_name.clone();
            let project_name = old_project.get_name();
            if new_file_id == old_file.id {
                pb.println(format!(" {project_name} - Already up to date"));
            } else {
                // Interactive confirmation and version selection if not using --yes
                // flag
                let mut should_update = skip_prompts || args.all;
                let mut selected_idx: Option<usize> = None;
                if !skip_prompts && !args.all {
                    // Suspend the bar so the prompts render cleanly.
                    pb.suspend(|| {
                        // First, confirm the update
                        let prompt_msg = format!(
                            "Update '{project_name}' from {old_file_name} to \
                             {new_file_name}?"
                        );
                        should_update =
                            prompt_yes_no(&prompt_msg, true, skip_prompts).unwrap_or(false);
                        // If confirmed and multiple versions available, offer selection
                        if should_update && updated_project.files.len() > 1 {
                            let choices: Vec<String> = updated_project
                                .files
                                .iter()
                                .map(|f| format!("{} ({})", f.file_name, f.id))
                                .collect();
                            let choice_refs: Vec<&str> =
                                choices.iter().map(std::string::String::as_str).collect();
                            if let Ok(idx) = prompt_select(
                                &format!("Select version for {project_name}:"),
                                &choice_refs,
                            ) {
                                selected_idx = Some(idx);
                            }
                        }
                    });
                }
                // Apply the file selection outside the closure. This `idx`
                // shadows the project index only inside the `if let`: it is
                // the chosen position in the candidate file list.
                if let Some(idx) = selected_idx
                    && idx > 0
                {
                    updated_project.files.swap(0, idx);
                }
                if should_update {
                    let selected_file =
                        updated_project.files.first().ok_or_else(|| {
                            PakkerError::InvalidProject(
                                "No files found after selection".into(),
                            )
                        })?;
                    pb.println(format!(
                        " {} -> {}",
                        old_file_name, selected_file.file_name
                    ));
                    // NOTE(review): this replaces the whole lockfile entry
                    // with the platform's version — confirm locally-tracked
                    // metadata (pakku_id, links, aliases, update strategy)
                    // survives `request_project_with_files`.
                    lockfile.projects[idx] = updated_project;
                } else {
                    pb.println(format!(" {project_name} - Skipped by user"));
                }
            }
        }
        pb.inc(1);
    }
    if skipped_pinned > 0 {
        pb.finish_with_message(format!(
            "Update complete ({skipped_pinned} pinned projects skipped)"
        ));
    } else {
        pb.finish_with_message("Update complete");
    }
    lockfile.save(lockfile_dir)?;
    // Report any errors that occurred during updates
    if !update_errors.is_empty() {
        let error_list = update_errors.errors();
        log::warn!(
            "{} project(s) encountered errors during update check",
            error_list.len()
        );
        for err in error_list {
            log::warn!(" - {err}");
        }
        // Only fail the whole command when every selected project errored.
        let all_errors = update_errors.into_errors();
        if all_errors.len() == total_projects {
            // All projects failed - return error
            let mut multi = MultiError::new();
            multi.extend(all_errors);
            return multi.into_result(());
        }
    }
    Ok(())
}

View file

@ -0,0 +1,259 @@
#[cfg(test)]
mod tests {
    // `parse_from` is a `clap::Parser` trait method, so the trait must be
    // in scope for the derive-based *Args types used below.
    use clap::Parser;

    use crate::{
        cli::{ExportArgs, ImportArgs, RmArgs},
        model::config::Config,
    };

    /// `--all` alone: the flag is set and no explicit inputs remain.
    #[test]
    fn test_rm_args_parsing_all_flag() {
        let args = RmArgs::parse_from(&["pakker", "rm", "--all"]);
        assert!(args.all);
        assert!(args.inputs.is_empty());
    }

    /// Positional inputs are collected in order when `--all` is absent.
    #[test]
    fn test_rm_args_parsing_multiple_inputs() {
        let args = RmArgs::parse_from(&["pakker", "rm", "mod1", "mod2", "mod3"]);
        assert!(!args.all);
        assert_eq!(args.inputs, vec!["mod1", "mod2", "mod3"]);
    }

    /// `--all --yes` combine without requiring inputs.
    #[test]
    fn test_rm_args_parsing_all_with_yes() {
        let args = RmArgs::parse_from(&["pakker", "rm", "--all", "--yes"]);
        assert!(args.all);
        assert!(args.yes);
        assert!(args.inputs.is_empty());
    }

    /// A single input combines with `--yes`.
    #[test]
    fn test_rm_args_parsing_with_inputs_and_yes() {
        let args = RmArgs::parse_from(&["pakker", "rm", "mod1", "--yes"]);
        assert!(!args.all);
        assert!(args.yes);
        assert_eq!(args.inputs, vec!["mod1"]);
    }

    /// `--deps` enables dependency import for the given archive.
    #[test]
    fn test_import_args_parsing_deps_flag() {
        let args =
            ImportArgs::parse_from(&["pakker", "import", "--deps", "pack.zip"]);
        assert!(args.deps);
        assert_eq!(args.file, "pack.zip");
    }

    /// Dependencies are off by default.
    #[test]
    fn test_import_args_parsing_no_deps_default() {
        let args = ImportArgs::parse_from(&["pakker", "import", "pack.zip"]);
        assert!(!args.deps);
        assert_eq!(args.file, "pack.zip");
    }

    /// `--deps` and `--yes` combine with a positional file.
    #[test]
    fn test_import_args_parsing_deps_with_yes() {
        let args = ImportArgs::parse_from(&[
            "pakker", "import", "--deps", "--yes", "pack.zip",
        ]);
        assert!(args.deps);
        assert!(args.yes);
        assert_eq!(args.file, "pack.zip");
    }

    /// `-D` is the short form of `--deps`.
    #[test]
    fn test_import_args_parsing_short_deps_flag() {
        let args = ImportArgs::parse_from(&["pakker", "import", "-D", "pack.zip"]);
        assert!(args.deps);
        assert_eq!(args.file, "pack.zip");
    }

    /// `--show-io-errors` sets only its own flag.
    #[test]
    fn test_export_args_parsing_show_io_errors() {
        let args =
            ExportArgs::parse_from(&["pakker", "export", "--show-io-errors"]);
        assert!(args.show_io_errors);
        assert!(!args.no_server);
    }

    /// `--no-server` sets only its own flag.
    #[test]
    fn test_export_args_parsing_no_server() {
        let args = ExportArgs::parse_from(&["pakker", "export", "--no-server"]);
        assert!(args.no_server);
        assert!(!args.show_io_errors);
    }

    /// Boolean flags and `--profile` combine freely.
    #[test]
    fn test_export_args_parsing_both_flags() {
        let args = ExportArgs::parse_from(&[
            "pakker",
            "export",
            "--show-io-errors",
            "--no-server",
            "--profile",
            "modrinth",
        ]);
        assert!(args.show_io_errors);
        assert!(args.no_server);
        assert_eq!(args.profile, Some("modrinth".to_string()));
    }

    /// `--output` and `--profile` both take values.
    #[test]
    fn test_export_args_parsing_with_output() {
        let args = ExportArgs::parse_from(&[
            "pakker",
            "export",
            "--output",
            "/tmp/export",
            "--profile",
            "curseforge",
        ]);
        assert_eq!(args.output, Some("/tmp/export".to_string()));
        assert_eq!(args.profile, Some("curseforge".to_string()));
    }

    /// `--pakker-layout` toggles the layout flag.
    #[test]
    fn test_export_args_parsing_pakker_layout() {
        let args = ExportArgs::parse_from(&["pakker", "export", "--pakker-layout"]);
        assert!(args.pakker_layout);
    }

    // NOTE: `Config` literals below use `..Config::default()` so they stay
    // valid when unrelated config fields (e.g. `file_count_preference`)
    // are added — the previous literals listed fields inconsistently and
    // one set could not compile.

    /// `export_server_side_projects_to_client = Some(true)` round-trips.
    #[test]
    fn test_config_with_export_server_side_projects_to_client_true() {
        let config = Config {
            name: "test-pack".to_string(),
            version: "1.0.0".to_string(),
            description: None,
            author: None,
            overrides: vec!["overrides".to_string()],
            server_overrides: None,
            client_overrides: None,
            paths: std::collections::HashMap::new(),
            projects: None,
            export_profiles: None,
            export_server_side_projects_to_client: Some(true),
            ..Config::default()
        };
        assert_eq!(config.export_server_side_projects_to_client, Some(true));
    }

    /// `export_server_side_projects_to_client = Some(false)` round-trips.
    #[test]
    fn test_config_with_export_server_side_projects_to_client_false() {
        let config = Config {
            name: "test-pack".to_string(),
            version: "1.0.0".to_string(),
            description: None,
            author: None,
            overrides: vec!["overrides".to_string()],
            server_overrides: None,
            client_overrides: None,
            paths: std::collections::HashMap::new(),
            projects: None,
            export_profiles: None,
            export_server_side_projects_to_client: Some(false),
            ..Config::default()
        };
        assert_eq!(config.export_server_side_projects_to_client, Some(false));
    }

    /// Omitting the option leaves it `None`.
    #[test]
    fn test_config_without_export_server_side_projects_to_client() {
        let config = Config {
            name: "test-pack".to_string(),
            version: "1.0.0".to_string(),
            description: None,
            author: None,
            overrides: vec!["overrides".to_string()],
            server_overrides: None,
            client_overrides: None,
            paths: std::collections::HashMap::new(),
            projects: None,
            export_profiles: None,
            export_server_side_projects_to_client: None,
            ..Config::default()
        };
        assert!(config.export_server_side_projects_to_client.is_none());
    }

    /// `Some(true)` serializes under its camelCase key and deserializes back.
    #[test]
    fn test_config_serialization_with_export_server_side() {
        let config = Config {
            name: "test-pack".to_string(),
            version: "1.0.0".to_string(),
            description: Some("A test modpack".to_string()),
            author: Some("Test Author".to_string()),
            overrides: vec!["overrides".to_string()],
            server_overrides: Some(vec![
                "server-overrides".to_string(),
            ]),
            client_overrides: Some(vec![
                "client-overrides".to_string(),
            ]),
            paths: std::collections::HashMap::new(),
            projects: None,
            export_profiles: None,
            export_server_side_projects_to_client: Some(true),
            ..Config::default()
        };
        let json = serde_json::to_string_pretty(&config).unwrap();
        assert!(json.contains("exportServerSideProjectsToClient"));
        assert!(json.contains("true"));
        let deserialized: Config = serde_json::from_str(&json).unwrap();
        assert_eq!(
            deserialized.export_server_side_projects_to_client,
            Some(true)
        );
    }

    /// `None` is skipped during serialization entirely.
    #[test]
    fn test_config_serialization_without_export_server_side() {
        let config = Config {
            name: "test-pack".to_string(),
            version: "1.0.0".to_string(),
            description: None,
            author: None,
            overrides: vec!["overrides".to_string()],
            server_overrides: None,
            client_overrides: None,
            paths: std::collections::HashMap::new(),
            projects: None,
            export_profiles: None,
            export_server_side_projects_to_client: None,
            ..Config::default()
        };
        let json = serde_json::to_string_pretty(&config).unwrap();
        assert!(!json.contains("exportServerSideProjectsToClient"));
        let deserialized: Config = serde_json::from_str(&json).unwrap();
        assert!(deserialized.export_server_side_projects_to_client.is_none());
    }

    /// The derived default leaves the option unset.
    #[test]
    fn test_config_default_has_no_export_server_side() {
        let config = Config::default();
        assert!(config.export_server_side_projects_to_client.is_none());
    }

    /// Every export flag and option can be supplied at once.
    #[test]
    fn test_export_args_all_flags_together() {
        let args = ExportArgs::parse_from(&[
            "pakker",
            "export",
            "--profile",
            "modrinth",
            "--output",
            "/tmp/out",
            "--pakker-layout",
            "--show-io-errors",
            "--no-server",
        ]);
        assert_eq!(args.profile, Some("modrinth".to_string()));
        assert_eq!(args.output, Some("/tmp/out".to_string()));
        assert!(args.pakker_layout);
        assert!(args.show_io_errors);
        assert!(args.no_server);
    }
}