initial commit

Signed-off-by: NotAShelf <raf@notashelf.dev>
Change-Id: Ife1391ed23a1e7f388b1b5eca90b9ea76a6a6964
This commit is contained in:
raf 2026-01-29 19:36:25 +03:00
commit ef28bdaeb4
Signed by: NotAShelf
GPG key ID: 29D95B64378DB4BF
63 changed files with 17292 additions and 0 deletions

3
.gitignore vendored Normal file
View file

@ -0,0 +1,3 @@
/target
ai-docs/sessions/*/
.opencode/sessions/

26
.rustfmt.toml Normal file
View file

@ -0,0 +1,26 @@
condense_wildcard_suffixes = true
doc_comment_code_block_width = 80
edition = "2024" # Keep in sync with Cargo.toml.
enum_discrim_align_threshold = 60
force_explicit_abi = false
force_multiline_blocks = true
format_code_in_doc_comments = true
format_macro_matchers = true
format_strings = true
group_imports = "StdExternalCrate"
hex_literal_case = "Upper"
imports_granularity = "Crate"
imports_layout = "HorizontalVertical"
inline_attribute_width = 60
match_block_trailing_comma = true
max_width = 80
newline_style = "Unix"
normalize_comments = true
normalize_doc_attributes = true
overflow_delimited_expr = true
struct_field_align_threshold = 60
tab_spaces = 2
unstable_features = true
use_field_init_shorthand = true
use_try_shorthand = true
wrap_comments = true

13
.taplo.toml Normal file
View file

@ -0,0 +1,13 @@
[formatting]
align_entries = true
column_width = 110
compact_arrays = false
reorder_inline_tables = false
reorder_keys = true
[[rule]]
include = [ "**/Cargo.toml" ]
keys = [ "package" ]
[rule.formatting]
reorder_keys = false

2969
Cargo.lock generated Normal file

File diff suppressed because it is too large Load diff

53
Cargo.toml Normal file
View file

@ -0,0 +1,53 @@
[package]
name = "pakker"
version = "0.1.0"
edition = "2024"
authors = [ "NotAShelf <raf@notashelf.dev>" ]
[dependencies]
anyhow = "1.0.100"
async-trait = "0.1.89"
clap = { version = "4.5.54", features = [ "derive" ] }
comfy-table = "7.1"
dialoguer = "0.12.0"
env_logger = "0.11.8"
futures = "0.3.31"
git2 = "0.20.3"
indicatif = "0.18.3"
keyring = "3.6.3"
libc = "0.2.180"
log = "0.4.29"
md-5 = "0.10.6"
once_cell = "1.20"
rand = "0.9.2"
regex = "1.12"
reqwest = { version = "0.13.1", features = [ "json" ] }
serde = { version = "1.0.228", features = [ "derive" ] }
serde_json = "1.0.149"
sha1 = "0.10.6"
sha2 = "0.10.0"
strsim = "0.11.1"
tempfile = "3.24.0"
textwrap = "0.16"
thiserror = "2.0.17"
tokio = { version = "1.49.0", features = [ "full" ] }
walkdir = "2.5.0"
yansi = "1.0.1"
zip = "7.1.0"
[dev-dependencies]
mockito = "1.7.1"
tempfile = "3.24.0"
[[bin]]
name = "pakker"
path = "src/main.rs"
# Optimize crypto stuff. Building them with optimizations makes that build script
# run ~5x faster, more than offsetting the additional build time added to the
# libraries themselves.
[profile.dev.package.sha2]
opt-level = 3
[profile.dev.package.sha1]
opt-level = 3

27
build.rs Normal file
View file

@ -0,0 +1,27 @@
use std::fs;
fn main() {
  // Build-script entry point: only emits a rerun directive so Cargo re-runs
  // this script when build.rs itself changes.
  //
  // NOTE(review): `create_pakku_symlink` below is never invoked from here,
  // so the build script currently performs no other work — confirm whether
  // the symlink creation was meant to happen at build time.
  println!("cargo:rerun-if-changed=build.rs");
}
#[cfg(unix)]
pub fn create_pakku_symlink() {
  // Creates (or refreshes) a `pakku` symlink next to the `pakker` binary so
  // the tool can also be invoked under its legacy name.
  //
  // NOTE(review): this function lives in build.rs, whose `main` never calls
  // it, and `std::env::current_exe()` inside a build script resolves to the
  // build script's own executable rather than the final `pakker` binary —
  // confirm where this is actually meant to run.
  let exe_path =
    std::env::current_exe().expect("Failed to get current exe path");
  let exe_dir = exe_path.parent().expect("Failed to get exe directory");
  let pakker_path = exe_dir.join("pakker");
  let pakku_path = exe_dir.join("pakku");
  // Only link when the target binary actually exists.
  if pakker_path.exists() {
    if pakku_path.exists() {
      // Best-effort removal of a stale link; errors deliberately ignored.
      let _ = fs::remove_file(&pakku_path);
    }
    // Best-effort symlink creation; failure is non-fatal.
    let _ = std::os::unix::fs::symlink(&pakker_path, &pakku_path);
  }
}
#[cfg(not(unix))]
pub fn create_pakku_symlink() {
  // No-op on non-Unix systems: symbolic links are not portably available.
  // Fixed the user-visible message: "a Unix system", not "an Unix system"
  // ("Unix" starts with a consonant sound).
  println!("This only works on a Unix system! Skipping Pakku symlink.");
}

603
src/cli.rs Normal file
View file

@ -0,0 +1,603 @@
pub mod commands;
use clap::{Args, Parser, Subcommand};
use crate::model::{
enums::{ProjectSide, ProjectType, UpdateStrategy},
fork::RefType,
};
// Top-level CLI definition parsed by clap's derive API. The `///` doc
// comments on fields and variants double as user-visible help text, so they
// are intentionally left untouched here.
#[derive(Parser)]
#[clap(name = "pakker")]
#[clap(about = "A multiplatform modpack manager for Minecraft", long_about = None)]
pub struct Cli {
  /// Enable verbose output (-v for info, -vv for debug, -vvv for trace)
  // Repeatable counter flag; the count maps onto a log level in main.
  #[clap(short, long, action = clap::ArgAction::Count)]
  pub verbose: u8,
  // The selected subcommand; required (there is no default command).
  #[clap(subcommand)]
  pub command: Commands,
}
// One variant per subcommand. clap derives the kebab-case command name from
// the variant name unless overridden (see `AddPrj`, renamed to "add-prj").
#[derive(Subcommand)]
pub enum Commands {
  /// Initialize a new modpack project
  Init(InitArgs),
  /// Import an existing modpack
  Import(ImportArgs),
  /// Add projects to the modpack
  Add(AddArgs),
  /// Add projects with explicit platform specification (non-interactive)
  #[clap(name = "add-prj", alias = "prj")]
  AddPrj(AddPrjArgs),
  /// Remove projects from the modpack
  Rm(RmArgs),
  /// Update projects
  Update(UpdateArgs),
  /// List projects in the modpack
  Ls(LsArgs),
  /// Set project properties
  Set(SetArgs),
  /// Link projects together
  Link(LinkArgs),
  /// Unlink projects
  Unlink(UnlinkArgs),
  /// Show differences between local and remote
  Diff(DiffArgs),
  /// Fetch project files
  Fetch(FetchArgs),
  /// Sync projects (fetch + update)
  Sync(SyncArgs),
  /// Export modpack
  Export(ExportArgs),
  /// Manage remote repositories
  Remote(RemoteArgs),
  /// Update modpack from remote Git repository
  RemoteUpdate(RemoteUpdateArgs),
  /// Check for available updates
  Status(StatusArgs),
  /// Inspect project details
  Inspect(InspectArgs),
  /// Manage API credentials
  Credentials(CredentialsArgs),
  /// Configure modpack properties
  Cfg(CfgArgs),
  /// Manage fork configuration
  Fork(ForkArgs),
}
// Arguments for `init`: every field is optional or defaulted so a bare
// `pakker init` works out of the box.
#[derive(Args)]
pub struct InitArgs {
  /// Modpack name
  #[clap(short, long)]
  pub name: Option<String>,
  /// Modpack version
  // Uppercase `-V` avoids clashing with `-v` used by `loader_version` below.
  #[clap(short = 'V', long)]
  pub version: Option<String>,
  /// Target platform
  #[clap(short, long, default_value = "multiplatform")]
  pub target: String,
  /// Minecraft version
  #[clap(short, long, default_value = "1.20.1")]
  pub mc_version: String,
  /// Mod loader
  #[clap(short, long, default_value = "fabric")]
  pub loader: String,
  /// Mod loader version
  #[clap(short = 'v', long, default_value = "latest")]
  pub loader_version: String,
}
// Arguments for `import`.
#[derive(Args)]
pub struct ImportArgs {
  /// Path to modpack file
  // Positional: the file to import.
  pub file: String,
  /// Skip confirmation prompts
  #[clap(short, long)]
  pub yes: bool,
}
// Arguments for the interactive `add` subcommand; the platform for each
// input is auto-detected by the command handler.
#[derive(Args)]
pub struct AddArgs {
  /// Project identifiers to add
  #[clap(required = true)]
  pub inputs: Vec<String>,
  /// Project type (mod, resourcepack, shader, datapack, world)
  #[clap(short = 't', long = "type")]
  pub project_type: Option<ProjectType>,
  /// Skip resolving dependencies
  #[clap(short = 'D', long)]
  pub no_deps: bool,
  /// Update if already exists
  #[clap(short, long)]
  pub update: bool,
  /// Skip confirmation prompts
  #[clap(short, long)]
  pub yes: bool,
}
// Arguments for `add-prj`: non-interactive add where each source platform is
// named explicitly. At least one of --cf/--mr/--gh is required; that rule is
// enforced at runtime by the command handler, not by clap.
#[derive(Args)]
pub struct AddPrjArgs {
  /// `CurseForge` project slug or ID (optional file ID: `slug#file_id`)
  #[clap(long = "cf", alias = "curseforge")]
  pub curseforge: Option<String>,
  /// Modrinth project slug or ID (optional file ID: `slug#file_id`)
  #[clap(long = "mr", alias = "modrinth")]
  pub modrinth: Option<String>,
  /// GitHub repository (format: owner/repo or owner/repo#tag)
  #[clap(long = "gh", alias = "github")]
  pub github: Option<String>,
  /// Project type (mod, resourcepack, shader, datapack, world)
  #[clap(short = 't', long = "type")]
  pub project_type: Option<ProjectType>,
  /// Project side (client, server, both)
  #[clap(long)]
  pub side: Option<ProjectSide>,
  /// Update strategy (latest, none)
  #[clap(long)]
  pub strategy: Option<UpdateStrategy>,
  /// Redistributable flag
  #[clap(long)]
  pub redistributable: Option<bool>,
  /// Subpath for project file placement
  #[clap(long)]
  pub subpath: Option<String>,
  /// Project aliases (can be specified multiple times)
  #[clap(long = "alias")]
  pub aliases: Vec<String>,
  /// Export flag (whether to include in exports)
  #[clap(long)]
  pub export: Option<bool>,
  /// Skip resolving dependencies
  #[clap(short = 'D', long = "no-deps")]
  pub no_deps: bool,
  /// Skip confirmation prompts
  #[clap(short, long)]
  pub yes: bool,
}
// Arguments for `rm`.
#[derive(Args)]
pub struct RmArgs {
  /// Project identifiers to remove
  #[clap(required = true)]
  pub inputs: Vec<String>,
  /// Skip confirmation prompt
  #[clap(short, long)]
  pub yes: bool,
}
#[derive(Args)]
pub struct UpdateArgs {
/// Projects to update (empty = all)
#[arg(value_name = "PROJECT")]
pub inputs: Vec<String>,
/// Skip confirmation prompts
#[arg(short, long)]
pub yes: bool,
}
// Arguments for `ls`.
#[derive(Args)]
pub struct LsArgs {
  /// Show detailed information
  #[clap(short, long)]
  pub detailed: bool,
  /// Add update information for projects
  #[clap(short = 'c', long = "check-updates")]
  pub check_updates: bool,
  /// Maximum length for project names
  #[clap(long = "name-max-length")]
  pub name_max_length: Option<usize>,
}
// Arguments for `set`: mixes per-project properties (require `input`) with
// lockfile-wide properties (target/mc_versions/loaders).
#[derive(Args)]
pub struct SetArgs {
  /// Project identifier (optional for lockfile properties)
  pub input: Option<String>,
  /// Project type
  // Raw identifier: `type` is a Rust keyword.
  #[clap(long)]
  pub r#type: Option<String>,
  /// Project side (client/server/both)
  #[clap(long)]
  pub side: Option<String>,
  /// Update strategy (latest/none)
  #[clap(long)]
  pub strategy: Option<String>,
  /// Redistributable flag
  #[clap(long)]
  pub redistributable: Option<bool>,
  /// Change the target of the pack (curseforge, modrinth, multiplatform)
  #[clap(short = 't', long)]
  pub target: Option<String>,
  /// Change the minecraft versions (comma-separated)
  #[clap(short = 'v', long)]
  pub mc_versions: Option<String>,
  /// Change the mod loaders (format: name=version,name=version)
  #[clap(short = 'l', long)]
  pub loaders: Option<String>,
}
// Arguments for `link`: two positional project identifiers.
#[derive(Args)]
pub struct LinkArgs {
  /// Source project
  pub from: String,
  /// Target project
  pub to: String,
}
// Arguments for `unlink`: mirror image of `LinkArgs`.
#[derive(Args)]
pub struct UnlinkArgs {
  /// Source project
  pub from: String,
  /// Target project
  pub to: String,
}
// Arguments for `diff`.
#[derive(Args)]
pub struct DiffArgs {
  /// Path to old lockfile
  pub old_lockfile: String,
  /// Path to current lockfile (optional, defaults to pakku-lock.json)
  pub current_lockfile: Option<String>,
  /// Export markdown diff
  #[clap(long)]
  pub markdown_diff: Option<String>,
  /// Export markdown (formatted)
  #[clap(long)]
  pub markdown: Option<String>,
  /// Verbose output (show file changes)
  #[clap(short, long)]
  pub verbose: bool,
  /// Header size for markdown (0-5)
  // Uppercase `-H` avoids colliding with clap's auto `-h` (help).
  #[clap(short = 'H', long, default_value = "2")]
  pub header_size: usize,
}
// Arguments for `fetch`.
#[derive(Args)]
pub struct FetchArgs {
  /// Timeout for waiting on conflicting operations (seconds)
  #[clap(short, long)]
  pub timeout: Option<u64>,
  /// Number of retry attempts for failed downloads
  #[clap(short = 'r', long, default_value = "2")]
  pub retry: u32,
  /// Move unknown files to shelf instead of deleting
  #[clap(long)]
  pub shelve: bool,
}
// Arguments for `sync`: the three filters narrow what gets synced; with none
// of them set, the handler decides the default scope.
#[derive(Args)]
pub struct SyncArgs {
  /// Sync additions only
  #[clap(short = 'A', long)]
  pub additions: bool,
  /// Sync removals only
  #[clap(short = 'R', long)]
  pub removals: bool,
  /// Sync updates only
  #[clap(short = 'U', long)]
  pub updates: bool,
}
// Arguments for `export`. Help text fix: the original said the default was
// the "Pakker layout", which contradicted the flag right above it (enabling
// `--pakker-layout` is what selects the Pakker build/ layout); the default
// must therefore be the other (Pakku, exports/) layout.
#[derive(Args)]
pub struct ExportArgs {
  /// Export profile (curseforge, modrinth, serverpack)
  /// If not specified, all profiles will be exported
  #[clap(short, long)]
  pub profile: Option<String>,
  /// Output directory
  #[clap(short, long)]
  pub output: Option<String>,
  /// Use Pakker-compatible output layout (build/<profile>/...)
  /// Default is the Pakku layout (exports/...)
  #[clap(long)]
  pub pakker_layout: bool,
}
// Arguments for `remote`. With no URL the handler shows status; with a URL
// it installs from that repository.
#[derive(Args)]
pub struct RemoteArgs {
  /// Git URL to install from (if empty, shows status)
  pub url: Option<String>,
  /// Branch to checkout (instead of remote's HEAD)
  #[clap(short, long)]
  pub branch: Option<String>,
  /// Install server pack
  #[clap(short = 'S', long)]
  pub server_pack: bool,
  /// Retry count for downloads
  #[clap(short, long, default_value = "2")]
  pub retry: u32,
  /// Remove remote from modpack
  // An argument has exactly one primary long name; `--remove` is provided
  // via `alias` (the original specified `long` twice, which cannot expose
  // both spellings). Matches the `long`+`alias` pattern used by AddPrjArgs.
  #[clap(long = "rm", alias = "remove")]
  pub remove: bool,
}
// Arguments for `remote-update`.
#[derive(Args)]
pub struct RemoteUpdateArgs {
  /// Branch to checkout instead of remote's HEAD
  #[clap(short, long)]
  pub branch: Option<String>,
  /// Install server pack instead of full modpack
  // NOTE(review): this is lowercase `-s` while RemoteArgs uses `-S` for the
  // same flag — confirm whether that inconsistency is intentional.
  #[clap(short, long)]
  pub server_pack: bool,
}
// Arguments for `status`.
#[derive(Args)]
pub struct StatusArgs {
  /// Check updates in parallel
  #[clap(short, long)]
  pub parallel: bool,
}
// Arguments for `inspect`.
#[derive(Args)]
pub struct InspectArgs {
  /// Project identifiers to inspect
  #[clap(required = true)]
  pub projects: Vec<String>,
}
// Arguments for `credentials`: the deletion flags operate on stored
// credentials; the `set` subcommand writes new ones.
#[derive(Args)]
pub struct CredentialsArgs {
  /// Delete stored credentials (defaults to deleting both file and keyring)
  #[clap(short, long)]
  pub delete: bool,
  /// Delete credentials file (~/.pakku/credentials)
  #[clap(long)]
  pub delete_file: bool,
  /// Delete credentials from keyring (service: pakker)
  #[clap(long)]
  pub delete_keyring: bool,
  #[clap(subcommand)]
  pub subcommand: Option<CredentialsSubcommand>,
}
// Subcommands of `credentials`.
#[derive(Subcommand)]
pub enum CredentialsSubcommand {
  /// Set API credentials
  Set(CredentialsSetArgs),
}
// Arguments for `credentials set`; each token is optional so they can be
// set independently.
#[derive(Args)]
pub struct CredentialsSetArgs {
  /// `CurseForge` API key
  #[clap(long)]
  pub cf_api_key: Option<String>,
  /// Modrinth API token
  #[clap(long)]
  pub modrinth_token: Option<String>,
  /// GitHub access token
  #[clap(long)]
  pub gh_access_token: Option<String>,
}
// Arguments for `cfg`: modpack-level properties plus per-type output paths.
#[derive(Args)]
pub struct CfgArgs {
  /// Modpack name
  #[clap(long)]
  pub name: Option<String>,
  /// Modpack version
  #[clap(long)]
  pub version: Option<String>,
  /// Modpack description
  #[clap(long)]
  pub description: Option<String>,
  /// Modpack author
  #[clap(long)]
  pub author: Option<String>,
  /// Path for mods
  #[clap(long)]
  pub mods_path: Option<String>,
  /// Path for resource packs
  #[clap(long)]
  pub resource_packs_path: Option<String>,
  /// Path for data packs
  #[clap(long)]
  pub data_packs_path: Option<String>,
  /// Path for worlds
  #[clap(long)]
  pub worlds_path: Option<String>,
  /// Path for shaders
  #[clap(long)]
  pub shaders_path: Option<String>,
  #[clap(subcommand)]
  pub subcommand: Option<CfgSubcommand>,
}
// Subcommands of `cfg`.
#[derive(Subcommand)]
pub enum CfgSubcommand {
  /// Configure per-project settings
  Prj(CfgPrjArgs),
}
// Arguments for `cfg prj`: per-project overrides keyed by a positional
// project identifier.
#[derive(Args)]
pub struct CfgPrjArgs {
  /// Project identifier
  pub project: String,
  /// Project type
  // Raw identifier: `type` is a Rust keyword.
  #[clap(long)]
  pub r#type: Option<String>,
  /// Project side (client/server/both)
  #[clap(long)]
  pub side: Option<String>,
  /// Update strategy (latest/none)
  #[clap(long)]
  pub update_strategy: Option<String>,
  /// Redistributable flag
  #[clap(long)]
  pub redistributable: Option<bool>,
  /// Subpath for project
  #[clap(long)]
  pub subpath: Option<String>,
  /// Add alias
  #[clap(long)]
  pub add_alias: Option<String>,
  /// Remove alias
  #[clap(long)]
  pub remove_alias: Option<String>,
  /// Export flag
  #[clap(long)]
  pub export: Option<bool>,
}
/// Fork subcommand arguments
// `args_conflicts_with_subcommands` makes the subcommand mandatory in
// practice: no free-standing args may be mixed with it.
#[derive(Debug, Args)]
#[command(args_conflicts_with_subcommands = true)]
pub struct ForkArgs {
  #[clap(subcommand)]
  pub subcommand: ForkSubcommand,
}
// Subcommands of `fork`. `Init` accepts exactly one of three mutually
// exclusive parent sources (--git-url / --from-current / --from-path),
// enforced by clap's conflict declarations.
#[derive(Debug, Subcommand)]
pub enum ForkSubcommand {
  /// Initialize fork from parent repository
  Init {
    /// Git URL of parent repository
    #[clap(long, conflicts_with = "from_path")]
    git_url: Option<String>,
    /// Use current repository as parent
    #[clap(long, conflicts_with = "from_path")]
    from_current: bool,
    /// Use an already-cloned repository as parent (path to worktree or .git)
    #[clap(long, value_parser, conflicts_with_all = &["git_url", "from_current"])]
    from_path: Option<String>,
    /// Branch/tag/commit to track
    #[clap(long)]
    ref_name: Option<String>,
    /// Type of ref (branch/tag/commit)
    #[clap(long, value_enum)]
    ref_type: Option<RefType>,
    /// Remote name
    #[clap(long, default_value = "origin")]
    remote: Option<String>,
  },
  /// Update fork configuration
  Set {
    /// New git URL (optional)
    #[clap(long)]
    git_url: Option<String>,
    /// Branch/tag/commit to track
    // Required here (unlike Init): a `set` must always specify the ref.
    #[clap(long)]
    ref_name: String,
    /// Type of ref (branch/tag/commit)
    #[clap(long, value_enum)]
    ref_type: Option<RefType>,
    /// Remote name
    #[clap(long)]
    remote: Option<String>,
  },
  /// Show fork configuration
  Show,
  /// Remove fork configuration
  Unset,
  /// Sync with parent repository
  Sync,
  /// Promote projects to parent (legacy)
  Promote {
    /// Project identifiers to promote
    projects: Vec<String>,
  },
}

227
src/cli/commands/add.rs Normal file
View file

@ -0,0 +1,227 @@
use std::collections::HashMap;
use crate::{
error::{PakkerError, Result},
model::{Config, LockFile, Project},
platform::create_platform,
resolver::DependencyResolver,
};
/// Owned copies of the loader names configured in the lockfile.
fn get_loaders(lockfile: &LockFile) -> Vec<String> {
  lockfile.loaders.keys().map(String::to_owned).collect()
}
/// Builds clients for every platform that can be constructed in this
/// environment. Best-effort: a platform whose construction fails is simply
/// omitted from the returned map rather than aborting.
pub fn create_all_platforms()
-> Result<HashMap<String, Box<dyn crate::platform::PlatformClient>>> {
  let mut platforms = HashMap::new();
  // Modrinth requires no credentials.
  if let Ok(platform) = create_platform("modrinth", None) {
    platforms.insert("modrinth".to_string(), platform);
  }
  // CurseForge key is optional and read from the environment when present.
  if let Ok(platform) =
    create_platform("curseforge", std::env::var("CURSEFORGE_API_KEY").ok())
  {
    platforms.insert("curseforge".to_string(), platform);
  }
  // NOTE(review): unlike the near-identical helper in add_prj.rs, this one
  // does not register a "github" platform — confirm whether `add` is also
  // supposed to resolve GitHub projects.
  Ok(platforms)
}
/// Resolves a raw user input string to a project by querying the available
/// platforms; the first platform that returns a matching project wins.
///
/// Platforms are tried in a fixed preference order. The original iterated
/// `HashMap::values()`, whose order is unspecified, so when a slug existed
/// on more than one platform the winner was nondeterministic between runs.
async fn resolve_input(
  input: &str,
  platforms: &HashMap<String, Box<dyn crate::platform::PlatformClient>>,
  lockfile: &LockFile,
) -> Result<Project> {
  // Hoisted out of the loop: the loader list is loop-invariant.
  let loaders = get_loaders(lockfile);
  let preferred = ["modrinth", "curseforge", "github"];
  // Preferred platforms first, then any others registered in the map.
  let ordered = preferred
    .iter()
    .filter_map(|name| platforms.get(*name))
    .chain(
      platforms
        .iter()
        .filter(|(name, _)| !preferred.contains(&name.as_str()))
        .map(|(_, platform)| platform),
    );
  for platform in ordered {
    if let Ok(project) = platform
      .request_project_with_files(input, &lockfile.mc_versions, &loaders)
      .await
    {
      return Ok(project);
    }
  }
  Err(PakkerError::ProjectNotFound(input.to_string()))
}
use std::path::Path;
use crate::{cli::AddArgs, model::fork::LocalConfig};
/// Executes the `add` subcommand: resolves each input against the available
/// platforms, optionally resolves dependencies (with per-dependency
/// confirmation unless `--yes`), and saves the lockfile.
///
/// `lockfile_path` / `config_path` point at files; their parent directories
/// are what the model's `load`/`save` functions expect. If no lockfile
/// exists but a fork parent is configured, a minimal lockfile is
/// bootstrapped from the parent's metadata first.
pub async fn execute(
  args: AddArgs,
  lockfile_path: &Path,
  config_path: &Path,
) -> Result<()> {
  log::info!("Adding projects: {:?}", args.inputs);
  // Load lockfile
  // Load expects directory path, so get parent directory
  let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
  let config_dir = config_path.parent().unwrap_or(Path::new("."));
  // Check if lockfile exists (try both pakker-lock.json and pakku-lock.json)
  let lockfile_exists =
    lockfile_path.exists() || lockfile_dir.join("pakku-lock.json").exists();
  if !lockfile_exists {
    // Try to load config from both pakker.json and pakku.json
    let local_config = LocalConfig::load(config_dir).or_else(|_| {
      let legacy_config_path = config_dir.join("pakku.json");
      if legacy_config_path.exists() {
        LocalConfig::load(&config_dir.join("pakku.json"))
      } else {
        Err(PakkerError::IoError(std::io::Error::new(
          std::io::ErrorKind::NotFound,
          "No pakker.json found",
        )))
      }
    })?;
    if local_config.has_parent() {
      log::info!("Creating minimal fork lockfile with parent metadata...");
      // Check for parent lockfile (try both pakker-lock.json and
      // pakku-lock.json)
      let parent_paths = [
        lockfile_dir.join(".pakku/parent/pakker-lock.json"),
        lockfile_dir.join(".pakku/parent/pakku-lock.json"),
      ];
      let parent_found = parent_paths.iter().any(|path| path.exists());
      if !parent_found {
        return Err(PakkerError::IoError(std::io::Error::new(
          std::io::ErrorKind::NotFound,
          "Fork configured but parent lockfile not found at \
           .pakku/parent/pakker-lock.json or .pakku/parent/pakku-lock.json",
        )));
      }
      // Load parent lockfile to get metadata
      let parent_lockfile = parent_paths
        .iter()
        .find(|path| path.exists())
        .and_then(|path| LockFile::load(path.parent().unwrap()).ok())
        .ok_or_else(|| {
          PakkerError::IoError(std::io::Error::new(
            std::io::ErrorKind::NotFound,
            "Failed to load parent lockfile metadata",
          ))
        })?;
      // Fresh lockfile inheriting target/versions/loaders from the parent
      // but starting with an empty project list.
      let minimal_lockfile = LockFile {
        target: parent_lockfile.target,
        mc_versions: parent_lockfile.mc_versions,
        loaders: parent_lockfile.loaders,
        projects: Vec::new(),
        lockfile_version: 1,
      };
      minimal_lockfile.save_without_validation(lockfile_dir)?;
    } else {
      return Err(PakkerError::IoError(std::io::Error::new(
        std::io::ErrorKind::NotFound,
        "pakker-lock.json not found and no fork configured. Run 'pakker init' \
         first.",
      )));
    }
  }
  let mut lockfile = LockFile::load_with_validation(lockfile_dir, false)?;
  // Load config if available
  let _config = Config::load(config_dir).ok();
  // Create platforms
  let platforms = create_all_platforms()?;
  let mut new_projects = Vec::new();
  // Resolve each input
  for input in &args.inputs {
    let project = resolve_input(input, &platforms, &lockfile).await?;
    // Check if already exists by matching platform IDs (not pakku_id which is
    // random)
    let already_exists = lockfile.projects.iter().any(|p| {
      // Check if any platform ID matches
      project.id.iter().any(|(platform, id)| {
        p.id
          .get(platform)
          .is_some_and(|existing_id| existing_id == id)
      })
    });
    if already_exists {
      if args.update {
        log::info!("Updating existing project: {}", project.get_name());
        // Find and replace the existing project
        if let Some(pos) = lockfile.projects.iter().position(|p| {
          project.id.iter().any(|(platform, id)| {
            p.id
              .get(platform)
              .is_some_and(|existing_id| existing_id == id)
          })
        }) {
          lockfile.projects[pos] = project;
        }
        continue;
      }
      log::info!("Project already exists: {}", project.get_name());
      continue;
    }
    new_projects.push(project);
  }
  // Resolve dependencies unless --no-deps is specified
  if !args.no_deps {
    log::info!("Resolving dependencies...");
    let mut resolver = DependencyResolver::new();
    let mut all_new_projects = new_projects.clone();
    for project in &mut new_projects {
      let deps = resolver.resolve(project, &mut lockfile, &platforms).await?;
      for dep in deps {
        // Only consider deps not already present in the lockfile or the
        // batch being added.
        if !lockfile.projects.iter().any(|p| p.pakku_id == dep.pakku_id)
          && !all_new_projects.iter().any(|p| p.pakku_id == dep.pakku_id)
        {
          // Prompt user for confirmation unless --yes flag is set
          if !args.yes {
            let prompt_msg = format!(
              "Add dependency '{}' required by '{}'?",
              dep.get_name(),
              project.get_name()
            );
            if !crate::ui_utils::prompt_yes_no(&prompt_msg, true)? {
              log::info!("Skipping dependency: {}", dep.get_name());
              continue;
            }
          }
          log::info!("Adding dependency: {}", dep.get_name());
          all_new_projects.push(dep);
        }
      }
    }
    new_projects = all_new_projects;
  }
  // Add projects to lockfile (updates already handled above)
  for project in new_projects {
    lockfile.add_project(project);
  }
  // Save lockfile
  lockfile.save(lockfile_dir)?;
  // NOTE(review): this reports the number of inputs given, not the number of
  // projects actually added (skipped/existing entries are counted too) —
  // confirm whether that is the intended message.
  log::info!("Successfully added {} project(s)", args.inputs.len());
  Ok(())
}

386
src/cli/commands/add_prj.rs Normal file
View file

@ -0,0 +1,386 @@
use std::{collections::HashMap, path::Path};
use crate::{
error::{PakkerError, Result},
model::{
Config,
LockFile,
Project,
enums::{ProjectSide, ProjectType, UpdateStrategy},
},
platform::create_platform,
resolver::DependencyResolver,
};
/// Parse a common project argument (slug or ID with optional file ID)
/// Format: "input" or "`input#file_id`"
fn parse_common_arg(input: &str) -> (String, Option<String>) {
  // Split on the first '#': everything before is the project identifier,
  // everything after (if any) is the file ID.
  match input.split_once('#') {
    Some((project_input, file_id)) => {
      (project_input.to_string(), Some(file_id.to_string()))
    },
    None => (input.to_string(), None),
  }
}
/// Parse a GitHub argument (owner/repo with optional tag)
/// Format: "owner/repo" or "owner/repo#tag"
fn parse_github_arg(input: &str) -> Result<(String, String, Option<String>)> {
  // Strip an optional "#tag" suffix first; the remainder must then contain
  // an "owner/repo" pair separated by the first '/'.
  let (repo_part, tag) = match input.split_once('#') {
    Some((r, t)) => (r, Some(t.to_string())),
    None => (input, None),
  };
  match repo_part.split_once('/') {
    Some((owner, repo)) => Ok((owner.to_string(), repo.to_string(), tag)),
    None => Err(PakkerError::InvalidInput(format!(
      "Invalid GitHub format '{input}'. Expected: owner/repo or owner/repo#tag"
    ))),
  }
}
/// Owned copies of the loader names configured in the lockfile.
fn get_loaders(lockfile: &LockFile) -> Vec<String> {
  lockfile.loaders.keys().map(Clone::clone).collect()
}
/// Executes `add-prj`: fetches the project from each explicitly specified
/// platform (`--cf` / `--mr` / `--gh`), merges the results into one entry,
/// applies user-specified property overrides, replaces or adds the entry in
/// the lockfile (with confirmation unless `yes`), optionally resolves
/// dependencies, and saves the lockfile.
///
/// `lockfile_path` / `config_path` point at files; their parent directories
/// are what `LockFile::load` / `Config::load` expect.
pub async fn execute(
  cf_arg: Option<String>,
  mr_arg: Option<String>,
  gh_arg: Option<String>,
  project_type: Option<ProjectType>,
  project_side: Option<ProjectSide>,
  update_strategy: Option<UpdateStrategy>,
  redistributable: Option<bool>,
  subpath: Option<String>,
  aliases: Vec<String>,
  export: Option<bool>,
  no_deps: bool,
  yes: bool,
  lockfile_path: &Path,
  config_path: &Path,
) -> Result<()> {
  // At least one platform must be specified
  if cf_arg.is_none() && mr_arg.is_none() && gh_arg.is_none() {
    return Err(PakkerError::InvalidInput(
      "At least one platform must be specified (--cf, --mr, or --gh)"
        .to_string(),
    ));
  }
  log::info!("Adding project with explicit platform specification");
  // Load lockfile
  let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
  let config_dir = config_path.parent().unwrap_or(Path::new("."));
  let mut lockfile = LockFile::load(lockfile_dir)?;
  // Load config if available
  let _config = Config::load(config_dir).ok();
  // Get MC versions and loaders from lockfile
  let mc_versions = &lockfile.mc_versions;
  let loaders = get_loaders(&lockfile);
  // Fetch projects from each specified platform
  let mut projects_to_merge: Vec<Project> = Vec::new();
  // CurseForge
  if let Some(cf_input) = cf_arg {
    log::info!("Fetching from CurseForge: {cf_input}");
    let (input, file_id) = parse_common_arg(&cf_input);
    let cf_api_key = std::env::var("CURSEFORGE_API_KEY").ok();
    let platform = create_platform("curseforge", cf_api_key)?;
    let mut project = platform
      .request_project_with_files(&input, mc_versions, &loaders)
      .await
      .map_err(|e| {
        PakkerError::ProjectNotFound(format!(
          "CurseForge project '{input}': {e}"
        ))
      })?;
    // If file_id specified, filter to that file
    if let Some(fid) = file_id {
      project.files.retain(|f| f.id == fid);
      if project.files.is_empty() {
        return Err(PakkerError::FileSelectionError(format!(
          "File ID '{fid}' not found for CurseForge project '{input}'"
        )));
      }
    }
    projects_to_merge.push(project);
  }
  // Modrinth
  if let Some(mr_input) = mr_arg {
    log::info!("Fetching from Modrinth: {mr_input}");
    let (input, file_id) = parse_common_arg(&mr_input);
    let platform = create_platform("modrinth", None)?;
    let mut project = platform
      .request_project_with_files(&input, mc_versions, &loaders)
      .await
      .map_err(|e| {
        PakkerError::ProjectNotFound(format!("Modrinth project '{input}': {e}"))
      })?;
    // If file_id specified, filter to that file
    if let Some(fid) = file_id {
      project.files.retain(|f| f.id == fid);
      if project.files.is_empty() {
        return Err(PakkerError::FileSelectionError(format!(
          "File ID '{fid}' not found for Modrinth project '{input}'"
        )));
      }
    }
    projects_to_merge.push(project);
  }
  // GitHub
  if let Some(gh_input) = gh_arg {
    log::info!("Fetching from GitHub: {gh_input}");
    let (owner, repo, tag) = parse_github_arg(&gh_input)?;
    let gh_token = std::env::var("GITHUB_TOKEN").ok();
    let platform = create_platform("github", gh_token)?;
    let repo_path = format!("{owner}/{repo}");
    let mut project = platform
      .request_project_with_files(&repo_path, mc_versions, &loaders)
      .await
      .map_err(|e| {
        PakkerError::ProjectNotFound(format!(
          "GitHub repository '{owner}/{repo}': {e}"
        ))
      })?;
    // If tag specified, filter to that tag
    if let Some(t) = tag {
      project.files.retain(|f| f.id == t);
      if project.files.is_empty() {
        return Err(PakkerError::FileSelectionError(format!(
          "Tag '{t}' not found for GitHub repository '{owner}/{repo}'"
        )));
      }
    }
    projects_to_merge.push(project);
  }
  // Merge all fetched projects into one
  if projects_to_merge.is_empty() {
    return Err(PakkerError::ProjectNotFound(
      "No projects could be fetched from specified platforms".to_string(),
    ));
  }
  let mut combined_project = projects_to_merge.remove(0);
  for project in projects_to_merge {
    combined_project.merge(project);
  }
  // Apply user-specified properties
  if let Some(pt) = project_type {
    combined_project.r#type = pt;
  }
  if let Some(ps) = project_side {
    combined_project.side = ps;
  }
  if let Some(us) = update_strategy {
    combined_project.update_strategy = us;
  }
  if let Some(r) = redistributable {
    combined_project.redistributable = r;
  }
  if let Some(sp) = subpath {
    combined_project.subpath = Some(sp);
  }
  if let Some(e) = export {
    combined_project.export = e;
  }
  // Add aliases
  for alias in aliases {
    combined_project.aliases.insert(alias);
  }
  // Check if project already exists
  let existing_pos = lockfile.projects.iter().position(|p| {
    // Check if any platform ID matches
    combined_project.id.iter().any(|(platform, id)| {
      p.id
        .get(platform)
        .is_some_and(|existing_id| existing_id == id)
    })
  });
  let project_name = combined_project.get_name();
  if let Some(pos) = existing_pos {
    let existing_project = &lockfile.projects[pos];
    let existing_name = existing_project.get_name();
    if !yes {
      let prompt_msg = format!(
        "Project '{existing_name}' already exists. Replace with \
         '{project_name}'?"
      );
      if !crate::ui_utils::prompt_yes_no(&prompt_msg, false)? {
        log::info!("Operation cancelled by user");
        return Ok(());
      }
    }
    log::info!("Replacing existing project: {existing_name}");
    // NOTE(review): a clone is stored while dependency resolution below
    // mutates the local `combined_project` — confirm that resolver-side
    // mutations are not expected to appear in the stored entry.
    lockfile.projects[pos] = combined_project.clone();
    println!("✓ Replaced '{existing_name}' with '{project_name}'");
  } else {
    if !yes {
      let prompt_msg = format!("Add project '{project_name}'?");
      if !crate::ui_utils::prompt_yes_no(&prompt_msg, true)? {
        log::info!("Operation cancelled by user");
        return Ok(());
      }
    }
    lockfile.add_project(combined_project.clone());
    println!("✓ Added '{project_name}'");
  }
  // Resolve dependencies unless --no-deps is specified
  if !no_deps {
    log::info!("Resolving dependencies...");
    let platforms = create_all_platforms()?;
    let mut resolver = DependencyResolver::new();
    let deps = resolver
      .resolve(&mut combined_project, &mut lockfile, &platforms)
      .await?;
    for dep in deps {
      // Skip if already in lockfile
      if lockfile.projects.iter().any(|p| {
        dep.id.iter().any(|(platform, id)| {
          p.id
            .get(platform)
            .is_some_and(|existing_id| existing_id == id)
        })
      }) {
        continue;
      }
      let dep_name = dep.get_name();
      // Prompt user for confirmation unless --yes flag is set
      if !yes {
        let prompt_msg =
          format!("Add dependency '{dep_name}' required by '{project_name}'?");
        if !crate::ui_utils::prompt_yes_no(&prompt_msg, true)? {
          log::info!("Skipping dependency: {dep_name}");
          continue;
        }
      }
      log::info!("Adding dependency: {dep_name}");
      lockfile.add_project(dep);
      println!("  ✓ Added dependency '{dep_name}'");
    }
  }
  // Save lockfile
  lockfile.save(lockfile_dir)?;
  log::info!("Successfully completed add-prj operation");
  Ok(())
}
/// Builds clients for every platform that can be constructed in this
/// environment. Best-effort: a platform whose construction fails is simply
/// omitted from the returned map rather than aborting.
///
/// NOTE(review): near-duplicate of `create_all_platforms` in add.rs, except
/// that this one also registers "github" — consider sharing one helper.
fn create_all_platforms()
-> Result<HashMap<String, Box<dyn crate::platform::PlatformClient>>> {
  let mut platforms = HashMap::new();
  // Modrinth requires no credentials.
  if let Ok(platform) = create_platform("modrinth", None) {
    platforms.insert("modrinth".to_string(), platform);
  }
  // CurseForge and GitHub credentials are optional and read from the
  // environment when present.
  if let Ok(platform) =
    create_platform("curseforge", std::env::var("CURSEFORGE_API_KEY").ok())
  {
    platforms.insert("curseforge".to_string(), platform);
  }
  if let Ok(platform) =
    create_platform("github", std::env::var("GITHUB_TOKEN").ok())
  {
    platforms.insert("github".to_string(), platform);
  }
  Ok(platforms)
}
// Unit tests for the pure argument-parsing helpers above.
#[cfg(test)]
mod tests {
  use super::*;
  #[test]
  fn test_parse_common_arg_without_file_id() {
    let (input, file_id) = parse_common_arg("fabric-api");
    assert_eq!(input, "fabric-api");
    assert_eq!(file_id, None);
  }
  #[test]
  fn test_parse_common_arg_with_file_id() {
    let (input, file_id) = parse_common_arg("fabric-api#12345");
    assert_eq!(input, "fabric-api");
    assert_eq!(file_id, Some("12345".to_string()));
  }
  #[test]
  fn test_parse_github_arg_owner_repo() {
    let result = parse_github_arg("FabricMC/fabric");
    assert!(result.is_ok());
    let (owner, repo, tag) = result.unwrap();
    assert_eq!(owner, "FabricMC");
    assert_eq!(repo, "fabric");
    assert_eq!(tag, None);
  }
  #[test]
  fn test_parse_github_arg_with_tag() {
    let result = parse_github_arg("FabricMC/fabric#v0.15.0");
    assert!(result.is_ok());
    let (owner, repo, tag) = result.unwrap();
    assert_eq!(owner, "FabricMC");
    assert_eq!(repo, "fabric");
    assert_eq!(tag, Some("v0.15.0".to_string()));
  }
  #[test]
  fn test_parse_github_arg_invalid() {
    // No '/' at all must be rejected.
    let result = parse_github_arg("invalid-format");
    assert!(result.is_err());
    assert!(
      result
        .unwrap_err()
        .to_string()
        .contains("Invalid GitHub format")
    );
  }
  #[test]
  fn test_parse_github_arg_missing_repo() {
    // Documents current lenient behavior: a trailing '/' yields Ok with an
    // empty repo name rather than an error.
    let result = parse_github_arg("FabricMC/");
    assert!(result.is_ok());
    let (owner, repo, tag) = result.unwrap();
    assert_eq!(owner, "FabricMC");
    assert_eq!(repo, "");
    assert_eq!(tag, None);
  }
}

101
src/cli/commands/cfg.rs Normal file
View file

@ -0,0 +1,101 @@
use std::path::Path;
use yansi::Paint;
use crate::{error::Result, model::config::Config};
/// Applies the given modpack-level configuration overrides and persists the
/// updated `Config`.
///
/// Each `Some(_)` argument updates the corresponding field (or `paths` map
/// entry) and prints a confirmation; if nothing was set, a warning is
/// printed and the config file is left untouched.
///
/// Changes vs. the original: `config_dir` was computed twice (once at the
/// top and again before saving) — the duplicate is removed — and the five
/// copy-pasted `paths.*` stanzas now share one helper closure. All printed
/// strings are unchanged.
pub fn execute(
  config_path: &Path,
  name: Option<String>,
  version: Option<String>,
  description: Option<String>,
  author: Option<String>,
  mods_path: Option<String>,
  resource_packs_path: Option<String>,
  data_packs_path: Option<String>,
  worlds_path: Option<String>,
  shaders_path: Option<String>,
) -> Result<()> {
  // Config::load/save expect the directory containing the config file.
  let config_dir = config_path.parent().unwrap_or(Path::new("."));
  let mut config = Config::load(config_dir)?;
  let mut changed = false;
  // Modpack properties
  if let Some(new_name) = name {
    config.name = new_name.clone();
    println!("{}", format!("✓ 'name' set to '{new_name}'").green());
    changed = true;
  }
  if let Some(new_version) = version {
    config.version = new_version.clone();
    println!("{}", format!("✓ 'version' set to '{new_version}'").green());
    changed = true;
  }
  if let Some(new_description) = description {
    config.description = Some(new_description.clone());
    println!(
      "{}",
      format!("✓ 'description' set to '{new_description}'").green()
    );
    changed = true;
  }
  if let Some(new_author) = author {
    config.author = Some(new_author.clone());
    println!("{}", format!("✓ 'author' set to '{new_author}'").green());
    changed = true;
  }
  // Project type paths: all follow the same "paths.<key>" pattern, so they
  // share one helper instead of five hand-copied stanzas. The closure's
  // mutable borrows of `config`/`changed` end with this scope.
  {
    let mut set_path = |key: &str, value: Option<String>| {
      if let Some(path) = value {
        config.paths.insert(key.to_string(), path.clone());
        println!("{}", format!("✓ 'paths.{key}' set to '{path}'").green());
        changed = true;
      }
    };
    set_path("mod", mods_path);
    set_path("resource-pack", resource_packs_path);
    set_path("data-pack", data_packs_path);
    set_path("world", worlds_path);
    set_path("shader", shaders_path);
  }
  if !changed {
    eprintln!(
      "{}",
      "No changes specified. Use --help for options.".yellow()
    );
    return Ok(());
  }
  config.save(config_dir)?;
  println!("\n{}", "Configuration updated successfully".green().bold());
  Ok(())
}

201
src/cli/commands/cfg_prj.rs Normal file
View file

@ -0,0 +1,201 @@
use std::path::Path;
use yansi::Paint;
use crate::{
error::{PakkerError, Result},
model::{
config::Config,
enums::{ProjectSide, ProjectType, UpdateStrategy},
lockfile::LockFile,
},
};
/// Applies per-project configuration overrides for a single project.
///
/// `project` may be a pakku ID, a slug, or a display name; lookup falls back
/// through those strategies in that order (slug/name matching is
/// case-insensitive). Each `Some` argument updates the corresponding
/// per-project setting and prints a confirmation; with no arguments, a hint
/// is printed and nothing is saved.
///
/// # Errors
///
/// Fails when the project cannot be found, when it has no `pakku_id`, or
/// when an enum-valued argument (`type`, `side`, `update_strategy`) does not
/// parse; also propagates config/lockfile load and save errors.
pub fn execute(
  config_path: &Path,
  lockfile_path: &Path,
  project: String,
  r#type: Option<&str>,
  side: Option<&str>,
  update_strategy: Option<&str>,
  redistributable: Option<bool>,
  subpath: Option<String>,
  add_alias: Option<String>,
  remove_alias: Option<String>,
  export: Option<bool>,
) -> Result<()> {
  // Both loaders take the containing directory rather than the file path.
  let config_dir = config_path.parent().unwrap_or(Path::new("."));
  let mut config = Config::load(config_dir)?;
  let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
  let lockfile = LockFile::load(lockfile_dir)?;
  // Find the project in lockfile to get its pakku_id
  // Try multiple lookup strategies: pakku_id first, then slug, then name
  let found_project = lockfile
    .find_project(&project)
    .or_else(|| {
      // Try to find by slug on any platform
      lockfile
        .projects
        .iter()
        .find(|p| p.slug.values().any(|s| s.eq_ignore_ascii_case(&project)))
    })
    .or_else(|| {
      // Try to find by name on any platform
      lockfile
        .projects
        .iter()
        .find(|p| p.name.values().any(|n| n.eq_ignore_ascii_case(&project)))
    })
    .ok_or_else(|| PakkerError::ProjectNotFound(project.clone()))?;
  // Per-project config is keyed by pakku_id only, so a project without one
  // cannot be configured.
  let pakku_id = found_project.pakku_id.as_ref().ok_or_else(|| {
    PakkerError::InvalidProject("Project has no pakku_id".to_string())
  })?;
  // Get or create project config
  let mut project_config = config
    .get_project_config(pakku_id)
    .cloned()
    .unwrap_or_default();
  let mut changed = false;
  // --type: accepted case-insensitively, with or without the underscore.
  if let Some(type_str) = r#type {
    let parsed_type = match type_str.to_uppercase().as_str() {
      "MOD" => ProjectType::Mod,
      "RESOURCE_PACK" | "RESOURCEPACK" => ProjectType::ResourcePack,
      "DATA_PACK" | "DATAPACK" => ProjectType::DataPack,
      "SHADER" => ProjectType::Shader,
      "WORLD" => ProjectType::World,
      _ => {
        return Err(PakkerError::InvalidProject(format!(
          "Invalid type: {type_str}"
        )));
      },
    };
    project_config.r#type = Some(parsed_type);
    println!(
      "{}",
      format!("✓ 'type' set to '{parsed_type:?}' for '{pakku_id}'").green()
    );
    changed = true;
  }
  // --side: CLIENT / SERVER / BOTH (case-insensitive).
  if let Some(side_str) = side {
    let parsed_side = match side_str.to_uppercase().as_str() {
      "CLIENT" => ProjectSide::Client,
      "SERVER" => ProjectSide::Server,
      "BOTH" => ProjectSide::Both,
      _ => {
        return Err(PakkerError::InvalidProject(format!(
          "Invalid side: {side_str}"
        )));
      },
    };
    project_config.side = Some(parsed_side);
    println!(
      "{}",
      format!("✓ 'side' set to '{parsed_side:?}' for '{pakku_id}'").green()
    );
    changed = true;
  }
  // --update-strategy: LATEST / NONE (case-insensitive).
  if let Some(strategy_str) = update_strategy {
    let parsed_strategy = match strategy_str.to_uppercase().as_str() {
      "LATEST" => UpdateStrategy::Latest,
      "NONE" => UpdateStrategy::None,
      _ => {
        return Err(PakkerError::InvalidProject(format!(
          "Invalid update strategy: {strategy_str}"
        )));
      },
    };
    project_config.update_strategy = Some(parsed_strategy);
    println!(
      "{}",
      format!(
        "✓ 'updateStrategy' set to '{parsed_strategy:?}' for '{pakku_id}'"
      )
      .green()
    );
    changed = true;
  }
  if let Some(new_redistributable) = redistributable {
    project_config.redistributable = Some(new_redistributable);
    println!(
      "{}",
      format!(
        "✓ 'redistributable' set to '{new_redistributable}' for '{pakku_id}'"
      )
      .green()
    );
    changed = true;
  }
  if let Some(new_subpath) = subpath {
    project_config.subpath = Some(new_subpath.clone());
    println!(
      "{}",
      format!("✓ 'subpath' set to '{new_subpath}' for '{pakku_id}'").green()
    );
    changed = true;
  }
  // Adding an alias that already exists is a silent no-op (no save).
  if let Some(alias_to_add) = add_alias {
    let mut aliases = project_config.aliases.clone().unwrap_or_default();
    if !aliases.contains(&alias_to_add) {
      aliases.push(alias_to_add.clone());
      project_config.aliases = Some(aliases);
      println!(
        "{}",
        format!("✓ Added alias '{alias_to_add}' for '{pakku_id}'").green()
      );
      changed = true;
    }
  }
  // Removing an alias only applies when an alias list exists; note that the
  // success message is printed even if the alias was not in the list.
  if let Some(alias_to_remove) = remove_alias
    && let Some(mut aliases) = project_config.aliases.clone()
  {
    aliases.retain(|a| a != &alias_to_remove);
    project_config.aliases = Some(aliases);
    println!(
      "{}",
      format!("✓ Removed alias '{alias_to_remove}' from '{pakku_id}'").green()
    );
    changed = true;
  }
  if let Some(new_export) = export {
    project_config.export = Some(new_export);
    println!(
      "{}",
      format!("✓ 'export' set to '{new_export}' for '{pakku_id}'").green()
    );
    changed = true;
  }
  if !changed {
    eprintln!(
      "{}",
      "No changes specified. Use --help for options.".yellow()
    );
    return Ok(());
  }
  config.set_project_config(pakku_id.clone(), project_config);
  // Config::save expects directory path, not file path
  let config_dir = config_path.parent().unwrap_or(Path::new("."));
  config.save(config_dir)?;
  println!(
    "\n{}",
    format!("Project configuration updated for '{pakku_id}'")
      .green()
      .bold()
  );
  Ok(())
}

View file

@ -0,0 +1,112 @@
use yansi::Paint;
use crate::{
error::Result,
model::{
PakkerCredentialsFile,
credentials::{CredentialsSource, ResolvedCredentials},
},
};
/// Lists the stored credentials (masked, with their source), or deletes
/// Pakker-managed credential storage when a delete flag is set.
///
/// # Errors
///
/// Propagates failures from loading/deleting the credentials file or the
/// system keyring.
pub fn execute(
  delete: bool,
  delete_file: bool,
  delete_keyring: bool,
) -> Result<()> {
  if delete || delete_file || delete_keyring {
    // Pakker must never delete or modify Pakku's credentials file
    // (~/.pakku/credentials). Deletion here only affects Pakker-managed
    // storage (keyring + Pakker-owned file). `--delete` implies both.
    if delete || delete_file {
      PakkerCredentialsFile::delete()?;
    }
    if delete || delete_keyring {
      ResolvedCredentials::delete_keyring()?;
    }
    println!("Credentials deleted.");
    return Ok(());
  }

  let creds = ResolvedCredentials::load()?;
  // Label / value / source for each known credential, in display order.
  let entries = [
    (
      "CurseForge API Key",
      creds.curseforge_api_key(),
      creds.curseforge_source(),
    ),
    (
      "Modrinth Token",
      creds.modrinth_token(),
      creds.modrinth_source(),
    ),
    (
      "GitHub Access Token",
      creds.github_access_token(),
      creds.github_source(),
    ),
  ];

  if entries.iter().all(|(_, value, _)| value.is_none()) {
    println!("{}", "No credentials stored".yellow());
    println!("\nUse 'pakker credentials set' to add credentials");
    return Ok(());
  }

  println!("{}", "Stored Credentials:".cyan().bold());
  println!();
  for (label, value, source) in entries {
    print_credential(label, value, source);
  }
  println!();
  println!(
    "{}",
    format!(
      "Credentials file: {}",
      PakkerCredentialsFile::get_path()?.display()
    )
    .cyan()
  );
  Ok(())
}
fn print_credential(
label: &str,
value: Option<&str>,
source: Option<CredentialsSource>,
) {
if let Some(v) = value {
let masked = mask_key(v);
let source = source.map_or("unknown", source_label);
println!(" {} {} ({})", format!("{label}:").yellow(), masked, source);
}
}
/// Maps a credential source to the short label shown in listings.
const fn source_label(source: CredentialsSource) -> &'static str {
  match source {
    CredentialsSource::PakkerFile => "pakker-file",
    CredentialsSource::Keyring => "keyring",
    CredentialsSource::Env => "env",
  }
}
/// Masks a secret for display, keeping the first 8 and last 4 characters
/// visible when the secret is long enough.
///
/// Secrets of 12 characters or fewer are fully masked (one `*` per
/// character). Works on characters rather than bytes, so multi-byte UTF-8
/// input cannot cause a slicing panic (the previous `&key[..8]` /
/// `&key[len - 4..]` byte slices would panic on a non-boundary index).
fn mask_key(key: &str) -> String {
  let char_count = key.chars().count();
  if char_count <= 12 {
    return "*".repeat(char_count);
  }
  let start: String = key.chars().take(8).collect();
  let end: String = key.chars().skip(char_count - 4).collect();
  let middle = "*".repeat(char_count - 12);
  format!("{start}{middle}{end}")
}

View file

@ -0,0 +1,74 @@
use crate::{
error::{PakkerError, Result},
model::{PakkerCredentialsFile, set_keyring_secret},
};
/// Stores the provided credentials in the system keyring and in the
/// Pakker-owned credentials file.
///
/// Each credential is optional; only the ones passed are updated. Values are
/// trimmed, and an empty (post-trim) value is rejected rather than silently
/// clearing a stored secret.
///
/// # Errors
///
/// Returns an error when a provided value is empty after trimming, or when
/// the keyring or credentials file cannot be written.
pub fn execute(
  curseforge_api_key: Option<String>,
  modrinth_token: Option<String>,
  github_access_token: Option<String>,
) -> Result<()> {
  let mut creds = PakkerCredentialsFile::load()?;
  let mut updated_any = false;

  // The three credentials share the same trim/validate/store flow; only the
  // label, keyring key, and destination slot differ.
  store_secret(
    curseforge_api_key,
    "CurseForge API key",
    "curseforge_api_key",
    &mut creds.curseforge_api_key,
    &mut updated_any,
  )?;
  store_secret(
    modrinth_token,
    "Modrinth token",
    "modrinth_token",
    &mut creds.modrinth_token,
    &mut updated_any,
  )?;
  store_secret(
    github_access_token,
    "GitHub access token",
    "github_access_token",
    &mut creds.github_access_token,
    &mut updated_any,
  )?;

  if !updated_any {
    println!(
      "No credentials provided. Use --cf-api-key, --modrinth-token, or \
       --gh-access-token."
    );
    return Ok(());
  }

  creds.save()?;
  println!("Credentials saved.");
  println!(
    "Credentials file: {}",
    PakkerCredentialsFile::get_path()?.display()
  );
  println!("Keyring service: pakker");
  Ok(())
}

/// Trims and validates a single optional secret, then writes it both to the
/// keyring (under `keyring_key`) and to the in-memory credentials-file slot.
/// Sets `updated_any` when a value was stored.
fn store_secret(
  value: Option<String>,
  label: &str,
  keyring_key: &str,
  slot: &mut Option<String>,
  updated_any: &mut bool,
) -> Result<()> {
  let Some(raw) = value else { return Ok(()) };
  let secret = raw.trim().to_string();
  if secret.is_empty() {
    return Err(PakkerError::InternalError(format!(
      "{label} cannot be empty"
    )));
  }
  println!("Setting {label}...");
  set_keyring_secret(keyring_key, &secret)?;
  *slot = Some(secret);
  *updated_any = true;
  Ok(())
}

457
src/cli/commands/diff.rs Normal file
View file

@ -0,0 +1,457 @@
use std::{
collections::{HashMap, HashSet},
fs,
path::Path,
};
use crate::{cli::DiffArgs, error::Result, model::LockFile};
/// Kind of difference detected for a single project between two lockfiles.
#[derive(Debug)]
enum ChangeType {
  /// Present only in the current lockfile.
  Added,
  /// Present only in the old lockfile.
  Removed,
  /// Present in both lockfiles, but with a different primary file.
  Updated,
}
/// A single project-level difference between two lockfiles.
#[derive(Debug)]
struct ProjectChange {
  /// Display name of the project (first available platform name).
  name: String,
  /// Whether the project was added, removed, or updated.
  change_type: ChangeType,
  /// File name recorded in the old lockfile, if any.
  old_file: Option<String>,
  /// File name recorded in the current lockfile, if any.
  new_file: Option<String>,
}
/// Compares two lockfiles and reports added/removed/updated projects along
/// with Minecraft-version and loader changes.
///
/// Writes to the terminal by default, or to a file when `--markdown` /
/// `--markdown-diff` is given.
///
/// # Errors
///
/// Fails when either lockfile cannot be loaded or the output file cannot be
/// written.
pub fn execute(args: DiffArgs, _lockfile_path: &Path) -> Result<()> {
  log::info!("Comparing lockfiles");
  // Load old lockfile
  // NOTE(review): `LockFile::load` takes a directory, so the file-name
  // component of `--old-lockfile` is effectively ignored — confirm this is
  // intended when both lockfiles live in the same directory.
  let old_path = Path::new(&args.old_lockfile);
  let old_dir = old_path.parent().unwrap_or(Path::new("."));
  let old_lockfile = LockFile::load(old_dir)?;
  // Load current lockfile
  let current_path = args
    .current_lockfile
    .as_ref()
    .map_or(Path::new("pakku-lock.json"), Path::new);
  let current_dir = current_path.parent().unwrap_or(Path::new("."));
  let current_lockfile = LockFile::load(current_dir)?;
  // Compare metadata
  let mut changes = Vec::new();
  // Check MC versions
  let old_mc: HashSet<_> = old_lockfile.mc_versions.iter().collect();
  let new_mc: HashSet<_> = current_lockfile.mc_versions.iter().collect();
  let mc_added: Vec<_> = new_mc.difference(&old_mc).collect();
  let mc_removed: Vec<_> = old_mc.difference(&new_mc).collect();
  // Check loaders
  let old_loaders = &old_lockfile.loaders;
  let new_loaders = &current_lockfile.loaders;
  // Compare projects, keyed by pakku_id.
  let old_projects: HashMap<_, _> = old_lockfile
    .projects
    .iter()
    .map(|p| (&p.pakku_id, p))
    .collect();
  let new_projects: HashMap<_, _> = current_lockfile
    .projects
    .iter()
    .map(|p| (&p.pakku_id, p))
    .collect();
  // Find added, removed, updated projects. "Updated" is detected by a change
  // in the first file's name only.
  for (id, new_proj) in &new_projects {
    if !old_projects.contains_key(id) {
      changes.push(ProjectChange {
        name: new_proj.name.values().next().cloned().unwrap_or_default(),
        change_type: ChangeType::Added,
        old_file: None,
        new_file: new_proj.files.first().map(|f| f.file_name.clone()),
      });
    } else if let Some(old_proj) = old_projects.get(id) {
      let old_file_name = old_proj.files.first().map(|f| &f.file_name);
      let new_file_name = new_proj.files.first().map(|f| &f.file_name);
      if old_file_name != new_file_name {
        changes.push(ProjectChange {
          name: new_proj
            .name
            .values()
            .next()
            .cloned()
            .unwrap_or_default(),
          change_type: ChangeType::Updated,
          old_file: old_file_name.cloned(),
          new_file: new_file_name.cloned(),
        });
      }
    }
  }
  for (id, old_proj) in &old_projects {
    if !new_projects.contains_key(id) {
      changes.push(ProjectChange {
        name: old_proj.name.values().next().cloned().unwrap_or_default(),
        change_type: ChangeType::Removed,
        old_file: old_proj.files.first().map(|f| f.file_name.clone()),
        new_file: None,
      });
    }
  }
  // Output results: --markdown-diff wins over --markdown, which wins over
  // plain terminal output.
  if let Some(path) = &args.markdown_diff {
    write_markdown_diff(
      path,
      &old_lockfile,
      &current_lockfile,
      &changes,
      &mc_added,
      &mc_removed,
      old_loaders,
      new_loaders,
      args.verbose,
      args.header_size,
    )?;
  } else if let Some(path) = &args.markdown {
    write_markdown(
      path,
      &old_lockfile,
      &current_lockfile,
      &changes,
      &mc_added,
      &mc_removed,
      old_loaders,
      new_loaders,
      args.verbose,
      args.header_size,
    )?;
  } else {
    print_terminal_diff(
      &old_lockfile,
      &current_lockfile,
      &changes,
      &mc_added,
      &mc_removed,
      old_loaders,
      new_loaders,
      args.verbose,
    );
  }
  Ok(())
}
/// Prints a human-readable diff of two lockfiles to stdout.
///
/// Uses `+` / `-` / `~` prefixes for added / removed / updated entries;
/// `verbose` additionally prints the underlying file names.
fn print_terminal_diff(
  old: &LockFile,
  new: &LockFile,
  changes: &[ProjectChange],
  mc_added: &[&&String],
  mc_removed: &[&&String],
  old_loaders: &HashMap<String, String>,
  new_loaders: &HashMap<String, String>,
  verbose: bool,
) {
  println!("## Lockfile Comparison\n");
  // Target
  if old.target != new.target {
    println!("Target: {:?} -> {:?}", old.target, new.target);
  }
  // MC versions
  if !mc_removed.is_empty() || !mc_added.is_empty() {
    println!("Minecraft Versions:");
    for v in mc_removed {
      println!(" - {v}");
    }
    for v in mc_added {
      println!(" + {v}");
    }
  }
  // Loaders: the header is printed lazily, only once a change is found.
  let mut loader_changes = false;
  for (name, old_ver) in old_loaders {
    if let Some(new_ver) = new_loaders.get(name) {
      if old_ver != new_ver {
        if !loader_changes {
          println!("\nLoaders:");
          loader_changes = true;
        }
        println!(" ~ {name}: {old_ver} -> {new_ver}");
      }
    } else {
      if !loader_changes {
        println!("\nLoaders:");
        loader_changes = true;
      }
      println!(" - {name}: {old_ver}");
    }
  }
  for (name, new_ver) in new_loaders {
    if !old_loaders.contains_key(name) {
      if !loader_changes {
        println!("\nLoaders:");
        loader_changes = true;
      }
      println!(" + {name}: {new_ver}");
    }
  }
  // Projects
  if !changes.is_empty() {
    println!("\nProjects:");
    for change in changes {
      match change.change_type {
        ChangeType::Added => {
          print!(" + {}", change.name);
          if verbose && let Some(file) = &change.new_file {
            print!(" ({file})");
          }
          println!();
        },
        ChangeType::Removed => {
          print!(" - {}", change.name);
          if verbose && let Some(file) = &change.old_file {
            print!(" ({file})");
          }
          println!();
        },
        ChangeType::Updated => {
          print!(" ~ {}", change.name);
          if verbose
            && let (Some(old), Some(new)) = (&change.old_file, &change.new_file)
          {
            print!(" ({old} -> {new})");
          }
          println!();
        },
      }
    }
  }
  if mc_removed.is_empty()
    && mc_added.is_empty()
    && !loader_changes
    && changes.is_empty()
  {
    println!("✓ No differences found");
  }
}
/// Writes the lockfile comparison to `path` as a fenced ```diff block,
/// using `+` / `-` prefixes so markdown renderers colorize the output.
///
/// `_header_size` is unused here (kept for signature parity with
/// [`write_markdown`]).
///
/// # Errors
///
/// Fails if the output file cannot be written.
fn write_markdown_diff(
  path: &str,
  old: &LockFile,
  new: &LockFile,
  changes: &[ProjectChange],
  mc_added: &[&&String],
  mc_removed: &[&&String],
  old_loaders: &HashMap<String, String>,
  new_loaders: &HashMap<String, String>,
  verbose: bool,
  _header_size: usize,
) -> Result<()> {
  let mut content = String::new();
  content.push_str("```diff\n");
  // Metadata changes
  if old.target != new.target {
    content.push_str(&format!("- Target: {:?}\n", old.target));
    content.push_str(&format!("+ Target: {:?}\n", new.target));
  }
  if !mc_removed.is_empty() || !mc_added.is_empty() {
    content.push_str("\nMinecraft Versions:\n");
    for v in mc_removed {
      content.push_str(&format!("- {v}\n"));
    }
    for v in mc_added {
      content.push_str(&format!("+ {v}\n"));
    }
  }
  // Loaders: version bumps become a -/+ pair, removals/additions a single
  // line each.
  for (name, old_ver) in old_loaders {
    if let Some(new_ver) = new_loaders.get(name) {
      if old_ver != new_ver {
        content.push_str(&format!("- {name}: {old_ver}\n"));
        content.push_str(&format!("+ {name}: {new_ver}\n"));
      }
    } else {
      content.push_str(&format!("- {name}: {old_ver}\n"));
    }
  }
  for (name, new_ver) in new_loaders {
    if !old_loaders.contains_key(name) {
      content.push_str(&format!("+ {name}: {new_ver}\n"));
    }
  }
  // Projects
  if !changes.is_empty() {
    content.push_str("\nProjects:\n");
    for change in changes {
      match change.change_type {
        ChangeType::Added => {
          content.push_str(&format!("+ {}", change.name));
          if verbose && let Some(file) = &change.new_file {
            content.push_str(&format!(" ({file})"));
          }
          content.push('\n');
        },
        ChangeType::Removed => {
          content.push_str(&format!("- {}", change.name));
          if verbose && let Some(file) = &change.old_file {
            content.push_str(&format!(" ({file})"));
          }
          content.push('\n');
        },
        ChangeType::Updated => {
          // Verbose updates render as a -/+ pair; otherwise a single `~`
          // line. Non-verbose is also the fallback when either file name is
          // missing.
          if verbose {
            if let (Some(old), Some(new)) = (&change.old_file, &change.new_file)
            {
              content.push_str(&format!("- {} ({})\n", change.name, old));
              content.push_str(&format!("+ {} ({})\n", change.name, new));
            }
          } else {
            content.push_str(&format!("~ {}\n", change.name));
          }
        },
      }
    }
  }
  content.push_str("```\n");
  fs::write(path, content)?;
  println!("Diff exported to {path}");
  Ok(())
}
/// Writes the lockfile comparison to `path` as regular markdown with
/// headings, grouping project changes into Added / Removed / Updated
/// sections.
///
/// `header_size` controls the heading level (capped at 5 so the per-group
/// sub-headings stay within markdown's 6 levels).
///
/// # Errors
///
/// Fails if the output file cannot be written.
fn write_markdown(
  path: &str,
  old: &LockFile,
  new: &LockFile,
  changes: &[ProjectChange],
  mc_added: &[&&String],
  mc_removed: &[&&String],
  old_loaders: &HashMap<String, String>,
  new_loaders: &HashMap<String, String>,
  verbose: bool,
  header_size: usize,
) -> Result<()> {
  let header = "#".repeat(header_size.min(5));
  let mut content = String::new();
  content.push_str(&format!("{header} Lockfile Comparison\n\n"));
  // Target
  if old.target != new.target {
    content.push_str(&format!(
      "**Target:** {:?} → {:?}\n\n",
      old.target, new.target
    ));
  }
  // MC versions
  if !mc_removed.is_empty() || !mc_added.is_empty() {
    content.push_str(&format!("{header} Minecraft Versions\n\n"));
    for v in mc_removed {
      content.push_str(&format!("- ~~{v}~~\n"));
    }
    for v in mc_added {
      content.push_str(&format!("- **{v}** (new)\n"));
    }
    content.push('\n');
  }
  // Loaders: buffered so the section heading is only emitted when there is
  // at least one change.
  let mut has_loader_changes = false;
  let mut loader_content = String::new();
  for (name, old_ver) in old_loaders {
    if let Some(new_ver) = new_loaders.get(name) {
      if old_ver != new_ver {
        has_loader_changes = true;
        // NOTE(review): old and new versions are concatenated with no
        // separator here (unlike the "→" used for Target above) — confirm
        // whether a " → " is missing.
        loader_content
          .push_str(&format!("- **{name}:** {old_ver}{new_ver}\n"));
      }
    } else {
      has_loader_changes = true;
      loader_content.push_str(&format!("- ~~{name}: {old_ver}~~\n"));
    }
  }
  for (name, new_ver) in new_loaders {
    if !old_loaders.contains_key(name) {
      has_loader_changes = true;
      loader_content.push_str(&format!("- **{name}: {new_ver}** (new)\n"));
    }
  }
  if has_loader_changes {
    content.push_str(&format!("{header} Loaders\n\n"));
    content.push_str(&loader_content);
    content.push('\n');
  }
  // Projects
  if !changes.is_empty() {
    content.push_str(&format!("{header} Projects\n\n"));
    let added: Vec<_> = changes
      .iter()
      .filter(|c| matches!(c.change_type, ChangeType::Added))
      .collect();
    let removed: Vec<_> = changes
      .iter()
      .filter(|c| matches!(c.change_type, ChangeType::Removed))
      .collect();
    let updated: Vec<_> = changes
      .iter()
      .filter(|c| matches!(c.change_type, ChangeType::Updated))
      .collect();
    if !added.is_empty() {
      content.push_str(&format!("{}# Added ({})\n\n", header, added.len()));
      for change in added {
        content.push_str(&format!("- **{}**", change.name));
        if verbose && let Some(file) = &change.new_file {
          content.push_str(&format!(" ({file})"));
        }
        content.push('\n');
      }
      content.push('\n');
    }
    if !removed.is_empty() {
      content.push_str(&format!("{}# Removed ({})\n\n", header, removed.len()));
      for change in removed {
        content.push_str(&format!("- ~~{}~~", change.name));
        if verbose && let Some(file) = &change.old_file {
          content.push_str(&format!(" ({file})"));
        }
        content.push('\n');
      }
      content.push('\n');
    }
    if !updated.is_empty() {
      content.push_str(&format!("{}# Updated ({})\n\n", header, updated.len()));
      for change in updated {
        content.push_str(&format!("- **{}**", change.name));
        if verbose
          && let (Some(old), Some(new)) = (&change.old_file, &change.new_file)
        {
          // NOTE(review): file names are concatenated with no separator —
          // confirm whether " → " is missing here as well.
          content.push_str(&format!(" ({old}{new})"));
        }
        content.push('\n');
      }
      content.push('\n');
    }
  }
  fs::write(path, content)?;
  println!("Diff exported to {path}");
  Ok(())
}

291
src/cli/commands/export.rs Normal file
View file

@ -0,0 +1,291 @@
use std::path::Path;
use crate::{
cli::ExportArgs,
error::{PakkerError, Result},
export::Exporter,
ipc::{IpcCoordinator, OperationType},
model::{Config, LockFile, fork::LocalConfig},
utils::hash::compute_sha256_bytes,
};
/// Exports the modpack for one profile (`--profile`) or all profiles.
///
/// Coordinates with other running pakker processes via the IPC layer, then
/// resolves the lockfile to export: for a configured fork it merges the
/// parent's lockfile with local overrides; otherwise it uses the local
/// lockfile, falling back to a cached parent lockfile if none exists.
///
/// # Errors
///
/// Fails when no usable lockfile can be found, when IPC coordination times
/// out, or when loading/merging/exporting fails.
pub async fn execute(
  args: ExportArgs,
  lockfile_path: &Path,
  config_path: &Path,
) -> Result<()> {
  if let Some(ref profile) = args.profile {
    log::info!("Exporting with profile: {profile}");
  } else {
    log::info!("Exporting all profiles");
  }
  let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
  let config_dir = config_path.parent().unwrap_or(Path::new("."));
  // IPC coordination - prevent concurrent operations on the same modpack
  let ipc = IpcCoordinator::new(&config_dir.to_path_buf())?;
  let ipc_timeout = std::time::Duration::from_secs(60);
  // Check for conflicting export operations
  let conflicting = ipc.get_running_operations(OperationType::Export);
  if !conflicting.is_empty() {
    log::info!(
      "Waiting for conflicting operations to complete: {:?}",
      conflicting
        .iter()
        .map(|op| (op.id.clone(), op.pid))
        .collect::<Vec<_>>()
    );
    ipc
      .wait_for_conflicts(OperationType::Export, ipc_timeout)
      .await?;
  }
  // Register this export operation; the guard deregisters it on drop.
  let _op_guard = ipc.register_operation(OperationType::Export)?;
  // Load config to check for fork configuration
  let config = Config::load(config_dir)?;
  let local_config = LocalConfig::load(config_dir).ok();
  // Check if this is a fork with parent
  let lockfile = if let Some(local_cfg) = &local_config {
    if local_cfg.parent.is_some() {
      log::info!("Fork detected - merging parent and local lockfiles");
      // Locate the cached parent clone, preferring a pakker-lock.json over
      // pakku-lock.json in either known parent directory.
      let parent_paths = [".pakku/parent", ".pakker/parent"];
      let mut parent_lockfile_path = None;
      let mut lockfile_name = "pakku-lock.json";
      for parent_dir in &parent_paths {
        // Try pakker-lock.json first
        let check_path = Path::new(parent_dir).join("pakker-lock.json");
        if check_path.exists() {
          parent_lockfile_path = Some(parent_dir);
          lockfile_name = "pakker-lock.json";
          break;
        }
        // Fall back to pakku-lock.json
        let check_path = Path::new(parent_dir).join("pakku-lock.json");
        if check_path.exists() {
          parent_lockfile_path = Some(parent_dir);
          lockfile_name = "pakku-lock.json";
          break;
        }
      }
      if let Some(parent_dir) = parent_lockfile_path {
        // Load parent lockfile
        let parent_lockfile = LockFile::load(Path::new(parent_dir))?;
        // Verify parent lockfile hash for integrity. A mismatch only warns:
        // the export proceeds with the (possibly newer) parent lockfile.
        if let Some(stored_hash) = &local_cfg.parent_lock_hash {
          let parent_lock_path = Path::new(parent_dir).join(lockfile_name);
          let parent_lock_content = std::fs::read(&parent_lock_path)?;
          let computed_hash = compute_sha256_bytes(&parent_lock_content);
          if &computed_hash != stored_hash {
            log::warn!(
              "Parent lockfile hash mismatch - parent may have changed since \
               last sync"
            );
            log::warn!("Expected: {stored_hash}, Got: {computed_hash}");
          }
        }
        // Load local lockfile if it exists
        if lockfile_path.exists() {
          log::info!("Merging parent lockfile with local overrides");
          let local_lockfile =
            LockFile::load_with_validation(lockfile_dir, false)?;
          // Merge: start with parent, override with local
          merge_lockfiles(parent_lockfile, local_lockfile, local_cfg)?
        } else {
          log::info!("No local lockfile - using parent lockfile");
          parent_lockfile
        }
      } else {
        return Err(PakkerError::IoError(std::io::Error::new(
          std::io::ErrorKind::NotFound,
          "Fork configured but parent lockfile not found",
        )));
      }
    } else {
      // No fork, use local lockfile
      if lockfile_path.exists() {
        LockFile::load(lockfile_dir)?
      } else {
        return Err(PakkerError::IoError(std::io::Error::new(
          std::io::ErrorKind::NotFound,
          "No lockfile found",
        )));
      }
    }
  } else {
    // No local config, try local lockfile or fall back to parent
    if lockfile_path.exists() {
      LockFile::load(lockfile_dir)?
    } else {
      // Try parent's lockfile as fallback (same search order as above)
      let parent_paths = [".pakku/parent", ".pakker/parent"];
      let mut parent_lockfile = None;
      let mut lockfile_name = "pakku-lock.json";
      for parent_dir in &parent_paths {
        // Try pakker-lock.json first
        let lockfile_path_check =
          Path::new(parent_dir).join("pakker-lock.json");
        if lockfile_path_check.exists() {
          parent_lockfile = Some(parent_dir);
          lockfile_name = "pakker-lock.json";
          break;
        }
        // Fall back to pakku-lock.json
        let lockfile_path_check = Path::new(parent_dir).join("pakku-lock.json");
        if lockfile_path_check.exists() {
          parent_lockfile = Some(parent_dir);
          lockfile_name = "pakku-lock.json";
          break;
        }
      }
      match parent_lockfile {
        Some(parent_dir) => {
          log::info!(
            "Using parent's lockfile ({lockfile_name}) from {parent_dir}"
          );
          LockFile::load(Path::new(parent_dir))?
        },
        None => {
          return Err(PakkerError::IoError(std::io::Error::new(
            std::io::ErrorKind::NotFound,
            "No lockfile found (neither local nor parent's)",
          )));
        },
      }
    }
  };
  // Determine output path: --pakker-layout forces "build" and overrides
  // --output.
  let output_path = if args.pakker_layout {
    "build"
  } else {
    args.output.as_deref().unwrap_or("exports")
  };
  // Create exporter
  let mut exporter = Exporter::new(".");
  // Export based on profile argument
  if let Some(profile_name) = args.profile {
    // Single profile export (backwards compatible)
    let output_file = exporter
      .export(&profile_name, &lockfile, &config, Path::new(output_path))
      .await?;
    println!("Export complete: {output_file:?}");
  } else {
    // Multi-profile export (Pakker-compatible default behavior)
    let output_files = exporter
      .export_all_profiles(&lockfile, &config, Path::new(output_path))
      .await?;
    println!("\nExported {} files:", output_files.len());
    for output_file in output_files {
      println!(" - {output_file:?}");
    }
  }
  Ok(())
}
/// Merges a parent lockfile with a fork's local lockfile.
///
/// Parent projects form the base; any parent project sharing a slug with a
/// local project is dropped in favor of the local one. Per-project overrides
/// from the fork's local config (`type`, `side`, update strategy, etc.) are
/// applied to the surviving parent projects. Local projects are then
/// appended unchanged.
///
/// # Errors
///
/// Currently infallible; returns `Result` to leave room for validation.
fn merge_lockfiles(
  parent: LockFile,
  local: LockFile,
  local_config: &LocalConfig,
) -> Result<LockFile> {
  // Parent metadata wins for target, MC versions, loaders, and version.
  let mut merged = LockFile {
    target: parent.target,
    mc_versions: parent.mc_versions,
    loaders: parent.loaders,
    projects: Vec::new(),
    lockfile_version: parent.lockfile_version,
  };

  // Collect local project slugs (across all platforms) for override
  // detection.
  let local_slugs: std::collections::HashSet<String> = local
    .projects
    .iter()
    .flat_map(|p| p.slug.values().cloned())
    .collect();

  // Add parent projects that are NOT overridden by a local project.
  // Iterating by value consumes `parent.projects`, so surviving projects
  // are moved rather than cloned.
  for mut project in parent.projects {
    let is_overridden =
      project.slug.values().any(|slug| local_slugs.contains(slug));
    if is_overridden {
      continue;
    }

    // Apply local config overrides, matching by slug, name, or pakku_id;
    // only the first matching config entry is applied.
    for (key, local_proj_cfg) in &local_config.projects {
      let matches = project.slug.values().any(|s| s == key)
        || project.name.values().any(|n| n == key)
        || project.pakku_id.as_ref() == Some(key);
      if matches {
        if let Some(t) = local_proj_cfg.r#type {
          project.r#type = t;
        }
        if let Some(s) = local_proj_cfg.side {
          project.side = s;
        }
        if let Some(us) = local_proj_cfg.update_strategy {
          project.update_strategy = us;
        }
        if let Some(r) = local_proj_cfg.redistributable {
          project.redistributable = r;
        }
        if let Some(ref sp) = local_proj_cfg.subpath {
          project.subpath = Some(sp.clone());
        }
        if let Some(ref aliases) = local_proj_cfg.aliases {
          project.aliases = aliases.iter().cloned().collect();
        }
        if let Some(e) = local_proj_cfg.export {
          project.export = e;
        }
        break;
      }
    }
    merged.projects.push(project);
  }

  // Count the parent projects that actually survived the merge. The previous
  // implementation subtracted `local_config.projects.len()` (the number of
  // config-override entries, not of overridden projects), which reported the
  // wrong number and could underflow and panic.
  let kept_parent_count = merged.projects.len();
  let local_count = local.projects.len();

  // Append all local projects; `local` is owned, so no clone is needed.
  merged.projects.extend(local.projects);

  println!(
    "Merged fork: {} parent projects + {} local projects = {} total projects",
    kept_parent_count,
    local_count,
    merged.projects.len()
  );
  Ok(merged)
}

49
src/cli/commands/fetch.rs Normal file
View file

@ -0,0 +1,49 @@
use std::path::{Path, PathBuf};
use crate::{
cli::FetchArgs,
error::Result,
fetch::Fetcher,
ipc::{IpcCoordinator, OperationGuard, OperationType},
model::{Config, LockFile},
};
/// Downloads every project listed in the lockfile.
///
/// Coordinates with other running pakker processes through the IPC layer so
/// that at most one fetch runs against the modpack at a time.
///
/// # Errors
///
/// Fails when the lockfile or config cannot be loaded, when waiting on a
/// conflicting operation times out, or when any download fails.
pub async fn execute(
  args: FetchArgs,
  lockfile_path: &Path,
  config_path: &Path,
) -> Result<()> {
  // Both loaders take the containing directory rather than the file path.
  let lockfile =
    LockFile::load(lockfile_path.parent().unwrap_or(Path::new(".")))?;
  let config = Config::load(config_path.parent().unwrap_or(Path::new(".")))?;

  // Coordinate with concurrently running pakker processes on this modpack.
  let working_dir = PathBuf::from(".");
  let coordinator = IpcCoordinator::new(&working_dir)?;
  if coordinator.has_running_operation(OperationType::Fetch) {
    // Block until conflicting fetches finish (default timeout: 5 minutes).
    let timeout = std::time::Duration::from_secs(args.timeout.unwrap_or(300));
    coordinator
      .wait_for_conflicts(OperationType::Fetch, timeout)
      .await?;
  }

  // Register ourselves; the guard deregisters the operation when dropped.
  let operation_id = coordinator.register_operation(OperationType::Fetch)?;
  let _guard = OperationGuard::new(coordinator, operation_id);

  // Progress reporting happens inside the fetcher itself.
  let fetcher = Fetcher::new(".");
  fetcher.fetch_all(&lockfile, &config).await?;
  println!("Fetch complete");
  Ok(())
}

677
src/cli/commands/fork.rs Normal file
View file

@ -0,0 +1,677 @@
use std::{fs, io::Write, path::Path};
use crate::{
cli::ForkArgs,
error::PakkerError,
git::{self, VcsType},
model::{
config::Config,
fork::{ForkIntegrity, LocalConfig, ParentConfig, RefType, hash_content},
},
};
/// Directory where pakku/pakker metadata lives.
const PAKKU_DIR: &str = ".pakku";
/// Subdirectory (under [`PAKKU_DIR`]) holding the cloned parent modpack.
const PARENT_DIR_NAME: &str = "parent";

/// Returns the relative path of the parent clone (".pakku/parent").
fn parent_dir() -> String {
  [PAKKU_DIR, PARENT_DIR_NAME].join("/")
}
/// Main entry point for fork commands.
///
/// Dispatches to the handler for the selected `fork` subcommand; all
/// argument unpacking happens here so the handlers take plain values.
pub fn execute(args: &ForkArgs) -> Result<(), PakkerError> {
  match &args.subcommand {
    crate::cli::ForkSubcommand::Init {
      git_url,
      from_current,
      from_path,
      ref_name,
      ref_type,
      remote,
    } => {
      execute_init(
        git_url.clone(),
        *from_current,
        from_path.clone(),
        ref_name.clone(),
        *ref_type,
        remote.clone(),
      )
    },
    crate::cli::ForkSubcommand::Set {
      git_url,
      ref_name,
      ref_type,
      remote,
    } => {
      execute_set(git_url.clone(), ref_name.clone(), *ref_type, remote.clone())
    },
    crate::cli::ForkSubcommand::Show => execute_show(),
    crate::cli::ForkSubcommand::Unset => execute_unset(),
    crate::cli::ForkSubcommand::Sync => execute_sync(),
    crate::cli::ForkSubcommand::Promote { projects } => {
      execute_promote(projects.clone())
    },
  }
}
/// Checks that `url` looks like a supported parent-repository location.
///
/// Accepts network URLs (`https://`, `ssh://`), SSH scp-style remotes
/// (`git@…`), `file://` URLs, and absolute filesystem paths (the latter are
/// used by tests with local bare repositories).
fn validate_git_url(url: &str) -> Result<(), PakkerError> {
  const ACCEPTED_PREFIXES: [&str; 4] =
    ["https://", "git@", "ssh://", "file://"];

  let is_valid = url.starts_with('/')
    || ACCEPTED_PREFIXES.iter().any(|prefix| url.starts_with(prefix));

  if is_valid {
    return Ok(());
  }
  Err(PakkerError::Fork(format!(
    "Invalid git URL: {url}. Expected https://, git@, ssh://, file://, or \
     absolute filesystem path."
  )))
}
/// Initializes fork tracking for the current modpack.
///
/// Exactly one parent source must be provided:
/// - `git_url`: clone the parent over the network,
/// - `from_current`: detect the parent URL from this repository's remote,
/// - `from_path`: clone from an existing local checkout and record its
///   upstream remote URL as the canonical parent id.
///
/// `ref_name` defaults to `"main"` and `remote` to `"origin"`. On success the
/// parent is recorded in the local config, the clone lives under the path
/// returned by `parent_dir()`, and that path is appended to `.gitignore`.
fn execute_init(
  git_url: Option<String>,
  from_current: bool,
  from_path: Option<String>,
  ref_name: Option<String>,
  ref_type: Option<RefType>,
  remote: Option<String>,
) -> Result<(), PakkerError> {
  let config_dir = Path::new(".");
  // Validate that pakker.json exists for fork operations; forking is
  // refused for legacy pakku.json-only setups.
  let pakker_json_path = config_dir.join("pakker.json");
  let pakku_json_path = config_dir.join("pakku.json");
  if !pakker_json_path.exists() && pakku_json_path.exists() {
    return Err(PakkerError::Fork(
      "Forking is a pakker-specific feature and requires pakker.json. \nFound \
       pakku.json but not pakker.json. Please migrate to pakker.json to use \
       fork functionality.\nYou can convert your pakku.json to pakker.json by \
       renaming the file."
        .to_string(),
    ));
  }
  // A missing/unreadable local config is treated as "no fork configured yet".
  let mut local_config = LocalConfig::load(config_dir).unwrap_or_default();
  // Check if parent already configured.
  // NOTE(review): the `is_some()` guard is redundant with the `let Some`
  // chain that follows — the chain alone decides whether this branch runs.
  if local_config.parent.is_some()
    && let Some(parent) = &local_config.parent
  {
    return Err(PakkerError::Fork(format!(
      "Parent already configured: {}",
      parent.id
    )));
  }
  // Resolve defaults early to avoid shadowing/confusion
  let resolved_remote = remote.unwrap_or_else(|| "origin".to_string());
  let resolved_ref = ref_name.unwrap_or_else(|| "main".to_string());
  // Parent path (where we keep the cloned parent)
  let parent_path_str = parent_dir();
  // Branch: from_current, from_path, or git_url
  let mut cloned_from_local = false;
  let url = if from_current {
    // Detect git URL from current directory
    if !git::is_git_repository(config_dir) {
      return Err(PakkerError::Fork(
        "Not a git repository. Use --git-url or run 'git init' first."
          .to_string(),
      ));
    }
    git::get_remote_url(config_dir, &resolved_remote)?
  } else if let Some(fp) = from_path {
    // Use provided local path as source; infer upstream remote from it
    let path = Path::new(&fp);
    if !git::is_git_repository(path) {
      return Err(PakkerError::Fork(format!(
        "Provided path is not a git repository: {}",
        path.display()
      )));
    }
    // Infer upstream remote URL from the existing local clone
    let upstream_url = git::get_primary_remote_url(path)?;
    // Reject file:// or non-network remotes
    validate_git_url(&upstream_url)?;
    // Ensure working tree is clean; the message is tailored to the VCS in
    // use so the suggested remediation command is correct.
    let vcs_type = git::detect_vcs_type(path);
    if git::repo_has_uncommitted_changes(path)? {
      let error_msg = match vcs_type {
        VcsType::Git => {
          "Local repository at --from-path has uncommitted changes. Commit or \
           stash them before proceeding."
        },
        VcsType::Jujutsu => {
          "Local repository at --from-path has uncommitted changes. Run 'jj \
           commit' to save changes before proceeding."
        },
        VcsType::None => {
          "Local repository at --from-path has uncommitted changes. Please \
           clean the directory before proceeding."
        },
      };
      return Err(PakkerError::Fork(error_msg.to_string()));
    }
    // VCS-specific validation
    match vcs_type {
      VcsType::Git => {
        // Attempt lightweight fetch of remote refs to refresh remote tracking.
        // Failure here is non-fatal: we warn and fall back to the local clone.
        match git::fetch_remote_light(path, &resolved_remote, &resolved_ref) {
          Ok(()) => println!("Fetched remote refs for verification"),
          Err(e) => {
            log::warn!("Lightweight fetch from upstream failed: {e}");
            println!(
              "Warning: could not perform lightweight fetch from upstream. \
               Proceeding with local clone; subsequent sync may require \
               network."
            );
          },
        }
        // Compare local ref vs remote ref; refuse to fork from a clone that
        // has unpushed commits, since the upstream would not contain them.
        let remote_ref = format!("{resolved_remote}/{resolved_ref}");
        match git::ahead_behind(path, &resolved_ref, &remote_ref) {
          Ok((ahead, _behind)) => {
            if ahead > 0 {
              return Err(PakkerError::Fork(format!(
                "Local repository at {} has {} commits not present on \
                 upstream {}. Push or use --git-url if you intend to use an \
                 upstream that contains these commits.",
                path.display(),
                ahead,
                upstream_url
              )));
            }
          },
          Err(e) => {
            log::warn!("Could not compute ahead/behind: {e}");
          },
        }
      },
      VcsType::Jujutsu => {
        // For jujutsu, we skip git-specific remote validation since jj has
        // different synchronization patterns
        println!(
          "Warning: Skipping remote validation for jujutsu repository. Ensure \
           your jj repo is in sync with remote before proceeding."
        );
        // Check if there are any changes that haven't been pushed to the remote
        if let Ok(output) = std::process::Command::new("jj")
          .args(["log", "--limit", "1", "--template", ""])
          .current_dir(path)
          .output()
        {
          if !output.stdout.is_empty() {
            println!(
              "Note: Jujutsu repository detected. Make sure to run 'jj git \
               push' to sync changes with remote if needed."
            );
          }
        }
      },
      VcsType::None => {
        // No VCS-specific validation needed
      },
    }
    // Compute parent lock/config hashes for reproducibility; prefer the
    // pakker files and fall back to the legacy pakku names.
    let parent_lock_path = if path.join("pakker-lock.json").exists() {
      path.join("pakker-lock.json")
    } else {
      path.join("pakku-lock.json")
    };
    if parent_lock_path.exists() {
      let lock_content =
        fs::read_to_string(&parent_lock_path).map_err(|e| {
          PakkerError::Fork(format!("Failed to read parent lock file: {e}"))
        })?;
      let lock_hash = hash_content(&lock_content);
      local_config.parent_lock_hash = Some(lock_hash);
    }
    let parent_config_path = if path.join("pakker.json").exists() {
      path.join("pakker.json")
    } else {
      path.join("pakku.json")
    };
    if parent_config_path.exists() {
      let config_content =
        fs::read_to_string(&parent_config_path).map_err(|e| {
          PakkerError::Fork(format!("Failed to read parent config: {e}"))
        })?;
      let config_hash = hash_content(&config_content);
      local_config.parent_config_hash = Some(config_hash);
    }
    // Now clone from the local path into .pakku/parent — this avoids
    // re-downloading objects
    let parent_path = Path::new(&parent_path_str);
    println!(
      "Cloning parent repository from local path {}...",
      path.display()
    );
    git::clone_repository(&fp, parent_path, &resolved_ref, None)?;
    // Ensure the cloned repo's origin is set to the upstream URL (not the local
    // path)
    git::set_remote_url(parent_path, &resolved_remote, &upstream_url)?;
    // Mark that we've already cloned from local
    cloned_from_local = true;
    // We will persist upstream_url as the canonical parent id
    upstream_url
  } else if let Some(url) = git_url {
    // NOTE(review): unlike `pakku fork set`, the --git-url value is not
    // passed through validate_git_url here — confirm whether that is
    // intentional.
    url
  } else {
    return Err(PakkerError::Fork(
      "Either --git-url, --from-current or --from-path must be specified"
        .to_string(),
    ));
  };
  let parent_path = Path::new(&parent_path_str);
  // If we did not already clone from local, perform network clone and checks
  if cloned_from_local {
    println!(
      "Parent repository was cloned from local path; skipping network clone."
    );
  } else {
    // Check if parent directory already exists and is not empty; an
    // unreadable directory is treated as non-empty (unwrap_or(false)).
    if parent_path.exists() {
      let is_empty = parent_path
        .read_dir()
        .map(|mut entries| entries.next().is_none())
        .unwrap_or(false);
      if !is_empty {
        return Err(PakkerError::Fork(format!(
          "Directory not empty: {}",
          parent_path.display()
        )));
      }
    }
    println!("Cloning parent repository...");
    println!(" URL: {url}");
    println!(" Ref: {resolved_ref}");
    git::clone_repository(&url, parent_path, &resolved_ref, None)?;
  }
  let commit_sha = git::get_commit_sha(parent_path, &resolved_ref)?;
  // Detect ref type if not specified
  let resolved_ref_type = if let Some(rt) = ref_type {
    rt
  } else {
    git::resolve_ref_type(parent_path, &resolved_ref)?
  };
  // Persist the parent description; version records the short commit sha.
  // NOTE(review): `commit_sha[..8]` assumes the sha is at least 8 bytes —
  // true for git SHA-1/SHA-256 hex strings returned by get_commit_sha.
  let parent_config = ParentConfig {
    type_: "git".to_string(),
    id: url.clone(),
    version: Some(commit_sha[..8].to_string()),
    ref_: resolved_ref.clone(),
    ref_type: resolved_ref_type,
    remote_name: resolved_remote,
  };
  local_config.parent = Some(parent_config);
  local_config.save(config_dir)?;
  // Add .pakku/parent to .gitignore
  add_to_gitignore()?;
  println!();
  println!("✓ Fork initialized successfully");
  println!(" Parent: {url}");
  println!(" Ref: {} ({})", resolved_ref, match resolved_ref_type {
    RefType::Branch => "branch",
    RefType::Tag => "tag",
    RefType::Commit => "commit",
  });
  println!(" Commit: {}", &commit_sha[..8]);
  println!();
  println!("Run 'pakku fork sync' to sync with parent.");
  Ok(())
}
/// Updates the configured fork parent in place: URL (validated), tracked
/// ref, ref type, and/or remote name. Fails when no parent is configured.
fn execute_set(
  git_url: Option<String>,
  ref_name: String,
  ref_type: Option<RefType>,
  remote: Option<String>,
) -> Result<(), PakkerError> {
  let config_dir = Path::new(".");
  let mut local_config = LocalConfig::load(config_dir)?;
  // Take ownership of the parent config; fail fast if the fork was never
  // initialized. (Replaces the old is_none() check + unwrap() pair.)
  let Some(mut parent) = local_config.parent.take() else {
    return Err(PakkerError::Fork(
      "No parent configured. Run 'pakku fork init' first.".to_string(),
    ));
  };
  // Only a provided URL is validated; other fields are overwritten as-is.
  if let Some(url) = git_url {
    validate_git_url(&url)?;
    parent.id = url;
  }
  parent.ref_ = ref_name;
  if let Some(rt) = ref_type {
    parent.ref_type = rt;
  }
  if let Some(remote_name) = remote {
    parent.remote_name = remote_name;
  }
  local_config.parent = Some(parent);
  local_config.save(config_dir)?;
  // Re-borrow for the summary output (avoids cloning the parent config).
  let parent = local_config
    .parent
    .as_ref()
    .expect("parent was assigned just above");
  println!("✓ Fork configuration updated");
  println!(" Parent: {}", parent.id);
  println!(" Ref: {} ({})", parent.ref_, match parent.ref_type {
    RefType::Branch => "branch",
    RefType::Tag => "tag",
    RefType::Commit => "commit",
  });
  println!();
  println!("Run 'pakku fork sync' to sync with new configuration.");
  Ok(())
}
/// Prints the current fork configuration: parent URL, ref, remote, last
/// synced commit, and any per-project overrides. Tells the user how to set
/// up a fork when none is configured.
fn execute_show() -> Result<(), PakkerError> {
  let config_dir = Path::new(".");
  let local_config = LocalConfig::load(config_dir)?;
  let Some(parent) = local_config.parent else {
    println!("No fork configured.");
    println!("Run 'pakku fork init' to initialize a fork.");
    return Ok(());
  };
  println!("Fork Configuration:");
  println!(" Parent URL: {}", parent.id);
  println!(" Type: {}", match parent.ref_type {
    RefType::Branch => "branch",
    RefType::Tag => "tag",
    RefType::Commit => "commit",
  });
  println!(" Ref: {}", parent.ref_);
  println!(" Remote: {}", parent.remote_name);
  match parent.version {
    Some(version) => println!(" Last synced commit: {version}"),
    None => println!(" Last synced commit: never synced"),
  }
  if local_config.projects.is_empty() {
    return Ok(());
  }
  println!();
  println!("Project Overrides ({}):", local_config.projects.len());
  for (slug, proj_config) in &local_config.projects {
    // Collect the override fields that are actually set for this project.
    let mut details: Vec<String> = Vec::new();
    if let Some(version) = &proj_config.version {
      details.push(format!("version={version}"));
    }
    if let Some(side) = &proj_config.side {
      details.push(format!("side={side}"));
    }
    if let Some(strategy) = &proj_config.update_strategy {
      details.push(format!("updateStrategy={strategy}"));
    }
    if details.is_empty() {
      println!(" - {slug}");
    } else {
      println!(" - {slug} ({})", details.join(", "));
    }
  }
  Ok(())
}
/// Removes the fork configuration after interactive confirmation, deleting
/// the local parent clone and clearing parent-related state (including the
/// drift-detection hashes) from the local config.
fn execute_unset() -> Result<(), PakkerError> {
  let config_dir = Path::new(".");
  let mut local_config = LocalConfig::load(config_dir)?;
  if local_config.parent.is_none() {
    println!("No fork configured.");
    return Ok(());
  }
  // Prompt for confirmation. Terminal I/O failures are reported as errors
  // instead of panicking (the previous code unwrap()ed both calls).
  print!("Are you sure you want to remove fork configuration? [y/N] ");
  std::io::stdout().flush().map_err(|e| {
    PakkerError::Fork(format!("Failed to flush stdout: {e}"))
  })?;
  let mut input = String::new();
  std::io::stdin().read_line(&mut input).map_err(|e| {
    PakkerError::Fork(format!("Failed to read confirmation input: {e}"))
  })?;
  if !input.trim().eq_ignore_ascii_case("y") {
    println!("Cancelled.");
    return Ok(());
  }
  // Remove parent directory
  let parent_path_str = parent_dir();
  let parent_path = Path::new(&parent_path_str);
  if parent_path.exists() {
    fs::remove_dir_all(parent_path).map_err(|e| {
      PakkerError::Fork(format!("Failed to remove parent directory: {e}"))
    })?;
  }
  // Clear parent configuration and the recorded parent hashes
  local_config.parent = None;
  local_config.parent_lock_hash = None;
  local_config.parent_config_hash = None;
  local_config.save(config_dir)?;
  println!("✓ Fork configuration removed");
  Ok(())
}
/// Syncs the local parent clone with its configured upstream ref.
///
/// Fetches (or clones, if the parent directory is missing) the parent
/// repository, resets it to the configured ref, records content hashes of
/// the parent's lock/config files so later syncs can warn about upstream
/// drift, and stores the newly synced short commit sha in the local config.
fn execute_sync() -> Result<(), PakkerError> {
  let config_dir = Path::new(".");
  let mut local_config = LocalConfig::load(config_dir)?;
  let parent = local_config.parent.as_ref().ok_or_else(|| {
    PakkerError::Fork(
      "No parent configured. Run 'pakku fork init' first.".to_string(),
    )
  })?;
  let parent_path_str = parent_dir();
  let parent_path = Path::new(&parent_path_str);
  if parent_path.exists() {
    println!("Fetching parent updates...");
    git::fetch_updates(parent_path, &parent.remote_name, &parent.ref_, None)?;
    git::reset_to_ref(parent_path, &parent.remote_name, &parent.ref_)?;
  } else {
    // The clone can legitimately be missing (e.g. removed by the user or a
    // fresh checkout); recreate it from the recorded parent URL.
    println!("Parent repository not found. Cloning...");
    git::clone_repository(&parent.id, parent_path, &parent.ref_, None)?;
  }
  let commit_sha = git::get_commit_sha(parent_path, &parent.ref_)?;
  let mut integrity = None;
  // Try pakker files first, fall back to pakku files
  let parent_lock_path = if parent_path.join("pakker-lock.json").exists() {
    parent_path.join("pakker-lock.json")
  } else {
    parent_path.join("pakku-lock.json")
  };
  let parent_config_path = if parent_path.join("pakker.json").exists() {
    parent_path.join("pakker.json")
  } else {
    parent_path.join("pakku.json")
  };
  // NOTE(review): the config-hash refresh below is nested inside the
  // lock-file-exists branch, so a parent with pakker.json but no lock file
  // never updates parent_config_hash — confirm this nesting is intentional.
  if parent_lock_path.exists() {
    let lock_content = fs::read_to_string(&parent_lock_path).map_err(|e| {
      PakkerError::Fork(format!("Failed to read parent lock file: {e}"))
    })?;
    let lock_hash = hash_content(&lock_content);
    // Warn (but do not fail) when the parent lock drifted since last sync.
    if let Some(prev_hash) = &local_config.parent_lock_hash
      && prev_hash != &lock_hash
    {
      log::warn!("Parent lock file has changed since last sync");
      log::warn!(" Previous hash: {prev_hash}");
      log::warn!(" Current hash: {lock_hash}");
    }
    local_config.parent_lock_hash = Some(lock_hash);
    // A missing parent config hashes as the empty string.
    let config_content = if parent_config_path.exists() {
      fs::read_to_string(&parent_config_path).map_err(|e| {
        PakkerError::Fork(format!("Failed to read parent config: {e}"))
      })?
    } else {
      String::new()
    };
    let config_hash = hash_content(&config_content);
    if let Some(prev_hash) = &local_config.parent_config_hash
      && prev_hash != &config_hash
    {
      log::warn!("Parent config file has changed since last sync");
      log::warn!(" Previous hash: {prev_hash}");
      log::warn!(" Current hash: {config_hash}");
    }
    local_config.parent_config_hash = Some(config_hash);
    integrity = Some(ForkIntegrity::new(
      local_config.parent_lock_hash.clone().unwrap_or_default(),
      commit_sha.clone(),
      local_config.parent_config_hash.clone().unwrap_or_default(),
    ));
  }
  if let Some(ref integrity_data) = integrity {
    log::info!(
      "Parent integrity verified at timestamp {}",
      integrity_data.verified_at
    );
  }
  // Record the short sha of the commit we just synced to.
  if let Some(parent) = local_config.parent.as_mut() {
    parent.version = Some(commit_sha[..8].to_string());
  }
  local_config.save(config_dir)?;
  println!();
  println!("✓ Parent sync complete");
  println!(" Commit: {}", &commit_sha[..8]);
  println!();
  println!("Run 'pakku export' to merge changes from parent.");
  Ok(())
}
/// Handles `fork promote`: validates that a parent is configured and that
/// every named project exists in the config, then explains that projects in
/// pakku.json are merged with the parent automatically during export (so no
/// state change is needed).
fn execute_promote(projects: Vec<String>) -> Result<(), PakkerError> {
  let config_dir = Path::new(".");
  let local_config = LocalConfig::load(config_dir)?;
  if local_config.parent.is_none() {
    return Err(PakkerError::Fork(
      "No parent configured. Run 'pakku fork init' first.".to_string(),
    ));
  }
  if projects.is_empty() {
    return Err(PakkerError::Fork(
      "No projects specified. Usage: pakku fork promote <project>..."
        .to_string(),
    ));
  }
  // Load current config
  let config = Config::load(config_dir)?;
  // Every requested project must already be present in the config.
  for project_arg in &projects {
    let exists = config
      .projects
      .as_ref()
      .is_some_and(|projs| projs.get(project_arg).is_some());
    if !exists {
      return Err(PakkerError::Fork(format!(
        "Project not found: {project_arg}"
      )));
    }
  }
  println!("Note: In the current architecture, projects in pakku.json are");
  println!("automatically merged with parent projects during export.");
  println!();
  println!("The following projects are already in pakku.json:");
  for project in &projects {
    println!(" - {project}");
  }
  println!();
  println!("These will be included in exports automatically.");
  Ok(())
}
/// Ensures the parent clone directory (from `parent_dir()`) is listed in
/// `.gitignore`, creating the file if needed. A no-op when the entry is
/// already present.
fn add_to_gitignore() -> Result<(), PakkerError> {
  let gitignore_path = Path::new(".gitignore");
  let parent_dir = parent_dir();
  // Read the current content once; a missing file behaves like empty.
  let existing = if gitignore_path.exists() {
    fs::read_to_string(gitignore_path).map_err(|e| {
      PakkerError::Fork(format!("Failed to read .gitignore: {e}"))
    })?
  } else {
    String::new()
  };
  if existing.lines().any(|line| line.trim() == parent_dir) {
    return Ok(());
  }
  // Append to .gitignore
  let mut file = fs::OpenOptions::new()
    .create(true)
    .append(true)
    .open(gitignore_path)
    .map_err(|e| {
      PakkerError::Fork(format!("Failed to open .gitignore: {e}"))
    })?;
  // If the existing file does not end with a newline, add one first so the
  // new entry is not glued onto the last existing line.
  if !existing.is_empty() && !existing.ends_with('\n') {
    writeln!(file).map_err(|e| {
      PakkerError::Fork(format!("Failed to write to .gitignore: {e}"))
    })?;
  }
  writeln!(file, "{parent_dir}").map_err(|e| {
    PakkerError::Fork(format!("Failed to write to .gitignore: {e}"))
  })?;
  Ok(())
}

395
src/cli/commands/import.rs Normal file
View file

@ -0,0 +1,395 @@
use std::path::Path;
use crate::{
cli::ImportArgs,
error::{PakkerError, Result},
model::{Config, LockFile, Target},
ui_utils::prompt_yes_no,
};
/// Imports a modpack archive (Modrinth `.mrpack` or CurseForge zip) into
/// pakku-lock.json / pakku.json, prompting before overwriting existing
/// files unless `--yes` was passed. The format is detected from the
/// archive's well-known index file.
pub async fn execute(
  args: ImportArgs,
  lockfile_path: &Path,
  config_path: &Path,
) -> Result<()> {
  log::info!("Importing modpack from {}", args.file);
  let path = Path::new(&args.file);
  if !path.exists() {
    return Err(PakkerError::FileNotFound(
      path.to_string_lossy().to_string(),
    ));
  }
  // Ask before clobbering an existing lockfile and/or config.
  let have_lockfile = lockfile_path.exists();
  let have_config = config_path.exists();
  if (have_lockfile || have_config) && !args.yes {
    let msg = match (have_lockfile, have_config) {
      (true, true) => {
        "Both pakku-lock.json and pakku.json exist. Importing will overwrite \
         them. Continue?"
      },
      (true, false) => {
        "pakku-lock.json exists. Importing will overwrite it. Continue?"
      },
      _ => "pakku.json exists. Importing will overwrite it. Continue?",
    };
    if !prompt_yes_no(msg, false)? {
      log::info!("Import cancelled by user");
      return Ok(());
    }
  }
  // Sniff the pack format by probing for each format's index file.
  let file = std::fs::File::open(path)?;
  let mut archive = zip::ZipArchive::new(file)?;
  let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
  let config_dir = config_path.parent().unwrap_or(Path::new("."));
  let is_modrinth = archive.by_name("modrinth.index.json").is_ok();
  let is_curseforge = !is_modrinth && archive.by_name("manifest.json").is_ok();
  // Release the archive handle; the importers reopen the file themselves.
  drop(archive);
  if is_modrinth {
    import_modrinth(path, lockfile_dir, config_dir).await
  } else if is_curseforge {
    import_curseforge(path, lockfile_dir, config_dir).await
  } else {
    Err(PakkerError::InvalidImportFile(
      "Unknown pack format".to_string(),
    ))
  }
}
/// Imports a Modrinth `.mrpack` archive at `path`, writing the generated
/// lockfile into `lockfile_dir` and the config into `config_dir`, then
/// extracting the pack's `overrides/` contents.
async fn import_modrinth(
  path: &Path,
  lockfile_dir: &Path,
  config_dir: &Path,
) -> Result<()> {
  use std::{fs::File, io::Read};
  use zip::ZipArchive;
  use crate::platform::create_platform;
  let file = File::open(path)?;
  let mut archive = ZipArchive::new(file)?;
  // Read the pack index (scoped so the mutable borrow on `archive` ends
  // before the override-extraction loop below).
  let index_content = {
    let mut index_file = archive.by_name("modrinth.index.json")?;
    let mut content = String::new();
    index_file.read_to_string(&mut content)?;
    content
  };
  let index: serde_json::Value = serde_json::from_str(&index_content)?;
  // Create lockfile
  let mc_version = index["dependencies"]["minecraft"]
    .as_str()
    .unwrap_or("1.20.1")
    .to_string();
  // Loader detection: fabric first, then forge; defaults to fabric/latest
  // when the index declares neither.
  let loader =
    if let Some(fabric) = index["dependencies"]["fabric-loader"].as_str() {
      ("fabric".to_string(), fabric.to_string())
    } else if let Some(forge) = index["dependencies"]["forge"].as_str() {
      ("forge".to_string(), forge.to_string())
    } else {
      ("fabric".to_string(), "latest".to_string())
    };
  let mut loaders = std::collections::HashMap::new();
  loaders.insert(loader.0.clone(), loader.1);
  let mut lockfile = LockFile {
    target: Some(Target::Modrinth),
    mc_versions: vec![mc_version.clone()],
    loaders: loaders.clone(),
    projects: Vec::new(),
    lockfile_version: 1,
  };
  // Import projects from files list
  if let Some(files) = index["files"].as_array() {
    log::info!("Importing {} projects from modpack", files.len());
    // Create platform client
    let creds = crate::model::credentials::ResolvedCredentials::load().ok();
    let platform = create_platform(
      "modrinth",
      creds
        .as_ref()
        .and_then(|c| c.modrinth_token().map(std::string::ToString::to_string)),
    )?;
    for file_entry in files {
      // NOTE(review): the id is taken from the second-to-last path segment
      // of the first download URL. For the Modrinth CDN layout
      // `.../data/<project>/versions/<version>/<file>` that segment is the
      // *version* id rather than the project id — verify that
      // request_project_with_files accepts it.
      if let Some(project_id) = file_entry["downloads"]
        .as_array()
        .and_then(|downloads| downloads.first())
        .and_then(|url| url.as_str())
        .and_then(|url| url.split('/').rev().nth(1))
      {
        log::info!("Fetching project: {project_id}");
        match platform
          .request_project_with_files(project_id, &lockfile.mc_versions, &[
            loader.0.clone(),
          ])
          .await
        {
          Ok(mut project) => {
            // Select best file; skip (but do not fail) projects with no
            // compatible file.
            if let Err(e) =
              project.select_file(&lockfile.mc_versions, &[loader.0.clone()])
            {
              log::warn!(
                "Failed to select file for {}: {}",
                project.get_name(),
                e
              );
              continue;
            }
            lockfile.add_project(project);
          },
          Err(e) => {
            // Fetch failures are logged and skipped so one bad entry does
            // not abort the whole import.
            log::warn!("Failed to fetch project {project_id}: {e}");
          },
        }
      }
    }
  }
  // Create config
  let config = Config {
    name: index["name"]
      .as_str()
      .unwrap_or("Imported Pack")
      .to_string(),
    version: index["versionId"]
      .as_str()
      .unwrap_or("1.0.0")
      .to_string(),
    description: index["summary"]
      .as_str()
      .map(std::string::ToString::to_string),
    author: None,
    overrides: vec!["overrides".to_string()],
    server_overrides: None,
    client_overrides: None,
    paths: Default::default(),
    projects: None,
    export_profiles: None,
  };
  // Save files using provided paths
  lockfile.save(lockfile_dir)?;
  config.save(config_dir)?;
  log::info!("Imported {} projects", lockfile.projects.len());
  // Extract overrides. enclosed_name() rejects entries that would escape
  // the extraction root (zip-slip protection).
  // NOTE(review): files are written relative to the current working
  // directory, not config_dir — confirm that is the intended destination.
  for i in 0..archive.len() {
    let mut file = archive.by_index(i)?;
    let outpath = file.enclosed_name().ok_or_else(|| {
      PakkerError::InternalError("Invalid file path in archive".to_string())
    })?;
    if outpath.starts_with("overrides/") {
      let target = outpath.strip_prefix("overrides/").unwrap();
      if file.is_dir() {
        std::fs::create_dir_all(target)?;
      } else {
        if let Some(parent) = target.parent() {
          std::fs::create_dir_all(parent)?;
        }
        let mut outfile = File::create(target)?;
        std::io::copy(&mut file, &mut outfile)?;
      }
    }
  }
  Ok(())
}
/// Imports a CurseForge modpack zip at `path`, writing the generated
/// lockfile into `lockfile_dir` and the config into `config_dir`, then
/// extracting the pack's overrides folder (name taken from the manifest).
async fn import_curseforge(
  path: &Path,
  lockfile_dir: &Path,
  config_dir: &Path,
) -> Result<()> {
  use std::{fs::File, io::Read};
  use zip::ZipArchive;
  let file = File::open(path)?;
  let mut archive = ZipArchive::new(file)?;
  // Read the pack manifest (scoped so the mutable borrow on `archive` ends
  // before the override-extraction loop below).
  let manifest_content = {
    let mut manifest_file = archive.by_name("manifest.json")?;
    let mut content = String::new();
    manifest_file.read_to_string(&mut content)?;
    content
  };
  let manifest: serde_json::Value = serde_json::from_str(&manifest_content)?;
  // Create lockfile
  let mc_version = manifest["minecraft"]["version"]
    .as_str()
    .unwrap_or("1.20.1")
    .to_string();
  let mod_loaders =
    manifest["minecraft"]["modLoaders"]
      .as_array()
      .ok_or_else(|| {
        PakkerError::InvalidImportFile("Missing modLoaders".to_string())
      })?;
  // Only the first declared loader is honored.
  let loader_info = mod_loaders
    .first()
    .and_then(|l| l["id"].as_str())
    .ok_or_else(|| {
      PakkerError::InvalidImportFile("Missing loader id".to_string())
    })?;
  // Loader ids look like "fabric-0.15.0"; split into name and version,
  // falling back to "fabric"/"latest" for malformed ids.
  let parts: Vec<&str> = loader_info.split('-').collect();
  let loader_name = (*parts.first().unwrap_or(&"fabric")).to_string();
  let loader_version = (*parts.get(1).unwrap_or(&"latest")).to_string();
  let mut loaders = std::collections::HashMap::new();
  loaders.insert(loader_name, loader_version);
  let mut lockfile = LockFile {
    target: Some(Target::CurseForge),
    mc_versions: vec![mc_version.clone()],
    loaders: loaders.clone(),
    projects: Vec::new(),
    lockfile_version: 1,
  };
  // Import projects from files list
  if let Some(files) = manifest["files"].as_array() {
    log::info!("Importing {} projects from modpack", files.len());
    // Create platform client
    use crate::platform::create_platform;
    let curseforge_token = std::env::var("CURSEFORGE_TOKEN").ok();
    let platform = create_platform("curseforge", curseforge_token)?;
    for file_entry in files {
      if let Some(project_id) = file_entry["projectID"].as_u64() {
        let project_id_str = project_id.to_string();
        log::info!("Fetching project: {project_id_str}");
        match platform
          .request_project_with_files(
            &project_id_str,
            &lockfile.mc_versions,
            &loaders.keys().cloned().collect::<Vec<_>>(),
          )
          .await
        {
          Ok(mut project) => {
            // Try to select the specific file if fileID is provided
            if let Some(file_id) = file_entry["fileID"].as_u64() {
              let file_id_str = file_id.to_string();
              // Try to find the file with matching ID; pin the project to
              // exactly that file when found.
              if let Some(file) =
                project.files.iter().find(|f| f.id == file_id_str).cloned()
              {
                project.files = vec![file];
              } else {
                // Referenced file no longer available: fall back to the
                // best compatible match, skipping on failure.
                log::warn!(
                  "Could not find file {} for project {}, selecting best match",
                  file_id,
                  project.get_name()
                );
                if let Err(e) = project.select_file(
                  &lockfile.mc_versions,
                  &loaders.keys().cloned().collect::<Vec<_>>(),
                ) {
                  log::warn!(
                    "Failed to select file for {}: {}",
                    project.get_name(),
                    e
                  );
                  continue;
                }
              }
            } else {
              // No specific file ID, select best match
              if let Err(e) = project.select_file(
                &lockfile.mc_versions,
                &loaders.keys().cloned().collect::<Vec<_>>(),
              ) {
                log::warn!(
                  "Failed to select file for {}: {}",
                  project.get_name(),
                  e
                );
                continue;
              }
            }
            lockfile.add_project(project);
          },
          Err(e) => {
            // Fetch failures are logged and skipped so one bad entry does
            // not abort the whole import.
            log::warn!("Failed to fetch project {project_id_str}: {e}");
          },
        }
      }
    }
  }
  // Create config
  let config = Config {
    name: manifest["name"]
      .as_str()
      .unwrap_or("Imported Pack")
      .to_string(),
    version: manifest["version"]
      .as_str()
      .unwrap_or("1.0.0")
      .to_string(),
    description: None,
    author: manifest["author"]
      .as_str()
      .map(std::string::ToString::to_string),
    overrides: vec!["overrides".to_string()],
    server_overrides: None,
    client_overrides: None,
    paths: Default::default(),
    projects: None,
    export_profiles: None,
  };
  // Save files using provided paths
  lockfile.save(lockfile_dir)?;
  config.save(config_dir)?;
  log::info!("Imported {} projects", lockfile.projects.len());
  // Extract overrides. The folder name comes from the manifest (defaults to
  // "overrides"); starts_with/strip_prefix compare whole path components,
  // so e.g. "overridesfoo/…" is not matched. enclosed_name() rejects
  // entries that would escape the extraction root (zip-slip protection).
  // NOTE(review): files are written relative to the current working
  // directory, not config_dir — confirm that is the intended destination.
  let overrides_prefix = manifest["overrides"].as_str().unwrap_or("overrides");
  for i in 0..archive.len() {
    let mut file = archive.by_index(i)?;
    let outpath = file.enclosed_name().ok_or_else(|| {
      PakkerError::InternalError("Invalid file path in archive".to_string())
    })?;
    if outpath.starts_with(overrides_prefix) {
      let target = outpath.strip_prefix(overrides_prefix).unwrap();
      if file.is_dir() {
        std::fs::create_dir_all(target)?;
      } else {
        if let Some(parent) = target.parent() {
          std::fs::create_dir_all(parent)?;
        }
        let mut outfile = File::create(target)?;
        std::io::copy(&mut file, &mut outfile)?;
      }
    }
  }
  Ok(())
}

67
src/cli/commands/init.rs Normal file
View file

@ -0,0 +1,67 @@
use std::{collections::HashMap, path::Path};
use crate::{
cli::InitArgs,
error::PakkerError,
model::{Config, LockFile, Target},
};
/// Initializes a brand-new modpack: writes a fresh lockfile and config for
/// the requested target/loader/Minecraft version.
///
/// Fails when a lockfile already exists or the target string is unknown.
pub async fn execute(
  args: InitArgs,
  lockfile_path: &Path,
  config_path: &Path,
) -> Result<(), PakkerError> {
  if lockfile_path.exists() {
    return Err(PakkerError::AlreadyExists(
      "Lock file already exists".into(),
    ));
  }
  let target = args.target.as_str();
  // Map the user-supplied target string onto the typed enum.
  let parsed_target = match target {
    "curseforge" => Target::CurseForge,
    "modrinth" => Target::Modrinth,
    "multiplatform" => Target::Multiplatform,
    other => {
      return Err(PakkerError::InvalidInput(format!(
        "Invalid target: {other}"
      )));
    },
  };
  let lockfile = LockFile {
    target: Some(parsed_target),
    mc_versions: vec![args.mc_version],
    loaders: HashMap::from([(args.loader, args.loader_version)]),
    projects: Vec::new(),
    lockfile_version: 1,
  };
  // `save` expects the containing directory, not the file path itself.
  lockfile.save(lockfile_path.parent().unwrap_or(Path::new(".")))?;
  let config = Config {
    name: args.name.unwrap_or_else(|| "My Modpack".to_string()),
    version: args.version.unwrap_or_else(|| "1.0.0".to_string()),
    description: None,
    author: None,
    overrides: vec!["overrides".to_string()],
    server_overrides: None,
    client_overrides: None,
    paths: HashMap::new(),
    projects: None,
    export_profiles: None,
  };
  config.save(config_path.parent().unwrap_or(Path::new(".")))?;
  println!("Initialized new modpack with target: {target}");
  Ok(())
}

596
src/cli/commands/inspect.rs Normal file
View file

@ -0,0 +1,596 @@
use std::{collections::HashSet, path::Path};
use comfy_table::{Cell, Color, ContentArrangement, Table, presets};
use strsim::levenshtein;
use yansi::Paint;
use crate::{
error::Result,
model::{Config, LockFile, Project, ProjectFile},
};
/// Inspects one or more projects from the lockfile, printing a detailed
/// view for each match and close-match suggestions for each miss.
///
/// # Errors
/// Returns `ProjectNotFound` when none of the requested projects exist in
/// the lockfile.
pub async fn execute(
  projects: Vec<String>,
  lockfile_path: &Path,
  config_path: &Path,
) -> Result<()> {
  let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
  let config_dir = config_path.parent().unwrap_or(Path::new("."));
  let lockfile = LockFile::load(lockfile_dir)?;
  let _config = Config::load(config_dir)?;
  let mut found_any = false;
  let total_projects = projects.len();
  for (idx, project_input) in projects.iter().enumerate() {
    if let Some(project) = find_project(&lockfile, project_input) {
      display_project_inspection(project, &lockfile)?;
      found_any = true;
      // Add separator between projects (but not after the last one)
      if idx < total_projects - 1 {
        let width = 80; // Default terminal width
        // Fix: this previously repeated the empty string, which printed
        // nothing; use a box-drawing rule so the separator is visible.
        println!("{}", "─".repeat(width));
        println!();
      }
    } else {
      eprintln!(
        "{}: {}",
        "Error".red(),
        format!("Project '{project_input}' not found in lockfile.").red()
      );
      // Suggest similar projects
      if let Some(suggestions) =
        find_similar_projects(&lockfile, project_input, 5)
      {
        eprintln!();
        eprintln!("{}", "Did you mean one of these?".yellow());
        for suggestion in suggestions {
          eprintln!(" - {}", suggestion.cyan());
        }
      }
      eprintln!();
    }
  }
  if !found_any && !projects.is_empty() {
    return Err(crate::error::PakkerError::ProjectNotFound(
      "No projects found".to_string(),
    ));
  }
  Ok(())
}
/// Returns the first lockfile project whose slug, name, pakku id, or alias
/// matches `query` (see [`project_matches`]), or `None` when nothing does.
fn find_project<'a>(
  lockfile: &'a LockFile,
  query: &str,
) -> Option<&'a Project> {
  // Linear scan; lockfiles are small enough that this is fine.
  for candidate in &lockfile.projects {
    if project_matches(candidate, query) {
      return Some(candidate);
    }
  }
  None
}
fn project_matches(project: &Project, query: &str) -> bool {
// Check slugs
for slug in project.slug.values() {
if slug.eq_ignore_ascii_case(query) {
return true;
}
}
// Check names
for name in project.name.values() {
if name.eq_ignore_ascii_case(query) {
return true;
}
}
// Check pakku_id
if let Some(ref pakku_id) = project.pakku_id
&& pakku_id.eq_ignore_ascii_case(query)
{
return true;
}
// Check aliases
for alias in &project.aliases {
if alias.eq_ignore_ascii_case(query) {
return true;
}
}
false
}
/// Finds up to `max_results` project identifiers within Levenshtein
/// distance 3 of `query` (checked against slugs, case-folded names, and
/// aliases), ordered closest-first and deduplicated. Returns `None` when
/// nothing is close enough.
fn find_similar_projects(
  lockfile: &LockFile,
  query: &str,
  max_results: usize,
) -> Option<Vec<String>> {
  const MAX_DISTANCE: usize = 3;
  let query_lower = query.to_lowercase();
  // Gather (identifier, distance) pairs across all projects.
  let mut candidates: Vec<(String, usize)> = Vec::new();
  for project in &lockfile.projects {
    for slug in project.slug.values() {
      let distance = levenshtein(slug, query);
      if distance <= MAX_DISTANCE {
        candidates.push((slug.clone(), distance));
      }
    }
    // Names are compared case-insensitively.
    for name in project.name.values() {
      let distance = levenshtein(&name.to_lowercase(), &query_lower);
      if distance <= MAX_DISTANCE {
        candidates.push((name.clone(), distance));
      }
    }
    for alias in &project.aliases {
      let distance = levenshtein(alias, query);
      if distance <= MAX_DISTANCE {
        candidates.push((alias.clone(), distance));
      }
    }
  }
  if candidates.is_empty() {
    return None;
  }
  // Closest matches first.
  candidates.sort_by_key(|(_, distance)| *distance);
  // Deduplicate while preserving order, capped at max_results entries.
  let mut seen = HashSet::new();
  let mut suggestions = Vec::new();
  for (name, _) in candidates {
    if suggestions.len() == max_results {
      break;
    }
    if seen.insert(name.clone()) {
      suggestions.push(name);
    }
  }
  Some(suggestions)
}
/// Prints the full inspection view for one project: header panel, file
/// list, properties, and dependency tree, each preceded by a blank line
/// (plus a trailing blank line). Order is part of the output contract.
fn display_project_inspection(
  project: &Project,
  lockfile: &LockFile,
) -> Result<()> {
  // Display project header panel
  display_project_header(project)?;
  // Display project files
  println!();
  display_project_files(&project.files)?;
  // Display properties
  println!();
  display_properties(project)?;
  // Display dependency tree
  println!();
  display_dependencies(project, lockfile)?;
  println!();
  Ok(())
}
/// Renders the project's header panel: its display name on the first row
/// and "slug (platform) • type • side" metadata on the second.
fn display_project_header(project: &Project) -> Result<()> {
  let name = get_project_name(project);
  // Borrow the first slug / platform id instead of eagerly allocating
  // fallback Strings (the previous code built a fresh String per call even
  // when a value was present).
  let slug = project.slug.values().next().map_or("N/A", String::as_str);
  let platform = project.id.keys().next().map_or("unknown", String::as_str);
  // Create header table with comfy-table
  let mut table = Table::new();
  table
    .load_preset(presets::UTF8_FULL)
    .set_content_arrangement(ContentArrangement::Dynamic);
  // Title row with name
  table.add_row(vec![
    Cell::new(name)
      .fg(Color::Cyan)
      .set_alignment(comfy_table::CellAlignment::Left),
  ]);
  // Second row with slug, type, side
  let metadata = format!(
    "{} ({}) • {} • {}",
    slug,
    platform,
    format!("{:?}", project.r#type).to_lowercase(),
    format!("{:?}", project.side).to_lowercase()
  );
  table.add_row(vec![
    Cell::new(metadata)
      .fg(Color::DarkGrey)
      .set_alignment(comfy_table::CellAlignment::Left),
  ]);
  println!("{table}");
  Ok(())
}
/// Renders one table per project file: the file line (first file is marked
/// "current" and colored green), its publication date, and truncated
/// hashes. Prints a notice when no files are available.
fn display_project_files(files: &[ProjectFile]) -> Result<()> {
  if files.is_empty() {
    println!("{}", "No files available".yellow());
    return Ok(());
  }
  println!("{}", "Project Files".cyan().bold());
  for (idx, file) in files.iter().enumerate() {
    // The first file in the list is treated as the active one.
    let is_current = idx == 0;
    let mut table = Table::new();
    table
      .load_preset(presets::UTF8_FULL)
      .set_content_arrangement(ContentArrangement::Dynamic);
    let status_text = if is_current { " current" } else { "" };
    let file_line =
      format!("{}={}:{status_text}", file.file_type, file.file_name);
    let file_color = if is_current { Color::Green } else { Color::White };
    table.add_row(vec![Cell::new(file_line).fg(file_color)]);
    // Date published
    table.add_row(vec![Cell::new(&file.date_published).fg(Color::DarkGrey)]);
    // Empty line
    table.add_row(vec![Cell::new("")]);
    // Hashes, truncated to first/last 16 characters when longer than 32.
    for (hash_type, hash_value) in &file.hashes {
      let display_hash = if hash_value.len() > 32 {
        format!(
          "{}...{}",
          &hash_value[..16],
          &hash_value[hash_value.len() - 16..]
        )
      } else {
        hash_value.clone()
      };
      table.add_row(vec![
        Cell::new(format!("{hash_type}={display_hash}")).fg(Color::DarkGrey),
      ]);
    }
    println!("{table}");
    println!();
  }
  Ok(())
}
/// Prints the project's key/value properties: type, side, update strategy,
/// redistributability, and the optional subpath and alias list.
fn display_properties(project: &Project) -> Result<()> {
  println!("{}", "Properties".cyan().bold());
  // Enum values are rendered as lowercased Debug text.
  let type_text = format!("{:?}", project.r#type).to_lowercase();
  let side_text = format!("{:?}", project.side).to_lowercase();
  let strategy_text = format!("{:?}", project.update_strategy).to_lowercase();
  println!(" {}={}", "type".yellow(), type_text);
  println!(" {}={}", "side".yellow(), side_text);
  println!(" {}={}", "update_strategy".yellow(), strategy_text);
  println!(
    " {}={}",
    "redistributable".yellow(),
    project.redistributable
  );
  if let Some(subpath) = &project.subpath {
    println!(" {}={}", "subpath".yellow(), subpath);
  }
  if !project.aliases.is_empty() {
    let joined =
      project.aliases.iter().cloned().collect::<Vec<_>>().join(", ");
    println!(" {}={}", "aliases".yellow(), joined);
  }
  Ok(())
}
/// Print the dependency tree rooted at each direct dependency of the
/// given project.
fn display_dependencies(project: &Project, lockfile: &LockFile) -> Result<()> {
  println!("{}", "Dependencies".cyan().bold());

  // Union of required dependencies across every file of the project.
  let all_deps: HashSet<_> = project
    .files
    .iter()
    .flat_map(|file| file.required_dependencies.iter().cloned())
    .collect();

  if all_deps.is_empty() {
    println!("  {}", "No dependencies".dim());
    return Ok(());
  }

  // A single visited-set is shared across roots so repeats are caught.
  let mut visited = HashSet::new();
  for dep_id in all_deps {
    display_dependency_tree(&dep_id, lockfile, 1, &mut visited)?;
  }
  Ok(())
}
/// Print one dependency (and, recursively, its transitive dependencies)
/// as an indented tree line.
///
/// `dep_id` may be a platform id, a slug, or a pakku id; the first
/// lockfile project matching any of those is used. `visited` guards
/// against cycles and a depth cap of 5 bounds the recursion.
fn display_dependency_tree(
  dep_id: &str,
  lockfile: &LockFile,
  depth: usize,
  visited: &mut HashSet<String>,
) -> Result<()> {
  let indent = "  ".repeat(depth);
  let tree_char = if depth == 1 { "└─" } else { "├─" };
  // Resolve the dependency id against platform ids, slugs, and pakku ids.
  // `as_deref` compares without allocating a temporary String per project
  // (the original built `&dep_id.to_string()` for every candidate).
  let project = lockfile.projects.iter().find(|p| {
    p.id.values().any(|id| id == dep_id)
      || p.slug.values().any(|slug| slug == dep_id)
      || p.pakku_id.as_deref() == Some(dep_id)
  });
  if let Some(proj) = project {
    let name = get_project_name(proj);
    // A name we already printed on this walk is reported as circular
    // instead of recursing forever.
    if visited.contains(&name) {
      println!("{}{} {} {}", indent, tree_char, name, "(circular)".red());
      return Ok(());
    }
    println!("{}{} {} (required)", indent, tree_char, name.green());
    visited.insert(name);
    // Depth cap is a belt-and-braces guard on top of `visited`.
    if depth < 5 {
      for file in &proj.files {
        for nested_dep in &file.required_dependencies {
          display_dependency_tree(nested_dep, lockfile, depth + 1, visited)?;
        }
      }
    }
  } else {
    // Referenced as a dependency but no matching project is locked.
    println!(
      "{}{} {} {}",
      indent,
      tree_char,
      dep_id,
      "(not in lockfile)".yellow()
    );
  }
  Ok(())
}
/// Best-effort display name: the first name entry, falling back to the
/// first slug, then to "Unknown".
fn get_project_name(project: &Project) -> String {
  if let Some(name) = project.name.values().next() {
    return name.clone();
  }
  project
    .slug
    .values()
    .next()
    .cloned()
    .unwrap_or_else(|| "Unknown".to_string())
}
#[cfg(test)]
mod tests {
  use std::collections::HashMap;
  use super::*;
  use crate::model::enums::{
    ProjectSide,
    ProjectType,
    ReleaseType,
    UpdateStrategy,
  };
  // Build a minimal Modrinth-keyed mod project with the given identifiers
  // and no files attached.
  fn create_test_project(pakku_id: &str, slug: &str, name: &str) -> Project {
    let mut slug_map = HashMap::new();
    slug_map.insert("modrinth".to_string(), slug.to_string());
    let mut name_map = HashMap::new();
    name_map.insert("modrinth".to_string(), name.to_string());
    let mut id_map = HashMap::new();
    id_map.insert("modrinth".to_string(), pakku_id.to_string());
    Project {
      pakku_id: Some(pakku_id.to_string()),
      pakku_links: HashSet::new(),
      r#type: ProjectType::Mod,
      side: ProjectSide::Both,
      slug: slug_map,
      name: name_map,
      id: id_map,
      update_strategy: UpdateStrategy::Latest,
      redistributable: true,
      subpath: None,
      aliases: HashSet::new(),
      export: true,
      files: vec![],
    }
  }
  // Build a Modrinth-targeted lockfile (MC 1.20.1, fabric 0.15.0) holding
  // the given projects, inserted via `add_project`.
  fn create_test_lockfile(projects: Vec<Project>) -> LockFile {
    use crate::model::enums::Target;
    let mut loaders = HashMap::new();
    loaders.insert("fabric".to_string(), "0.15.0".to_string());
    let mut lockfile = LockFile {
      target: Some(Target::Modrinth),
      mc_versions: vec!["1.20.1".to_string()],
      loaders,
      projects: Vec::new(),
      lockfile_version: 1,
    };
    for project in projects {
      lockfile.add_project(project);
    }
    lockfile
  }
  // Lookup by the platform slug should resolve the project.
  #[test]
  fn test_find_project_by_slug() {
    let project = create_test_project("test-id", "test-slug", "Test Mod");
    let lockfile = create_test_lockfile(vec![project]);
    let found = find_project(&lockfile, "test-slug");
    assert!(found.is_some());
    assert_eq!(found.unwrap().pakku_id, Some("test-id".to_string()));
  }
  // Lookup by display name must ignore case.
  #[test]
  fn test_find_project_by_name() {
    let project = create_test_project("test-id", "test-slug", "Test Mod");
    let lockfile = create_test_lockfile(vec![project]);
    let found = find_project(&lockfile, "test mod"); // Case-insensitive
    assert!(found.is_some());
    assert_eq!(found.unwrap().pakku_id, Some("test-id".to_string()));
  }
  // Lookup by the pakku id should also resolve the project.
  #[test]
  fn test_find_project_by_pakku_id() {
    let project = create_test_project("test-id", "test-slug", "Test Mod");
    let lockfile = create_test_lockfile(vec![project]);
    let found = find_project(&lockfile, "test-id");
    assert!(found.is_some());
    assert_eq!(found.unwrap().pakku_id, Some("test-id".to_string()));
  }
  // An identifier matching nothing yields None rather than an error.
  #[test]
  fn test_find_project_not_found() {
    let project = create_test_project("test-id", "test-slug", "Test Mod");
    let lockfile = create_test_lockfile(vec![project]);
    let found = find_project(&lockfile, "nonexistent");
    assert!(found.is_none());
  }
  // A near-miss query (small edit distance) should surface the real slug
  // as a suggestion.
  #[test]
  fn test_fuzzy_matching_close_match() {
    let project1 = create_test_project("id1", "fabric-api", "Fabric API");
    let project2 = create_test_project("id2", "sodium", "Sodium");
    let lockfile = create_test_lockfile(vec![project1, project2]);
    // Typo: "fabrc-api" should suggest "fabric-api"
    let suggestions = find_similar_projects(&lockfile, "fabrc-api", 5);
    assert!(suggestions.is_some());
    let suggestions = suggestions.unwrap();
    assert!(!suggestions.is_empty());
    assert!(suggestions.contains(&"fabric-api".to_string()));
  }
  // A wildly different query must not produce suggestions.
  #[test]
  fn test_fuzzy_matching_no_match() {
    let project = create_test_project("test-id", "test-slug", "Test Mod");
    let lockfile = create_test_lockfile(vec![project]);
    // Very different query, should have no suggestions (distance > 3)
    let suggestions =
      find_similar_projects(&lockfile, "completely-different-xyz", 5);
    assert!(suggestions.is_none() || suggestions.unwrap().is_empty());
  }
  // Aliases participate in project matching.
  #[test]
  fn test_project_matches_alias() {
    let mut project = create_test_project("test-id", "test-slug", "Test Mod");
    project.aliases.insert("test-alias".to_string());
    assert!(project_matches(&project, "test-alias"));
  }
  // Two projects whose files require each other must not make the tree
  // printer recurse forever.
  #[test]
  fn test_circular_dependency_detection() {
    // This is a conceptual test - in practice, we'd need to set up files with
    // dependencies
    let mut project1 = create_test_project("dep1", "dep1-slug", "Dependency 1");
    let mut project2 = create_test_project("dep2", "dep2-slug", "Dependency 2");
    // Create files with circular dependencies
    let file1 = ProjectFile {
      file_type: "modrinth".to_string(),
      file_name: "dep1.jar".to_string(),
      mc_versions: vec!["1.20.1".to_string()],
      loaders: vec!["fabric".to_string()],
      release_type: ReleaseType::Release,
      url: "https://example.com/dep1.jar".to_string(),
      id: "file1".to_string(),
      parent_id: "dep1".to_string(),
      hashes: HashMap::new(),
      required_dependencies: vec!["dep2".to_string()],
      size: 1000,
      date_published: "2024-01-01T00:00:00Z".to_string(),
    };
    let file2 = ProjectFile {
      file_type: "modrinth".to_string(),
      file_name: "dep2.jar".to_string(),
      mc_versions: vec!["1.20.1".to_string()],
      loaders: vec!["fabric".to_string()],
      release_type: ReleaseType::Release,
      url: "https://example.com/dep2.jar".to_string(),
      id: "file2".to_string(),
      parent_id: "dep2".to_string(),
      hashes: HashMap::new(),
      required_dependencies: vec!["dep1".to_string()],
      size: 1000,
      date_published: "2024-01-01T00:00:00Z".to_string(),
    };
    project1.files.push(file1);
    project2.files.push(file2);
    let lockfile = create_test_lockfile(vec![project1, project2]);
    // Test that display_dependency_tree handles circular deps gracefully
    let mut visited = HashSet::new();
    let result = display_dependency_tree("dep1", &lockfile, 1, &mut visited);
    assert!(result.is_ok());
  }
}

47
src/cli/commands/link.rs Normal file
View file

@ -0,0 +1,47 @@
use std::path::Path;
use crate::{
cli::LinkArgs,
error::{PakkerError, Result},
model::LockFile,
};
/// Record a dependency link: make `args.to` depend on `args.from` by
/// inserting the source project's pakku id into the target project's
/// `pakku_links` set. The operation is idempotent.
pub fn execute(args: LinkArgs, lockfile_path: &Path) -> Result<()> {
  log::info!("Linking {} -> {}", args.from, args.to);
  // `LockFile::load` expects the directory containing the lockfile.
  let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
  let mut lockfile = LockFile::load(lockfile_dir)?;

  // The source project must exist and must carry a pakku id to link to.
  let from_id = lockfile
    .projects
    .iter()
    .find(|p| p.matches_input(&args.from))
    .ok_or_else(|| PakkerError::ProjectNotFound(args.from.clone()))?
    .pakku_id
    .clone()
    .ok_or_else(|| {
      PakkerError::InvalidProject("From project has no pakku_id".to_string())
    })?;

  let to_project = lockfile
    .projects
    .iter_mut()
    .find(|p| p.matches_input(&args.to))
    .ok_or_else(|| PakkerError::ProjectNotFound(args.to.clone()))?;

  // `insert` returns false when the link already exists; nothing to save
  // in that case.
  if !to_project.pakku_links.insert(from_id) {
    log::info!("Link already exists");
    return Ok(());
  }

  lockfile.save(lockfile_dir)?;
  log::info!("Successfully linked projects");
  Ok(())
}

50
src/cli/commands/ls.rs Normal file
View file

@ -0,0 +1,50 @@
use std::path::Path;
use crate::{cli::LsArgs, error::Result, model::LockFile};
/// List every project in the lockfile: a compact one-liner per project,
/// or (with `--detailed`) type, side, file, version and dependency info.
pub fn execute(args: LsArgs, lockfile_path: &Path) -> Result<()> {
  // `LockFile::load` expects the directory containing the lockfile.
  let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
  let lockfile = LockFile::load(lockfile_dir)?;

  if lockfile.projects.is_empty() {
    println!("No projects installed");
    return Ok(());
  }

  println!("Installed projects ({}):", lockfile.projects.len());
  println!();

  for project in &lockfile.projects {
    // Compact listing: name plus the primary file name, when any.
    if !args.detailed {
      let file_info = match project.files.first() {
        Some(f) => format!(" ({})", f.file_name),
        None => String::new(),
      };
      println!("  {}{}", project.get_name(), file_info);
      continue;
    }

    // Detailed listing.
    let id = project.pakku_id.as_deref().unwrap_or("unknown");
    println!("  {} ({})", project.get_name(), id);
    println!("    Type: {:?}", project.r#type);
    println!("    Side: {:?}", project.side);
    if let Some(file) = project.files.first() {
      println!("    File: {}", file.file_name);
      println!(
        "      Version: {} ({})",
        file.release_type, file.date_published
      );
    }
    if !project.pakku_links.is_empty() {
      println!("    Dependencies: {}", project.pakku_links.len());
    }
    println!();
  }
  Ok(())
}

23
src/cli/commands/mod.rs Normal file
View file

@ -0,0 +1,23 @@
pub mod add;
pub mod add_prj;
pub mod cfg;
pub mod cfg_prj;
pub mod credentials;
pub mod credentials_set;
pub mod diff;
pub mod export;
pub mod fetch;
pub mod fork;
pub mod import;
pub mod init;
pub mod inspect;
pub mod link;
pub mod ls;
pub mod remote;
pub mod remote_update;
pub mod rm;
pub mod set;
pub mod status;
pub mod sync;
pub mod unlink;
pub mod update;

151
src/cli/commands/remote.rs Normal file
View file

@ -0,0 +1,151 @@
use std::{
fs,
path::{Path, PathBuf},
};
use crate::{
cli::RemoteArgs,
error::{PakkerError, Result},
fetch::Fetcher,
git,
model::{config::Config, lockfile::LockFile},
};
const REMOTE_DIR: &str = ".pakku-remote";
/// Install or manage a modpack sourced from a remote Git repository.
///
/// Behaviour depends on the arguments: `--remove` deletes the local
/// remote checkout, no URL prints the current remote status, and a URL
/// clones (or updates) the repository and installs its modpack.
pub async fn execute(args: RemoteArgs) -> Result<()> {
  let remote_path = PathBuf::from(REMOTE_DIR);

  // --remove short-circuits everything else.
  if args.remove {
    if remote_path.exists() {
      fs::remove_dir_all(&remote_path)?;
      log::info!("Removed remote from modpack");
    } else {
      log::warn!("No remote configured");
    }
    return Ok(());
  }

  // Without a URL this command is read-only: just report status.
  let Some(url) = args.url else {
    show_remote_status(&remote_path)?;
    return Ok(());
  };
  log::info!("Installing modpack from: {url}");

  // Clone a fresh checkout, or fast-forward an existing one.
  let ref_name = args.branch.as_deref().unwrap_or("HEAD");
  if remote_path.exists() {
    log::info!("Remote directory exists, updating...");
    let remote_name = "origin";
    git::fetch_updates(&remote_path, remote_name, ref_name, None)?;
    git::reset_to_ref(&remote_path, remote_name, ref_name)?;
  } else {
    log::info!("Cloning repository...");
    git::clone_repository(&url, &remote_path, ref_name, None)?;
  }

  // The remote must ship a lockfile; its config is optional.
  let remote_lockfile_path = remote_path.join("pakku-lock.json");
  if !remote_lockfile_path.exists() {
    return Err(PakkerError::ConfigError(
      "Remote repository does not contain pakku-lock.json".to_string(),
    ));
  }
  let remote_lockfile = LockFile::load(&remote_path)?;
  let remote_config = Config::load(&remote_path).ok();

  // Mirror lockfile (and config, when present) into the working dir.
  let current_lockfile_path = PathBuf::from("pakku-lock.json");
  fs::copy(&remote_lockfile_path, &current_lockfile_path)?;
  log::info!("Copied lockfile from remote");
  if remote_config.is_some() {
    let remote_config_path = remote_path.join("pakku.json");
    let current_config_path = PathBuf::from("pakku.json");
    if remote_config_path.exists() {
      fs::copy(&remote_config_path, &current_config_path)?;
      log::info!("Copied config from remote");
    }
  }

  // Download all project files referenced by the remote lockfile.
  log::info!("Fetching project files...");
  let fetcher = Fetcher::new(&remote_path);
  fetcher
    .fetch_all(&remote_lockfile, &remote_config.unwrap_or_default())
    .await?;

  // Copy override directories into the pack.
  sync_overrides(&remote_path, args.server_pack)?;
  log::info!("Successfully installed modpack from remote");
  Ok(())
}
/// Print the configured remote's directory, origin URL and current
/// commit. Purely informational; missing git metadata is skipped.
fn show_remote_status(remote_path: &Path) -> Result<()> {
  if !remote_path.exists() {
    println!("No remote configured");
    return Ok(());
  }
  println!("Remote status:");
  println!("  Directory: {}", remote_path.display());
  if git::is_git_repository(remote_path) {
    if let Ok(url) = git::get_remote_url(remote_path, "origin") {
      println!("  URL: {url}");
    }
    if let Ok(sha) = git::get_current_commit_sha(remote_path, None) {
      // Checked slice: `&sha[..8]` would panic on a SHA shorter than
      // eight bytes; fall back to printing the whole string.
      println!("  Commit: {}", sha.get(..8).unwrap_or(&sha));
    }
  }
  Ok(())
}
/// Copy override directories from the remote checkout into the current
/// directory. `overrides` always applies; server packs additionally take
/// `server_overrides`, client installs take `client_overrides`.
fn sync_overrides(remote_path: &Path, server_pack: bool) -> Result<()> {
  let side_specific = if server_pack {
    "server_overrides"
  } else {
    "client_overrides"
  };
  for dir_name in ["overrides", side_specific] {
    let src_dir = remote_path.join(dir_name);
    // `is_dir` implies existence, so no separate exists() check needed.
    if src_dir.is_dir() {
      log::info!("Syncing {dir_name} directory...");
      copy_dir_recursive(&src_dir, Path::new("."))?;
    }
  }
  Ok(())
}
/// Recursively mirror the contents of `src` into `dst`, creating `dst`
/// (and nested directories) as needed. Existing files are overwritten.
fn copy_dir_recursive(src: &Path, dst: &Path) -> Result<()> {
  if !dst.exists() {
    fs::create_dir_all(dst)?;
  }
  for entry in fs::read_dir(src)? {
    let entry = entry?;
    let source = entry.path();
    let target = dst.join(entry.file_name());
    if source.is_dir() {
      copy_dir_recursive(&source, &target)?;
    } else {
      fs::copy(&source, &target)?;
    }
  }
  Ok(())
}

View file

@ -0,0 +1,121 @@
use std::path::{Path, PathBuf};

use crate::{cli::RemoteUpdateArgs, error::PakkerError, git, model::Config};
/// Update modpack from remote Git repository
///
/// This command updates the current modpack from its remote Git repository.
/// It fetches the latest changes from the remote and syncs overrides.
pub async fn execute(args: RemoteUpdateArgs) -> Result<(), PakkerError> {
  // Check if lockfile exists in current directory - if it does, we're in a
  // modpack directory and should not update remote (use regular update
  // instead)
  let lockfile_path = PathBuf::from("pakku-lock.json");
  if lockfile_path.exists() {
    return Err(PakkerError::InvalidInput(
      "Cannot update remote from a modpack directory. Use 'update' command \
       instead."
        .to_string(),
    ));
  }
  // Remote directory for the cloned modpack; must already be installed.
  let remote_dir = PathBuf::from(".pakku-remote");
  if !remote_dir.exists() {
    return Err(PakkerError::RemoteNotFound(
      "No remote found. Use 'remote' command to install a modpack first."
        .to_string(),
    ));
  }
  // Fetch updates from remote repository
  println!("Updating remote repository...");
  let remote_name = "origin";
  let ref_name = args.branch.as_deref().unwrap_or("HEAD");
  git::fetch_updates(&remote_dir, remote_name, ref_name, None)?;
  // The remote must still contain a lockfile after the fetch.
  let remote_lockfile_path = remote_dir.join("pakku-lock.json");
  if !remote_lockfile_path.exists() {
    return Err(PakkerError::FileNotFound(
      "Remote lockfile not found".to_string(),
    ));
  }
  // Read remote config if it exists. `Config::load` expects the modpack
  // *directory* (every other call site passes a directory), so pass
  // `remote_dir` here — not the pakku.json file path.
  let remote_config_path = remote_dir.join("pakku.json");
  let _remote_config = if remote_config_path.exists() {
    match Config::load(&remote_dir) {
      Ok(config) => Some(config),
      Err(e) => {
        eprintln!("Warning: Could not read remote config: {e}");
        None
      },
    }
  } else {
    None
  };
  // Sync overrides from remote directory
  println!("Syncing overrides...");
  sync_overrides(&remote_dir).await?;
  // Clean up remote directory
  std::fs::remove_dir_all(&remote_dir)?;
  println!("Remote modpack updated successfully.");
  Ok(())
}
/// Sync override files from remote directory to current directory
async fn sync_overrides(remote_dir: &PathBuf) -> Result<(), PakkerError> {
let remote_config_path = remote_dir.join("pakku.json");
if !remote_config_path.exists() {
return Ok(());
}
let config = Config::load(&remote_config_path)?;
// Get override directories from config
let overrides = config.overrides;
if overrides.is_empty() {
return Ok(());
}
for override_path in overrides {
let source = remote_dir.join(&override_path);
let dest = PathBuf::from(&override_path);
if source.exists() {
// Copy override directory
copy_directory(&source, &dest)?;
println!(" Synced: {override_path}");
}
}
Ok(())
}
/// Recursively copy a directory
fn copy_directory(src: &PathBuf, dest: &PathBuf) -> Result<(), PakkerError> {
if !dest.exists() {
std::fs::create_dir_all(dest)?;
}
for entry in std::fs::read_dir(src)? {
let entry = entry?;
let path = entry.path();
let dest_path = dest.join(entry.file_name());
if path.is_dir() {
copy_directory(&path, &dest_path)?;
} else {
std::fs::copy(&path, &dest_path)?;
}
}
Ok(())
}

89
src/cli/commands/rm.rs Normal file
View file

@ -0,0 +1,89 @@
use std::path::Path;
use crate::{
cli::RmArgs,
error::{PakkerError, Result},
model::LockFile,
ui_utils::prompt_yes_no,
};
/// Remove one or more projects from the lockfile.
///
/// Each input may be a pakku id, a slug, a (case-insensitive) name, or an
/// alias. After optional confirmation, matched projects are removed and
/// `pakku_links` entries pointing at them are dropped from the remaining
/// projects.
pub async fn execute(
  args: RmArgs,
  lockfile_path: &Path,
  _config_path: &Path,
) -> Result<()> {
  log::info!("Removing projects: {:?}", args.inputs);
  // Load expects directory path, so get parent directory
  let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
  let mut lockfile = LockFile::load(lockfile_dir)?;

  // Resolve every input to a project index once, so the match predicate
  // is not duplicated and the list is not scanned a second time later.
  let mut indices: Vec<usize> = Vec::new();
  let mut projects_to_remove = Vec::new();
  for input in &args.inputs {
    let found = lockfile.projects.iter().position(|p| {
      p.pakku_id.as_deref() == Some(input)
        || p.slug.values().any(|s| s == input)
        || p.name.values().any(|n| n.eq_ignore_ascii_case(input))
        || p.aliases.contains(input)
    });
    match found {
      // The same project may match several inputs; record it only once.
      Some(pos) if !indices.contains(&pos) => {
        indices.push(pos);
        projects_to_remove.push(lockfile.projects[pos].get_name());
      },
      Some(_) => {},
      None => log::warn!("Project not found: {input}"),
    }
  }

  if projects_to_remove.is_empty() {
    return Err(PakkerError::ProjectNotFound(
      "None of the specified projects found".to_string(),
    ));
  }

  // Ask for confirmation unless --yes flag is provided
  if !args.yes {
    println!("The following projects will be removed:");
    for name in &projects_to_remove {
      println!("  - {name}");
    }
    if !prompt_yes_no("Do you want to continue?", false)? {
      println!("Removal cancelled.");
      return Ok(());
    }
  }

  // Remove from the highest index down so earlier indices stay valid.
  indices.sort_unstable();
  let mut removed_ids = Vec::new();
  let mut removed_count = 0;
  for pos in indices.into_iter().rev() {
    let project = lockfile.projects.remove(pos);
    log::info!("Removed: {}", project.get_name());
    if let Some(pakku_id) = project.pakku_id {
      removed_ids.push(pakku_id);
    }
    removed_count += 1;
  }

  // Clean up pakku_links from all remaining projects
  for project in &mut lockfile.projects {
    project
      .pakku_links
      .retain(|link| !removed_ids.contains(link));
  }

  // Save lockfile
  lockfile.save(lockfile_dir)?;
  log::info!("Successfully removed {removed_count} project(s)");
  Ok(())
}

156
src/cli/commands/set.rs Normal file
View file

@ -0,0 +1,156 @@
use std::{collections::HashMap, path::Path, str::FromStr};
use crate::{
cli::SetArgs,
error::PakkerError,
model::{Config, LockFile, ProjectSide, ProjectType, Target, UpdateStrategy},
};
pub async fn execute(
args: SetArgs,
lockfile_path: &Path,
config_path: &Path,
) -> Result<(), PakkerError> {
// Load expects directory path, so get parent directory
let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
let config_dir = config_path.parent().unwrap_or(Path::new("."));
let mut lockfile = LockFile::load(lockfile_dir)?;
let config = Config::load(config_dir)?;
// Check if we're modifying lockfile properties or project properties
let is_lockfile_modification = args.target.is_some()
|| args.mc_versions.is_some()
|| args.loaders.is_some();
if is_lockfile_modification {
// Modify lockfile properties
if let Some(target_str) = &args.target {
let target = Target::from_str(target_str).map_err(|e| {
PakkerError::InvalidInput(format!("Invalid target: {e}"))
})?;
lockfile.target = Some(target);
println!("Set target to: {target:?}");
}
if let Some(mc_versions_str) = &args.mc_versions {
let mc_versions: Vec<String> = mc_versions_str
.split(',')
.map(|s| s.trim().to_string())
.filter(|s| !s.is_empty())
.collect();
if mc_versions.is_empty() {
return Err(PakkerError::InvalidInput(
"At least one Minecraft version is required".to_string(),
));
}
// Validate that all projects are compatible with new MC versions
for project in &lockfile.projects {
let compatible = project
.files
.iter()
.any(|file| file.mc_versions.iter().any(|v| mc_versions.contains(v)));
if !compatible {
eprintln!(
"Warning: Project '{}' has no files compatible with new MC \
versions",
project.get_name()
);
}
}
lockfile.mc_versions = mc_versions.clone();
println!("Set Minecraft versions to: {mc_versions:?}");
}
if let Some(loaders_str) = &args.loaders {
let mut loaders: HashMap<String, String> = HashMap::new();
for pair in loaders_str.split(',') {
let parts: Vec<&str> = pair.split('=').collect();
if parts.len() != 2 {
return Err(PakkerError::InvalidInput(format!(
"Invalid loader format '{pair}'. Expected 'name=version'"
)));
}
loaders
.insert(parts[0].trim().to_string(), parts[1].trim().to_string());
}
if loaders.is_empty() {
return Err(PakkerError::InvalidInput(
"At least one loader is required".to_string(),
));
}
let loader_names: Vec<String> = loaders.keys().cloned().collect();
// Validate that all projects are compatible with new loaders
for project in &lockfile.projects {
let compatible = project.files.iter().any(|file| {
file.loaders.is_empty()
|| file.loaders.iter().any(|l| loader_names.contains(l))
});
if !compatible {
eprintln!(
"Warning: Project '{}' has no files compatible with new loaders",
project.get_name()
);
}
}
lockfile.loaders = loaders.clone();
println!("Set loaders to: {loaders:?}");
}
lockfile.save(lockfile_dir)?;
println!("Lockfile properties updated successfully");
} else if let Some(input) = &args.input {
// Modify project properties
let project_name = {
let project = lockfile
.projects
.iter_mut()
.find(|p| p.matches_input(input))
.ok_or_else(|| PakkerError::ProjectNotFound(input.clone()))?;
if let Some(type_str) = &args.r#type {
let project_type =
ProjectType::from_str(type_str).map_err(PakkerError::InvalidInput)?;
project.r#type = project_type;
}
if let Some(side_str) = &args.side {
let side =
ProjectSide::from_str(side_str).map_err(PakkerError::InvalidInput)?;
project.side = side;
}
if let Some(strategy_str) = &args.strategy {
let strategy = UpdateStrategy::from_str(strategy_str)
.map_err(PakkerError::InvalidInput)?;
project.update_strategy = strategy;
}
if let Some(redistributable) = args.redistributable {
project.redistributable = redistributable;
}
project.get_name()
};
lockfile.save(lockfile_dir)?;
config.save(config_dir)?;
println!("Updated project: {project_name}");
} else {
return Err(PakkerError::InvalidInput(
"Either provide a project identifier or lockfile properties to modify"
.to_string(),
));
}
Ok(())
}

370
src/cli/commands/status.rs Normal file
View file

@ -0,0 +1,370 @@
use std::{collections::HashMap, path::Path, sync::Arc};
use futures::stream::{FuturesUnordered, StreamExt};
use indicatif::{ProgressBar, ProgressStyle};
use tokio::sync::Semaphore;
use yansi::Paint;
use crate::{
error::Result,
model::{Config, LockFile, Project},
platform::create_platform,
};
/// Show modpack metadata, check every project for available updates
/// (sequentially or in parallel), and optionally run the update command.
pub async fn execute(
  parallel: bool,
  lockfile_path: &Path,
  config_path: &Path,
) -> Result<()> {
  let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
  let config_dir = config_path.parent().unwrap_or(Path::new("."));
  let lockfile = LockFile::load(lockfile_dir)?;
  let config = Config::load(config_dir)?;

  // Header with modpack metadata.
  display_modpack_info(&lockfile, &config);
  println!();

  // Update check runs either one-at-a-time or concurrently.
  let (updates, errors) = match parallel {
    true => check_updates_parallel(&lockfile).await?,
    false => check_updates_sequential(&lockfile).await?,
  };

  display_update_results(&updates);

  // Per-project failures are reported but do not abort the command.
  if !errors.is_empty() {
    println!();
    println!("{}", "Errors encountered:".red());
    for (project, error) in &errors {
      println!("  - {}: {}", project.yellow(), error.red());
    }
  }

  // Offer to apply the pending updates right away.
  if !updates.is_empty() {
    println!();
    if crate::ui_utils::prompt_yes_no("Update now?", false)? {
      let update_args = crate::cli::UpdateArgs {
        inputs: vec![],
        yes: true, // Auto-yes for status command
      };
      crate::cli::commands::update::execute(
        update_args,
        lockfile_path,
        config_path,
      )
      .await?;
    }
  }
  Ok(())
}
/// Print the two-line modpack summary: name/version/author, then
/// Minecraft versions, loaders and target platform.
fn display_modpack_info(lockfile: &LockFile, config: &Config) {
  let author = config.author.as_deref().unwrap_or("Unknown");
  println!(
    "Managing {} modpack, version {}, by {}",
    config.name.cyan(),
    config.version.cyan(),
    author.cyan()
  );

  let mc_versions = lockfile.mc_versions.join(", ");
  // Loaders are rendered as "name-version" pairs joined by commas.
  let loaders_str = lockfile
    .loaders
    .iter()
    .map(|(loader, version)| format!("{loader}-{version}"))
    .collect::<Vec<_>>()
    .join(", ");
  println!(
    "on Minecraft version {}, loader {}, targeting platform {:?}.",
    mc_versions.cyan(),
    loaders_str.cyan(),
    lockfile.target
  );
}
/// An available update for a single project, holding one `FileUpdate`
/// per platform file that changed.
#[derive(Debug)]
struct ProjectUpdate {
  // Platform name -> slug; used to build a project URL for display.
  slug: HashMap<String, String>,
  // Human-readable project name (first entry of the project's name map).
  name: String,
  // Debug-formatted project type, e.g. "Mod".
  project_type: String,
  // Debug-formatted project side.
  side: String,
  // One entry per platform whose file id changed.
  file_updates: Vec<FileUpdate>,
}
/// A single old-file -> new-file transition on one platform.
#[derive(Debug)]
struct FileUpdate {
  // Platform key (the project file's `file_type`).
  platform: String,
  old_filename: String,
  new_filename: String,
}
/// Check every project for updates one at a time, reporting progress.
///
/// Returns the detected updates plus `(project name, error message)`
/// pairs for projects whose check failed.
async fn check_updates_sequential(
  lockfile: &LockFile,
) -> Result<(Vec<ProjectUpdate>, Vec<(String, String)>)> {
  let total = lockfile.projects.len();
  let mut updates = Vec::new();
  let mut errors = Vec::new();
  // Create progress bar
  let pb = ProgressBar::new(total as u64);
  pb.set_style(
    ProgressStyle::default_bar()
      .template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}")
      .unwrap()
      .progress_chars("#>-"),
  );
  pb.set_message("Checking for updates...");
  for project in &lockfile.projects {
    // `cloned().unwrap_or_else` avoids the original's borrow of a
    // temporary String (`.unwrap_or(&"Unknown".to_string()).clone()`)
    // and only allocates the fallback when it is actually needed.
    let project_name = project
      .name
      .values()
      .next()
      .cloned()
      .unwrap_or_else(|| "Unknown".to_string());
    pb.set_message(format!("Checking {project_name}..."));
    match check_project_update(project, lockfile).await {
      Ok(update_opt) => {
        if let Some(update) = update_opt {
          updates.push(update);
        }
      },
      Err(e) => {
        // `project_name` is not used after this point, so move it.
        errors.push((project_name, e.to_string()));
      },
    }
    pb.inc(1);
  }
  pb.finish_with_message(format!("Checked {total} projects"));
  println!(); // Add blank line after progress bar
  Ok((updates, errors))
}
/// Check every project for updates concurrently, bounded to 10 in-flight
/// requests by a semaphore, reporting progress on a shared progress bar.
///
/// Returns the detected updates plus `(project name, error message)`
/// pairs for projects whose check failed.
async fn check_updates_parallel(
  lockfile: &LockFile,
) -> Result<(Vec<ProjectUpdate>, Vec<(String, String)>)> {
  let total = lockfile.projects.len();
  let semaphore = Arc::new(Semaphore::new(10));
  let mut futures = FuturesUnordered::new();
  // One shared clone of the lockfile instead of a deep clone per project
  // (the original cloned the entire lockfile for every spawned future).
  let shared_lockfile = Arc::new(lockfile.clone());
  // Create progress bar
  let pb = Arc::new(ProgressBar::new(total as u64));
  pb.set_style(
    ProgressStyle::default_bar()
      .template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}")
      .unwrap()
      .progress_chars("#>-"),
  );
  pb.set_message("Checking for updates (parallel)...");
  for project in &lockfile.projects {
    let project = project.clone();
    let sem = semaphore.clone();
    let pb_clone = pb.clone();
    let lockfile_ref = Arc::clone(&shared_lockfile);
    futures.push(async move {
      // The semaphore is never closed, so acquire() cannot fail here.
      let _permit = sem.acquire().await.unwrap();
      let result = check_project_update(&project, &lockfile_ref).await;
      pb_clone.inc(1);
      (project, result)
    });
  }
  let mut updates = Vec::new();
  let mut errors = Vec::new();
  while let Some((project, result)) = futures.next().await {
    match result {
      Ok(update_opt) => {
        if let Some(update) = update_opt {
          updates.push(update);
        }
      },
      Err(e) => {
        // Lazily-allocated fallback name, no temporary-String borrow.
        let project_name = project
          .name
          .values()
          .next()
          .cloned()
          .unwrap_or_else(|| "Unknown".to_string());
        errors.push((project_name, e.to_string()));
      },
    }
  }
  pb.finish_with_message(format!("Checked {total} projects"));
  println!(); // Add blank line after progress bar
  Ok((updates, errors))
}
/// Query each platform the project is known on until one responds, then
/// compare the returned files against the locked ones.
///
/// Returns `Ok(Some(_))` when updates were found, `Ok(None)` when the
/// first responsive platform reports no changes, and an error when no
/// platform answered at all.
async fn check_project_update(
  project: &Project,
  lockfile: &LockFile,
) -> Result<Option<ProjectUpdate>> {
  // Fallback slug for platforms without an explicit slug entry.
  let default_slug = project
    .slug
    .values()
    .next()
    .ok_or_else(|| {
      crate::error::PakkerError::InvalidProject("No slug found".to_string())
    })?
    .clone();
  // Loaders are loop-invariant; compute them once.
  let loaders: Vec<String> = lockfile.loaders.keys().cloned().collect();
  // Try each platform in project
  for platform_name in project.id.keys() {
    let api_key = get_api_key(platform_name);
    let platform = match create_platform(platform_name, api_key) {
      Ok(p) => p,
      Err(_) => continue,
    };
    // Prefer the slug registered for this specific platform; slugs can
    // differ between platforms (the original always used the first one).
    let slug = project.slug.get(platform_name).unwrap_or(&default_slug);
    match platform
      .request_project_with_files(slug, &lockfile.mc_versions, &loaders)
      .await
    {
      Ok(updated_project) => {
        // Compare files to detect updates
        let file_updates = detect_file_updates(project, &updated_project);
        if !file_updates.is_empty() {
          return Ok(Some(ProjectUpdate {
            slug: project.slug.clone(),
            name: project.name.values().next().cloned().unwrap_or_default(),
            project_type: format!("{:?}", project.r#type),
            side: format!("{:?}", project.side),
            file_updates,
          }));
        }
        return Ok(None); // No updates
      },
      Err(_) => {
        // Try next platform
        continue;
      },
    }
  }
  Err(crate::error::PakkerError::PlatformApiError(
    "Failed to check for updates on any platform".to_string(),
  ))
}
/// Compare the locked files against freshly fetched ones and collect a
/// `FileUpdate` for every platform whose file id changed.
fn detect_file_updates(
  current: &Project,
  updated: &Project,
) -> Vec<FileUpdate> {
  current
    .files
    .iter()
    .filter_map(|old_file| {
      // Pair each locked file with the fetched file of the same platform.
      let new_file = updated
        .files
        .iter()
        .find(|f| f.file_type == old_file.file_type)?;
      // A changed file id signals a newer version on that platform.
      (new_file.id != old_file.id).then(|| FileUpdate {
        platform: old_file.file_type.clone(),
        old_filename: old_file.file_name.clone(),
        new_filename: new_file.file_name.clone(),
      })
    })
    .collect()
}
/// Pretty-print the list of available updates, or a success message when
/// everything is current.
fn display_update_results(updates: &[ProjectUpdate]) {
  if updates.is_empty() {
    println!("{}", "✓ All projects are up to date".green());
    return;
  }
  println!();
  println!("{}", "📦 Updates Available:".cyan().bold());
  println!();
  for update in updates {
    // Build a clickable project URL when the platform is recognised.
    let project_url = update
      .slug
      .iter()
      .next()
      .map(|(platform, slug)| match platform.as_str() {
        "modrinth" => crate::ui_utils::modrinth_project_url(slug),
        "curseforge" => crate::ui_utils::curseforge_project_url(slug),
        _ => String::new(),
      })
      .unwrap_or_default();
    if project_url.is_empty() {
      println!(
        "{} ({}, {})",
        update.name.yellow(),
        update.project_type,
        update.side
      );
    } else {
      let hyperlinked = crate::ui_utils::hyperlink(
        &project_url,
        &update.name.yellow().to_string(),
      );
      println!("{} ({}, {})", hyperlinked, update.project_type, update.side);
    }
    // One line per changed file, old name -> new name.
    for file_update in &update.file_updates {
      println!(
        "  • {}: {} → {}",
        file_update.platform.cyan(),
        file_update.old_filename.dim(),
        file_update.new_filename.green()
      );
    }
    println!();
  }
  println!(
    "{}",
    format!("{} project(s) need updates", updates.len()).yellow()
  );
}
/// Best-effort display name: first name entry, else first slug, else
/// "Unknown". Currently unused, kept for future display code.
#[allow(dead_code)]
fn get_project_display_name(project: &Project) -> String {
  if let Some(name) = project.name.values().next() {
    return name.clone();
  }
  match project.slug.values().next() {
    Some(slug) => slug.clone(),
    None => "Unknown".to_string(),
  }
}
/// Look up the API token for a platform from its environment variable;
/// unknown platforms (and unset variables) yield `None`.
fn get_api_key(platform: &str) -> Option<String> {
  let var_name = match platform {
    "modrinth" => "MODRINTH_TOKEN",
    "curseforge" => "CURSEFORGE_API_KEY",
    _ => return None,
  };
  std::env::var(var_name).ok()
}

216
src/cli/commands/sync.rs Normal file
View file

@ -0,0 +1,216 @@
use std::{
collections::{HashMap, HashSet},
fs,
io::{self, Write},
path::{Path, PathBuf},
};
use indicatif::{ProgressBar, ProgressStyle};
use crate::{
cli::SyncArgs,
error::{PakkerError, Result},
fetch::Fetcher,
model::{Config, LockFile},
platform::{CurseForgePlatform, ModrinthPlatform, PlatformClient},
};
/// A single divergence between the files on disk and the lockfile:
/// either a file present on disk but untracked, or a lockfile project
/// whose file has disappeared.
enum SyncChange {
  Addition(PathBuf, String), // (file_path, project_name)
  Removal(String),           // project_pakku_id
}
/// Reconcile the lockfile with the files on disk: interactively add
/// untracked files, remove lockfile entries whose files are gone, then
/// fetch whatever the updated lockfile still needs.
pub async fn execute(
  args: SyncArgs,
  lockfile_path: &Path,
  config_path: &Path,
) -> Result<()> {
  log::info!("Synchronizing with lockfile");
  let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
  let config_dir = config_path.parent().unwrap_or(Path::new("."));
  let mut lockfile = LockFile::load(lockfile_dir)?;
  let config = Config::load(config_dir)?;

  // Nothing to do when disk and lockfile already agree.
  let changes = detect_changes(&lockfile, &config)?;
  if changes.is_empty() {
    println!("✓ Everything is in sync");
    return Ok(());
  }

  // Split the change list by kind.
  let mut additions = Vec::new();
  let mut removals = Vec::new();
  for change in changes {
    match change {
      SyncChange::Addition(path, name) => additions.push((path, name)),
      SyncChange::Removal(id) => removals.push(id),
    }
  }

  // With neither filter flag given, both kinds are processed.
  let no_filter = !args.additions && !args.removals;

  let spinner = ProgressBar::new_spinner();
  spinner.set_style(
    ProgressStyle::default_spinner()
      .template("{spinner:.green} {msg}")
      .unwrap(),
  );

  if no_filter || args.additions {
    for (file_path, _) in &additions {
      spinner
        .set_message(format!("Processing addition: {}", file_path.display()));
      if prompt_user(&format!("Add {} to lockfile?", file_path.display()))? {
        add_file_to_lockfile(&mut lockfile, file_path, &config).await?;
      }
    }
  }

  if no_filter || args.removals {
    for pakku_id in &removals {
      if let Some(project) = lockfile
        .projects
        .iter()
        .find(|p| p.pakku_id.as_ref() == Some(pakku_id))
      {
        // Prefer the display name; fall back to the pakku id.
        let name = project
          .name
          .values()
          .next()
          .map(std::string::String::as_str)
          .or(project.pakku_id.as_deref())
          .unwrap_or("unknown");
        spinner.set_message(format!("Processing removal: {name}"));
        if prompt_user(&format!("Remove {name} from lockfile?"))? {
          lockfile
            .remove_project(pakku_id)
            .ok_or_else(|| PakkerError::ProjectNotFound(pakku_id.clone()))?;
        }
      }
    }
  }
  spinner.finish_and_clear();

  lockfile.save(lockfile_dir)?;

  // Download anything the updated lockfile references but disk lacks.
  let fetcher = Fetcher::new(".");
  fetcher.sync(&lockfile, &config).await?;
  println!("✓ Sync complete");
  Ok(())
}
/// Compare the lockfile's recorded files with the mods directory on
/// disk and return additions (untracked `.jar` files) and removals
/// (lockfile entries whose file is gone).
///
/// The directory is scanned exactly once; the previous implementation
/// called `read_dir` twice and could observe two different snapshots
/// of the directory between the addition and removal passes.
fn detect_changes(
  lockfile: &LockFile,
  config: &Config,
) -> Result<Vec<SyncChange>> {
  let mut changes = Vec::new();
  // Get paths for each project type
  let paths = config.paths.clone();
  let mods_path = paths
    .get("mods")
    .map_or("mods", std::string::String::as_str);
  // Map every lockfile-tracked file path to its project's pakku id.
  let mut lockfile_files: HashMap<PathBuf, String> = HashMap::new();
  for project in &lockfile.projects {
    for file in &project.files {
      let file_path = PathBuf::from(mods_path).join(&file.file_name);
      if let Some(ref pakku_id) = project.pakku_id {
        lockfile_files.insert(file_path, pakku_id.clone());
      }
    }
  }
  // Single snapshot of the directory; an unreadable/absent directory
  // is treated as empty (same as the original behavior).
  let filesystem_files: HashSet<PathBuf> = fs::read_dir(mods_path)
    .map(|entries| {
      entries
        .flatten()
        .map(|e| e.path())
        .filter(|p| p.is_file())
        .collect()
    })
    .unwrap_or_default();
  // Additions: jar files on disk that the lockfile does not track.
  for path in &filesystem_files {
    if path.extension().is_some_and(|ext| ext == "jar")
      && !lockfile_files.contains_key(path)
    {
      let name = path.file_name().unwrap().to_string_lossy().to_string();
      changes.push(SyncChange::Addition(path.clone(), name));
    }
  }
  // Removals: lockfile entries whose file no longer exists on disk.
  for (lockfile_path, pakku_id) in &lockfile_files {
    if !filesystem_files.contains(lockfile_path) {
      changes.push(SyncChange::Removal(pakku_id.clone()));
    }
  }
  Ok(changes)
}
/// Identify a loose file by content hash and add the matching project
/// to the lockfile.
///
/// Computes the file's SHA-1 and queries Modrinth first, then
/// CurseForge. Files neither platform recognizes are skipped with a
/// warning rather than failing the sync.
async fn add_file_to_lockfile(
  lockfile: &mut LockFile,
  file_path: &Path,
  _config: &Config,
) -> Result<()> {
  // `modrinth` was previously named `_modrinth`, which misleadingly
  // marks a used binding as unused.
  let modrinth = ModrinthPlatform::new();
  let curseforge = CurseForgePlatform::new(None);
  let file_data = fs::read(file_path)?;
  // Compute SHA-1 hash from file bytes
  use sha1::Digest;
  let mut hasher = sha1::Sha1::new();
  hasher.update(&file_data);
  let hash = format!("{:x}", hasher.finalize());
  // Modrinth's file index is keyed by SHA-1.
  if let Ok(Some(project)) = modrinth.lookup_by_hash(&hash).await {
    lockfile.add_project(project);
    println!("✓ Added {} (from Modrinth)", file_path.display());
    return Ok(());
  }
  // NOTE(review): CurseForge's fingerprint API is Murmur2-based, but
  // the SHA-1 computed above is passed here — confirm that
  // `lookup_by_hash` derives the Murmur2 fingerprint internally,
  // otherwise this lookup can never match.
  if let Ok(Some(project)) = curseforge.lookup_by_hash(&hash).await {
    lockfile.add_project(project);
    println!("✓ Added {} (from CurseForge)", file_path.display());
    return Ok(());
  }
  println!("⚠ Could not identify {}, skipping", file_path.display());
  Ok(())
}
/// Ask a yes/no question on stdout and read the answer from stdin.
/// Returns `true` only for an explicit "y"/"Y"; everything else
/// (including empty input) is `false`.
fn prompt_user(message: &str) -> Result<bool> {
  print!("{message} [y/N] ");
  io::stdout().flush().map_err(PakkerError::IoError)?;
  let mut answer = String::new();
  io::stdin()
    .read_line(&mut answer)
    .map_err(PakkerError::IoError)?;
  let normalized = answer.trim();
  Ok(normalized.eq_ignore_ascii_case("y"))
}

View file

@ -0,0 +1,41 @@
use std::path::Path;
use crate::{
cli::UnlinkArgs,
error::{PakkerError, Result},
model::LockFile,
};
/// Remove the dependency link pointing from `args.from` to `args.to`.
///
/// # Errors
/// Fails when either project cannot be found or the source project has
/// no `pakku_id`.
pub fn execute(args: UnlinkArgs, lockfile_path: &Path) -> Result<()> {
  log::info!("Unlinking {} -> {}", args.from, args.to);
  // Load expects directory path, so get parent directory
  let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
  let mut lockfile = LockFile::load(lockfile_dir)?;
  // Resolve the source project's id in its own scope so the immutable
  // borrow ends before we borrow the projects mutably below.
  let from_id = {
    let from_project = lockfile
      .projects
      .iter()
      .find(|p| p.matches_input(&args.from))
      .ok_or_else(|| PakkerError::ProjectNotFound(args.from.clone()))?;
    from_project.pakku_id.clone().ok_or_else(|| {
      PakkerError::InvalidProject("From project has no pakku_id".to_string())
    })?
  };
  // Drop the link from the target project.
  let to_project = lockfile
    .projects
    .iter_mut()
    .find(|p| p.matches_input(&args.to))
    .ok_or_else(|| PakkerError::ProjectNotFound(args.to.clone()))?;
  to_project.pakku_links.remove(&from_id);
  // Persist the modified lockfile.
  lockfile.save(lockfile_dir)?;
  log::info!("Successfully unlinked projects");
  Ok(())
}

140
src/cli/commands/update.rs Normal file
View file

@ -0,0 +1,140 @@
use std::{collections::HashMap, path::Path};
use indicatif::{ProgressBar, ProgressStyle};
use crate::{
cli::UpdateArgs,
error::PakkerError,
model::{Config, LockFile},
platform::create_platform,
ui_utils::prompt_select,
};
/// Re-resolve projects against the platforms and replace lockfile
/// entries whose first returned file differs from the current one.
///
/// With no inputs every project is considered; otherwise only the
/// named ones (unknown names error out). `--yes` skips the
/// interactive version picker.
pub async fn execute(
  args: UpdateArgs,
  lockfile_path: &Path,
  config_path: &Path,
) -> Result<(), PakkerError> {
  // Load expects directory path, so get parent directory
  let lockfile_dir = lockfile_path.parent().unwrap_or(Path::new("."));
  let config_dir = config_path.parent().unwrap_or(Path::new("."));
  let mut lockfile = LockFile::load(lockfile_dir)?;
  let _config = Config::load(config_dir)?;
  // Create platforms
  let mut platforms = HashMap::new();
  if let Ok(platform) = create_platform("modrinth", None) {
    platforms.insert("modrinth".to_string(), platform);
  }
  if let Ok(platform) =
    create_platform("curseforge", std::env::var("CURSEFORGE_API_KEY").ok())
  {
    platforms.insert("curseforge".to_string(), platform);
  }
  // Resolve which lockfile indices to update: all projects, or only
  // those matching the given inputs.
  let project_indices: Vec<_> = if args.inputs.is_empty() {
    (0..lockfile.projects.len()).collect()
  } else {
    let mut indices = Vec::new();
    for input in &args.inputs {
      if let Some((idx, _)) = lockfile
        .projects
        .iter()
        .enumerate()
        .find(|(_, p)| p.matches_input(input))
      {
        indices.push(idx);
      } else {
        return Err(PakkerError::ProjectNotFound(input.clone()));
      }
    }
    indices
  };
  // Create progress bar
  let pb = ProgressBar::new(project_indices.len() as u64);
  pb.set_style(
    ProgressStyle::default_bar()
      .template("{spinner:.green} [{bar:40.cyan/blue}] {pos}/{len} {msg}")
      .unwrap()
      .progress_chars("#>-"),
  );
  for idx in project_indices {
    let old_project = &lockfile.projects[idx];
    pb.set_message(format!("Updating {}...", old_project.get_name()));
    let slug = old_project
      .slug
      .values()
      .next()
      .ok_or_else(|| PakkerError::InvalidProject("No slug found".into()))?;
    // Ask each platform in turn; the first successful response wins.
    let mut updated_project = None;
    for platform in platforms.values() {
      if let Ok(project) = platform
        .request_project_with_files(
          slug,
          &lockfile.mc_versions,
          &lockfile.loaders.keys().cloned().collect::<Vec<_>>(),
        )
        .await
      {
        updated_project = Some(project);
        break;
      }
    }
    if let Some(mut updated_project) = updated_project
      && !updated_project.files.is_empty()
      && let Some(old_file) = lockfile.projects[idx].files.first()
    {
      // NOTE(review): the head of `files` is treated as the preferred
      // candidate — assumes the platform returns files best-first;
      // confirm against `request_project_with_files`.
      let new_file = updated_project.files.first().unwrap();
      if new_file.id == old_file.id {
        pb.println(format!(
          " {} - Already up to date",
          old_project.get_name()
        ));
      } else {
        // Interactive version selection if not using --yes flag
        if !args.yes && updated_project.files.len() > 1 {
          // Suspend the bar so the prompt renders cleanly.
          pb.suspend(|| {
            let choices: Vec<String> = updated_project
              .files
              .iter()
              .map(|f| format!("{} ({})", f.file_name, f.id))
              .collect();
            let choice_refs: Vec<&str> =
              choices.iter().map(std::string::String::as_str).collect();
            if let Ok(selected_idx) = prompt_select(
              &format!("Select version for {}:", old_project.get_name()),
              &choice_refs,
            ) {
              // Move selected file to front
              if selected_idx > 0 {
                updated_project.files.swap(0, selected_idx);
              }
            }
          });
        }
        let selected_file = updated_project.files.first().unwrap();
        pb.println(format!(
          " {} -> {}",
          old_file.file_name, selected_file.file_name
        ));
        lockfile.projects[idx] = updated_project;
      }
    }
    pb.inc(1);
  }
  pb.finish_with_message("Update complete");
  lockfile.save(lockfile_dir)?;
  Ok(())
}

110
src/error.rs Normal file
View file

@ -0,0 +1,110 @@
use thiserror::Error;
/// Crate-wide result alias defaulting the error type to [`PakkerError`].
pub type Result<T> = std::result::Result<T, PakkerError>;
/// Unified error type for all pakker operations.
///
/// `thiserror` derives `Display` from the `#[error(...)]` attributes;
/// `#[from]` variants convert automatically via `?`.
#[derive(Error, Debug)]
pub enum PakkerError {
  // Network errors
  #[error("Network request failed: {0}")]
  NetworkError(#[from] reqwest::Error),
  #[error("Platform API error: {0}")]
  PlatformApiError(String),
  // Validation errors
  #[error("Invalid lock file: {0}")]
  InvalidLockFile(String),
  #[error("Invalid config file: {0}")]
  InvalidConfigFile(String),
  #[error("Project not found: {0}")]
  ProjectNotFound(String),
  #[error("File selection error: {0}")]
  FileSelectionError(String),
  #[error("File not found: {0}")]
  FileNotFound(String),
  // Conflict errors
  #[error("Circular dependency detected: {0}")]
  CircularDependency(String),
  // File I/O errors
  #[error("IO error: {0}")]
  IoError(#[from] std::io::Error),
  #[error("Serialization error: {0}")]
  SerializationError(#[from] serde_json::Error),
  // Raised when a downloaded file's checksum does not match the
  // lockfile's recorded hash.
  #[error("Hash mismatch for file {file}: expected {expected}, got {actual}")]
  HashMismatch {
    file: String,
    expected: String,
    actual: String,
  },
  #[error("Download failed: {0}")]
  DownloadFailed(String),
  // Export errors
  #[error("Export failed: {0}")]
  ExportFailed(String),
  #[error("Invalid export profile: {0}")]
  InvalidExportProfile(String),
  // General errors
  #[error("Configuration error: {0}")]
  ConfigError(String),
  #[error("Internal error: {0}")]
  InternalError(String),
  #[error("Already exists: {0}")]
  AlreadyExists(String),
  #[error("Invalid input: {0}")]
  InvalidInput(String),
  #[error("Invalid project: {0}")]
  InvalidProject(String),
  #[error("Invalid import file: {0}")]
  InvalidImportFile(String),
  #[error("Zip error: {0}")]
  ZipError(#[from] zip::result::ZipError),
  // Git and Fork errors
  #[error("Git error: {0}")]
  GitError(String),
  #[error("Remote not found: {0}")]
  RemoteNotFound(String),
  #[error("Fork error: {0}")]
  Fork(String),
  #[error("Invalid hash: {0}")]
  InvalidHash(String),
  #[error("Invalid response: {0}")]
  InvalidResponse(String),
  #[error("IPC error: {0}")]
  IpcError(String),
}
/// Fold low-level git2 errors into the crate error type, keeping only
/// their display text.
impl From<git2::Error> for PakkerError {
  fn from(err: git2::Error) -> Self {
    let message = err.to_string();
    Self::GitError(message)
  }
}
/// Convert IPC transport errors into the crate error type via their
/// display text.
impl From<crate::ipc::IpcError> for PakkerError {
  fn from(err: crate::ipc::IpcError) -> Self {
    Self::IpcError(err.to_string())
  }
}

266
src/export.rs Normal file
View file

@ -0,0 +1,266 @@
mod profile_config;
mod profiles;
mod rules;
use std::{
fs,
path::{Path, PathBuf},
};
use indicatif::{ProgressBar, ProgressStyle};
pub use profile_config::ProfileConfig;
pub use profiles::{ExportProfile, create_profile};
pub use rules::{Effect, Rule, RuleContext};
use crate::{
error::{PakkerError, Result},
model::{Config, LockFile},
};
/// Builds distributable modpack archives from a lockfile and config.
pub struct Exporter {
  // Modpack workspace root; mods/ and override paths resolve against it.
  base_path: PathBuf,
}
impl Exporter {
  /// Create an exporter rooted at `base_path`.
  pub fn new<P: AsRef<Path>>(base_path: P) -> Self {
    Self {
      base_path: base_path.as_ref().to_path_buf(),
    }
  }
  /// Export all default profiles (curseforge, modrinth, serverpack).
  ///
  /// In multi-profile mode we try each profile independently.
  /// - Profiles that can't run due to missing required credentials are skipped.
  /// - Profiles that fail for other reasons are recorded and reported.
  ///
  /// Returns successfully exported files. If any profile failed (non-skip),
  /// returns an error after attempting all profiles.
  pub async fn export_all_profiles(
    &mut self,
    lockfile: &LockFile,
    config: &Config,
    output_path: &Path,
  ) -> Result<Vec<PathBuf>> {
    let profiles = vec!["curseforge", "modrinth", "serverpack"];
    let mut output_files = Vec::new();
    let mut failures: Vec<(String, String)> = Vec::new();
    println!("Exporting {} profiles...", profiles.len());
    for profile_name in profiles {
      match self
        .export(profile_name, lockfile, config, output_path)
        .await
      {
        Ok(output_file) => output_files.push(output_file),
        Err(err) => {
          // Missing credentials are expected for some profiles:
          // skip rather than record a failure.
          if Self::is_auth_error(&err) {
            eprintln!(
              "{profile_name} export skipped (authentication required)"
            );
            continue;
          }
          eprintln!("{profile_name} export failed: {err}");
          failures.push((profile_name.to_string(), err.to_string()));
        },
      }
    }
    if !failures.is_empty() {
      return Err(PakkerError::ExportFailed(format!(
        "{} profile(s) failed",
        failures.len()
      )));
    }
    if output_files.is_empty() {
      return Err(PakkerError::ExportFailed(
        "No export profiles produced an output file".to_string(),
      ));
    }
    println!("All profiles exported successfully.");
    Ok(output_files)
  }
  /// Heuristic: does `err` represent missing/rejected credentials?
  fn is_auth_error(err: &PakkerError) -> bool {
    // Auth/token/API-key issues should not abort multi-profile export as a
    // whole. We detect these by messages emitted from the downloader.
    match err {
      PakkerError::InternalError(msg) => {
        msg.contains("authentication error")
          || msg.contains("unauthorized")
          || msg.contains("forbidden")
      },
      _ => false,
    }
  }
  /// Export modpack using specified profile
  pub async fn export(
    &mut self,
    profile_name: &str,
    lockfile: &LockFile,
    config: &Config,
    output_path: &Path,
  ) -> Result<PathBuf> {
    let spinner = ProgressBar::new_spinner();
    spinner.set_style(
      ProgressStyle::default_spinner()
        .template("{spinner:.cyan} {msg}")
        .unwrap(),
    );
    spinner.set_message(format!("Preparing {profile_name} export..."));
    spinner.enable_steady_tick(std::time::Duration::from_millis(80));
    let spinner = &spinner;
    // Get export profile
    let profile = create_profile(profile_name)?;
    log::info!(
      "Exporting with profile: {} ({})",
      profile_name,
      profile.name()
    );
    // Load profile-specific configuration if available, otherwise use defaults
    let profile_config = config
      .export_profiles
      .as_ref()
      .and_then(|profiles| profiles.get(profile_name))
      .cloned()
      .or_else(|| {
        // Use defaults based on profile name
        match profile_name {
          "curseforge" => Some(ProfileConfig::curseforge_default()),
          "modrinth" => Some(ProfileConfig::modrinth_default()),
          "serverpack" => Some(ProfileConfig::serverpack_default()),
          _ => None,
        }
      });
    // Create temporary export directory
    let temp_dir = tempfile::tempdir()?;
    let export_dir = temp_dir.path();
    // Build rule context
    let context = RuleContext {
      lockfile: lockfile.clone(),
      config: config.clone(),
      profile_config,
      export_path: export_dir.to_path_buf(),
      base_path: self.base_path.clone(),
      ui: Some(spinner.clone()),
    };
    spinner.set_message("Collecting export rules...");
    // Apply rules and collect effects
    let mut effects = Vec::new();
    for rule in profile.rules() {
      if rule.matches(&context) {
        effects.extend(rule.effects());
      }
    }
    // Execute effects with descriptive messages
    for effect in &effects {
      let effect_name = effect.name();
      spinner.set_message(format!("Exporting: {effect_name}..."));
      effect.execute(&context).await?;
    }
    spinner.set_message("Creating archive...");
    // Package export
    let output_file =
      self.package_export(export_dir, output_path, profile_name, config)?;
    // Cleanup
    drop(temp_dir);
    spinner.finish_and_clear();
    println!("Exported to: {}", output_file.display());
    Ok(output_file)
  }
  /// Package export directory into final format
  fn package_export(
    &self,
    export_dir: &Path,
    output_path: &Path,
    profile_name: &str,
    config: &Config,
  ) -> Result<PathBuf> {
    // Pakku layout support: if output_path ends with "build" (set by CLI),
    // create build/<profile>/.
    let profile_output_path =
      if output_path.file_name().and_then(|n| n.to_str()) == Some("build") {
        output_path.join(profile_name)
      } else {
        output_path.to_path_buf()
      };
    fs::create_dir_all(&profile_output_path)?;
    // Use .mrpack extension for Modrinth, .zip for others
    let extension = if profile_name == "modrinth" {
      "mrpack"
    } else {
      "zip"
    };
    // Archive name: <pack>-<version>-<profile>.<ext>
    let output_file = profile_output_path.join(format!(
      "{}-{}-{}.{}",
      config.name, config.version, profile_name, extension
    ));
    // Create zip archive
    let file = fs::File::create(&output_file)?;
    let mut zip = zip::ZipWriter::new(file);
    let options = zip::write::FileOptions::default()
      .compression_method(zip::CompressionMethod::Deflated)
      .unix_permissions(0o755);
    // Add all files from export directory
    self.add_directory_to_zip(&mut zip, export_dir, export_dir, options)?;
    zip.finish()?;
    Ok(output_file)
  }
  /// Recursively add directory to zip
  fn add_directory_to_zip(
    &self,
    zip: &mut zip::ZipWriter<fs::File>,
    base_path: &Path,
    current_path: &Path,
    options: zip::write::SimpleFileOptions,
  ) -> Result<()> {
    for entry in fs::read_dir(current_path)? {
      let entry = entry?;
      let path = entry.path();
      // Zip entry names are relative to the export root.
      let relative_path = path
        .strip_prefix(base_path)
        .map_err(|e| PakkerError::InternalError(e.to_string()))?;
      if path.is_file() {
        zip.start_file(relative_path.to_string_lossy().to_string(), options)?;
        let content = fs::read(&path)?;
        zip.write_all(&content)?;
      } else if path.is_dir() {
        zip.add_directory(
          relative_path.to_string_lossy().to_string(),
          options,
        )?;
        self.add_directory_to_zip(zip, base_path, &path, options)?;
      }
    }
    Ok(())
  }
}
use std::io::Write;

66
src/export/cache.rs Normal file
View file

@ -0,0 +1,66 @@
use crate::error::Result;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};
/// One cached export artifact: its content hash and on-disk location.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct CacheEntry {
  hash: String,
  path: PathBuf,
}
/// JSON-backed index of export artifacts, persisted in
/// `<cache_dir>/export-cache.json`.
pub struct ExportCache {
  cache_dir: PathBuf,
  entries: HashMap<String, CacheEntry>,
}
impl ExportCache {
  /// Open the cache rooted at `cache_dir`, starting empty when the
  /// on-disk index is absent or unreadable.
  pub fn new(cache_dir: PathBuf) -> Self {
    let entries = Self::load_cache(&cache_dir).unwrap_or_default();
    Self { cache_dir, entries }
  }

  /// Read the JSON index from disk; a missing file yields an empty map.
  fn load_cache(cache_dir: &Path) -> Result<HashMap<String, CacheEntry>> {
    let cache_file = cache_dir.join("export-cache.json");
    if !cache_file.exists() {
      return Ok(HashMap::new());
    }
    let raw = fs::read_to_string(cache_file)?;
    Ok(serde_json::from_str(&raw)?)
  }

  /// Look up a cached entry by key.
  pub fn get(&self, key: &str) -> Option<&CacheEntry> {
    self.entries.get(key)
  }

  /// Record (or overwrite) an entry under `key`.
  pub fn put(&mut self, key: String, hash: String, path: PathBuf) {
    let entry = CacheEntry { hash, path };
    self.entries.insert(key, entry);
  }

  /// Persist the index as pretty-printed JSON, creating the cache
  /// directory if needed.
  pub fn save(&self) -> Result<()> {
    fs::create_dir_all(&self.cache_dir)?;
    let serialized = serde_json::to_string_pretty(&self.entries)?;
    fs::write(self.cache_dir.join("export-cache.json"), serialized)?;
    Ok(())
  }

  /// Drop all in-memory entries and delete the on-disk cache directory.
  pub fn clear(&mut self) -> Result<()> {
    self.entries.clear();
    if self.cache_dir.exists() {
      fs::remove_dir_all(&self.cache_dir)?;
    }
    Ok(())
  }
}

View file

@ -0,0 +1,161 @@
use std::collections::HashMap;
use serde::{Deserialize, Serialize};
/// Profile-specific export configuration.
///
/// All fields are optional; `None` means "fall back to the global
/// config / profile default". Serialized with camelCase keys, and
/// unset fields are omitted from output.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct ProfileConfig {
  /// Custom override paths for this profile
  #[serde(skip_serializing_if = "Option::is_none")]
  pub overrides: Option<Vec<String>>,
  /// Custom server override paths for this profile
  #[serde(skip_serializing_if = "Option::is_none")]
  pub server_overrides: Option<Vec<String>>,
  /// Custom client override paths for this profile
  #[serde(skip_serializing_if = "Option::is_none")]
  pub client_overrides: Option<Vec<String>>,
  /// Platform filter - only include projects available on this platform
  #[serde(skip_serializing_if = "Option::is_none")]
  pub filter_platform: Option<String>,
  /// Include non-redistributable projects (default: false for `CurseForge`,
  /// true for others)
  #[serde(skip_serializing_if = "Option::is_none")]
  pub include_non_redistributable: Option<bool>,
  /// Include client-only mods in server exports (default: false)
  #[serde(skip_serializing_if = "Option::is_none")]
  pub include_client_only: Option<bool>,
  /// Custom project-specific settings for this profile
  #[serde(skip_serializing_if = "Option::is_none")]
  pub project_overrides: Option<HashMap<String, ProjectOverride>>,
}
/// Project-specific overrides for a profile
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct ProjectOverride {
  /// Whether to export this project in this profile
  #[serde(skip_serializing_if = "Option::is_none")]
  pub export: Option<bool>,
  /// Custom subpath for this project in this profile
  #[serde(skip_serializing_if = "Option::is_none")]
  pub subpath: Option<String>,
}
impl ProfileConfig {
  /// Effective override paths: this profile's if set, else the global
  /// config's.
  pub fn get_overrides<'a>(
    &'a self,
    global_overrides: &'a [String],
  ) -> &'a [String] {
    match self.overrides.as_deref() {
      Some(own) => own,
      None => global_overrides,
    }
  }

  /// Effective server override paths: this profile's if set, else the
  /// global config's (which may itself be absent).
  pub fn get_server_overrides<'a>(
    &'a self,
    global_server_overrides: Option<&'a Vec<String>>,
  ) -> Option<&'a [String]> {
    if let Some(own) = self.server_overrides.as_deref() {
      return Some(own);
    }
    global_server_overrides.map(std::vec::Vec::as_slice)
  }

  /// Defaults for the `CurseForge` profile: platform-filtered and
  /// excluding non-redistributable projects.
  pub fn curseforge_default() -> Self {
    Self {
      filter_platform: Some("curseforge".to_string()),
      include_non_redistributable: Some(false),
      ..Default::default()
    }
  }

  /// Defaults for the Modrinth profile: platform-filtered, allowing
  /// non-redistributable projects.
  pub fn modrinth_default() -> Self {
    Self {
      filter_platform: Some("modrinth".to_string()),
      include_non_redistributable: Some(true),
      ..Default::default()
    }
  }

  /// Defaults for the `ServerPack` profile: client-only mods excluded.
  pub fn serverpack_default() -> Self {
    Self {
      include_client_only: Some(false),
      ..Default::default()
    }
  }
}
#[cfg(test)]
mod tests {
  use super::*;

  #[test]
  fn test_default_profile_config() {
    let config = ProfileConfig::default();
    assert!(config.overrides.is_none());
    assert!(config.filter_platform.is_none());
  }

  #[test]
  fn test_curseforge_default() {
    let config = ProfileConfig::curseforge_default();
    assert_eq!(config.filter_platform, Some("curseforge".to_string()));
    assert_eq!(config.include_non_redistributable, Some(false));
  }

  #[test]
  fn test_modrinth_default() {
    let config = ProfileConfig::modrinth_default();
    assert_eq!(config.filter_platform, Some("modrinth".to_string()));
    assert_eq!(config.include_non_redistributable, Some(true));
  }

  #[test]
  fn test_serverpack_default() {
    let config = ProfileConfig::serverpack_default();
    assert_eq!(config.include_client_only, Some(false));
  }

  #[test]
  fn test_get_overrides_with_custom() {
    // Struct-update syntax instead of mutating a freshly-defaulted
    // value (clippy: field_reassign_with_default).
    let config = ProfileConfig {
      overrides: Some(vec!["custom-overrides".to_string()]),
      ..ProfileConfig::default()
    };
    let global = vec!["overrides".to_string()];
    assert_eq!(config.get_overrides(&global), &["custom-overrides"]);
  }

  #[test]
  fn test_get_overrides_fallback_to_global() {
    let config = ProfileConfig::default();
    let global = vec!["overrides".to_string()];
    assert_eq!(config.get_overrides(&global), &["overrides"]);
  }

  #[test]
  fn test_serialization() {
    let config = ProfileConfig {
      filter_platform: Some("modrinth".to_string()),
      include_non_redistributable: Some(true),
      ..ProfileConfig::default()
    };
    // Round-tripping through JSON must preserve the set fields.
    let json = serde_json::to_string(&config).unwrap();
    let deserialized: ProfileConfig = serde_json::from_str(&json).unwrap();
    assert_eq!(deserialized.filter_platform, config.filter_platform);
    assert_eq!(
      deserialized.include_non_redistributable,
      config.include_non_redistributable
    );
  }
}

65
src/export/profiles.rs Normal file
View file

@ -0,0 +1,65 @@
use super::rules::Rule;
use crate::error::{PakkerError, Result};
/// A named export target composed of an ordered list of [`Rule`]s.
pub trait ExportProfile {
  /// Profile identifier used in output paths and log messages.
  fn name(&self) -> &str;
  /// The rules this profile applies, in execution order.
  fn rules(&self) -> Vec<Box<dyn Rule>>;
}
/// Export profile for CurseForge-format packs.
pub struct CurseForgeProfile;
/// Export profile for Modrinth (.mrpack) packs.
pub struct ModrinthProfile;
/// Export profile for standalone server packs.
pub struct ServerPackProfile;
impl ExportProfile for CurseForgeProfile {
  fn name(&self) -> &'static str {
    "curseforge"
  }
  // CurseForge packs filter by platform, generate a CurseForge
  // manifest, and drop non-redistributable projects.
  fn rules(&self) -> Vec<Box<dyn Rule>> {
    vec![
      Box::new(super::rules::CopyProjectFilesRule),
      Box::new(super::rules::FilterByPlatformRule),
      Box::new(super::rules::CopyOverridesRule),
      Box::new(super::rules::GenerateManifestRule::curseforge()),
      Box::new(super::rules::FilterNonRedistributableRule),
    ]
  }
}
impl ExportProfile for ModrinthProfile {
  fn name(&self) -> &'static str {
    "modrinth"
  }
  // Modrinth packs filter by platform and generate a Modrinth index;
  // non-redistributable filtering is not applied.
  fn rules(&self) -> Vec<Box<dyn Rule>> {
    vec![
      Box::new(super::rules::CopyProjectFilesRule),
      Box::new(super::rules::FilterByPlatformRule),
      Box::new(super::rules::CopyOverridesRule),
      Box::new(super::rules::GenerateManifestRule::modrinth()),
    ]
  }
}
impl ExportProfile for ServerPackProfile {
  fn name(&self) -> &'static str {
    "serverpack"
  }
  // Server packs use server overrides and strip client-only mods;
  // no platform manifest is generated.
  fn rules(&self) -> Vec<Box<dyn Rule>> {
    vec![
      Box::new(super::rules::CopyProjectFilesRule),
      Box::new(super::rules::CopyServerOverridesRule),
      Box::new(super::rules::FilterClientOnlyRule),
    ]
  }
}
/// Construct the export profile registered under `name`.
///
/// # Errors
/// Returns `PakkerError::InvalidExportProfile` for unrecognized names.
pub fn create_profile(name: &str) -> Result<Box<dyn ExportProfile>> {
  let profile: Box<dyn ExportProfile> = match name {
    "curseforge" => Box::new(CurseForgeProfile),
    "modrinth" => Box::new(ModrinthProfile),
    "serverpack" => Box::new(ServerPackProfile),
    other => {
      return Err(PakkerError::InvalidExportProfile(other.to_string()));
    },
  };
  Ok(profile)
}

849
src/export/rules.rs Normal file
View file

@ -0,0 +1,849 @@
use std::{fs, path::PathBuf};
use async_trait::async_trait;
use crate::{
error::Result,
model::{Config, LockFile, ProjectSide},
};
/// Shared state handed to every export rule and effect.
#[derive(Clone)]
pub struct RuleContext {
  pub lockfile: LockFile,
  pub config: Config,
  // Per-profile overrides; `None` means "global config only".
  pub profile_config: Option<crate::export::ProfileConfig>,
  // Staging directory where the export is assembled.
  pub export_path: PathBuf,
  // Modpack workspace root; source files resolve against this.
  pub base_path: PathBuf,
  // Progress bar for user-facing status lines, when present.
  pub ui: Option<indicatif::ProgressBar>,
}
/// A conditional export step: when `matches` returns true for the
/// current context, its `effects` are executed in order.
pub trait Rule: Send + Sync {
  fn matches(&self, context: &RuleContext) -> bool;
  fn effects(&self) -> Vec<Box<dyn Effect>>;
}
/// An executable export action produced by a [`Rule`].
#[async_trait]
pub trait Effect: Send + Sync {
  /// Human-readable description shown in progress output.
  fn name(&self) -> &str;
  async fn execute(&self, context: &RuleContext) -> Result<()>;
}
// Rule: Copy project files
/// Unconditional rule that stages every exported project's mod file.
pub struct CopyProjectFilesRule;
impl Rule for CopyProjectFilesRule {
  fn matches(&self, _context: &RuleContext) -> bool {
    true
  }
  fn effects(&self) -> Vec<Box<dyn Effect>> {
    vec![Box::new(CopyProjectFilesEffect)]
  }
}
/// Ensures every exported project's primary file is present in the
/// staging `mods/` directory: copied from the local workspace when
/// available, downloaded otherwise.
pub struct CopyProjectFilesEffect;
#[async_trait]
impl Effect for CopyProjectFilesEffect {
  fn name(&self) -> &'static str {
    "Downloading and copying mod files"
  }
  async fn execute(&self, context: &RuleContext) -> Result<()> {
    use crate::model::ResolvedCredentials;
    // Resolve credentials (env -> keyring -> Pakker file -> Pakku file).
    let credentials = ResolvedCredentials::load()?;
    let curseforge_key =
      credentials.curseforge_api_key().map(ToOwned::to_owned);
    let modrinth_token = credentials.modrinth_token().map(ToOwned::to_owned);
    let mods_dir = context.export_path.join("mods");
    fs::create_dir_all(&mods_dir)?;
    for project in &context.lockfile.projects {
      // Respect the per-project export opt-out.
      if !project.export {
        continue;
      }
      // Only the first (primary) file of each project is exported.
      if let Some(file) = project.files.first() {
        let source = context.base_path.join("mods").join(&file.file_name);
        let dest = mods_dir.join(&file.file_name);
        if source.exists() {
          fs::copy(&source, &dest)?;
          if let Some(ui) = &context.ui {
            ui.println(format!("fetched {} (local)", file.file_name));
          }
          log::info!("fetched {} (local)", file.file_name);
        } else if !file.url.is_empty() {
          download_file(
            &context.base_path,
            &file.file_name,
            &file.url,
            curseforge_key.as_deref(),
            modrinth_token.as_deref(),
          )
          .await?;
          // Copy into export mods/ after ensuring it is present in base mods/
          let downloaded = context.base_path.join("mods").join(&file.file_name);
          if downloaded.exists() {
            fs::copy(&downloaded, &dest)?;
            if let Some(ui) = &context.ui {
              ui.println(format!("fetched {} (download)", file.file_name));
            }
            log::info!("fetched {} (download)", file.file_name);
          } else {
            return Err(crate::error::PakkerError::InternalError(format!(
              "download reported success but file is missing: {}",
              file.file_name
            )));
          }
        } else {
          // No local copy and no URL: the export cannot be completed.
          return Err(crate::error::PakkerError::InternalError(format!(
            "missing mod file and no download url: {}",
            file.file_name
          )));
        }
      }
    }
    Ok(())
  }
}
/// How a failed download attempt should be handled.
#[derive(Debug)]
enum DownloadFailure {
  /// 401/403: credentials missing or rejected; never retried.
  Auth(String),
  /// 429/5xx or transport timeouts/connect errors: retried with backoff.
  Retryable(String),
  /// Anything else: abort immediately.
  Fatal(String),
}
/// Classify an HTTP failure response: 401/403 are auth errors, 429 and
/// 5xx are retryable, everything else is fatal.
fn classify_response(
  status: reqwest::StatusCode,
  body: &str,
) -> DownloadFailure {
  // Every branch carries the same "http <code>: <body>" message.
  let detail = format!("http {}: {}", status.as_u16(), body);
  let is_auth = status == reqwest::StatusCode::UNAUTHORIZED
    || status == reqwest::StatusCode::FORBIDDEN;
  let is_retryable = status == reqwest::StatusCode::TOO_MANY_REQUESTS
    || status.is_server_error();
  if is_auth {
    DownloadFailure::Auth(detail)
  } else if is_retryable {
    DownloadFailure::Retryable(detail)
  } else {
    DownloadFailure::Fatal(detail)
  }
}
/// Classify a transport-level reqwest error: timeouts and connection
/// failures are retryable, anything else is fatal.
fn classify_reqwest_error(err: &reqwest::Error) -> DownloadFailure {
  let retryable = err.is_timeout() || err.is_connect();
  if retryable {
    DownloadFailure::Retryable(err.to_string())
  } else {
    DownloadFailure::Fatal(err.to_string())
  }
}
/// Download `url` into `<base_path>/mods/<file_name>`, retrying
/// transient failures (429/5xx, timeouts, connection errors) with a
/// linear backoff over up to five attempts.
///
/// Platform credentials are attached only when available; auth
/// failures (401/403) and other non-retryable errors abort
/// immediately.
///
/// # Errors
/// Returns `PakkerError::InternalError` describing the failure class
/// when the URL is empty, authentication fails, the request cannot be
/// cloned for a retry, or all attempts are exhausted.
async fn download_file(
  base_path: &std::path::Path,
  file_name: &str,
  url: &str,
  curseforge_key: Option<&str>,
  modrinth_token: Option<&str>,
) -> Result<()> {
  if url.is_empty() {
    return Err(crate::error::PakkerError::InternalError(format!(
      "cannot download empty url for {file_name}"
    )));
  }
  let client = reqwest::ClientBuilder::new()
    .redirect(reqwest::redirect::Policy::default())
    .build()?;
  let mut request_builder = client.get(url);
  // Credentials are optional for direct file downloads; only attach them when
  // available. Hard failures are determined via HTTP status codes (401/403)
  // during the request.
  if url.contains("curseforge") {
    if let Some(key) = curseforge_key {
      request_builder = request_builder.header("x-api-key", key);
    }
  } else if url.contains("modrinth")
    && let Some(token) = modrinth_token
  {
    request_builder = request_builder.header("Authorization", token);
  }
  let attempts: usize = 5;
  for attempt in 1..=attempts {
    // `try_clone` returns `None` for requests with streaming bodies.
    // A plain GET is always cloneable, but fail with a clear error
    // instead of panicking (previously `.unwrap()`) if that invariant
    // ever changes.
    let Some(request) = request_builder.try_clone() else {
      return Err(crate::error::PakkerError::InternalError(format!(
        "cannot retry non-cloneable request for {file_name}"
      )));
    };
    let response = request.send().await;
    match response {
      Ok(resp) if resp.status().is_success() => {
        let bytes = resp.bytes().await?;
        let mods_dir = base_path.join("mods");
        fs::create_dir_all(&mods_dir)?;
        let dest = mods_dir.join(file_name);
        std::fs::write(&dest, &bytes)?;
        return Ok(());
      },
      Ok(resp) => {
        let status = resp.status();
        let body = resp.text().await.unwrap_or_default();
        match classify_response(status, &body) {
          DownloadFailure::Auth(msg) => {
            return Err(crate::error::PakkerError::InternalError(format!(
              "authentication error while downloading {file_name}: {msg}"
            )));
          },
          DownloadFailure::Retryable(msg) => {
            if attempt == attempts {
              return Err(crate::error::PakkerError::InternalError(format!(
                "retryable download error (attempts exhausted) for \
                 {file_name}: {msg}"
              )));
            }
            // Linear backoff: 250ms * attempt number.
            tokio::time::sleep(std::time::Duration::from_millis(
              250u64.saturating_mul(attempt as u64),
            ))
            .await;
          },
          DownloadFailure::Fatal(msg) => {
            return Err(crate::error::PakkerError::InternalError(format!(
              "download failed for {file_name}: {msg}"
            )));
          },
        }
      },
      Err(err) => {
        match classify_reqwest_error(&err) {
          DownloadFailure::Retryable(msg) => {
            if attempt == attempts {
              return Err(crate::error::PakkerError::InternalError(format!(
                "retryable download error (attempts exhausted) for \
                 {file_name}: {msg}"
              )));
            }
            tokio::time::sleep(std::time::Duration::from_millis(
              250u64.saturating_mul(attempt as u64),
            ))
            .await;
          },
          DownloadFailure::Fatal(msg) | DownloadFailure::Auth(msg) => {
            return Err(crate::error::PakkerError::InternalError(format!(
              "download error for {file_name}: {msg}"
            )));
          },
        }
      },
    }
  }
  Err(crate::error::PakkerError::InternalError(format!(
    "download failed for {file_name} (unknown error)"
  )))
}
// Rule: Copy overrides
/// Unconditional rule that copies override directories into the export.
pub struct CopyOverridesRule;
impl Rule for CopyOverridesRule {
  fn matches(&self, _context: &RuleContext) -> bool {
    true
  }
  fn effects(&self) -> Vec<Box<dyn Effect>> {
    vec![Box::new(CopyOverridesEffect)]
  }
}
/// Copies each configured override path (profile-specific when set,
/// else global) from the workspace into the staging directory,
/// silently skipping paths that don't exist.
pub struct CopyOverridesEffect;
#[async_trait]
impl Effect for CopyOverridesEffect {
  fn name(&self) -> &'static str {
    "Copying override files"
  }
  async fn execute(&self, context: &RuleContext) -> Result<()> {
    // Use profile-specific overrides if available, otherwise use global config
    let overrides = if let Some(profile_config) = &context.profile_config {
      profile_config.get_overrides(&context.config.overrides)
    } else {
      &context.config.overrides
    };
    for override_path in overrides {
      let source = context.base_path.join(override_path);
      if !source.exists() {
        continue;
      }
      let dest = context.export_path.join(override_path);
      copy_recursive(&source, &dest)?;
    }
    Ok(())
  }
}
// Rule: Copy server overrides
/// Applies only when the global config declares server overrides.
pub struct CopyServerOverridesRule;
impl Rule for CopyServerOverridesRule {
  fn matches(&self, context: &RuleContext) -> bool {
    context.config.server_overrides.is_some()
  }
  fn effects(&self) -> Vec<Box<dyn Effect>> {
    vec![Box::new(CopyServerOverridesEffect)]
  }
}
/// Effect that copies server-specific override files into the export
/// tree.
pub struct CopyServerOverridesEffect;
#[async_trait]
impl Effect for CopyServerOverridesEffect {
  fn name(&self) -> &'static str {
    "Copying server override files"
  }
  /// Copy every configured server override path that exists on disk into
  /// the export directory. Profile-level settings win over the global
  /// config.
  async fn execute(&self, context: &RuleContext) -> Result<()> {
    let server_overrides = match &context.profile_config {
      Some(profile) => {
        profile.get_server_overrides(context.config.server_overrides.as_ref())
      },
      None => context.config.server_overrides.as_deref(),
    };
    // No server overrides configured anywhere: nothing to do.
    let Some(overrides) = server_overrides else {
      return Ok(());
    };
    for relative in overrides {
      let source = context.base_path.join(relative);
      if source.exists() {
        copy_recursive(&source, &context.export_path.join(relative))?;
      }
    }
    Ok(())
  }
}
// Rule: Filter client-only projects
/// Rule that removes client-side-only mods from the export (the effect
/// itself honors a per-profile opt-out).
pub struct FilterClientOnlyRule;
impl Rule for FilterClientOnlyRule {
  /// Always applies; the opt-out is evaluated inside the effect.
  fn matches(&self, _context: &RuleContext) -> bool {
    true
  }
  fn effects(&self) -> Vec<Box<dyn Effect>> {
    vec![Box::new(FilterClientOnlyEffect)]
  }
}
pub struct FilterClientOnlyEffect;
#[async_trait]
impl Effect for FilterClientOnlyEffect {
fn name(&self) -> &'static str {
"Filtering client-only mods"
}
async fn execute(&self, context: &RuleContext) -> Result<()> {
// Check if we should include client-only mods (profile config can override)
let include_client_only = context
.profile_config
.as_ref()
.and_then(|pc| pc.include_client_only)
.unwrap_or(false);
if include_client_only {
// Don't filter anything
return Ok(());
}
let mods_dir = context.export_path.join("mods");
for project in &context.lockfile.projects {
if project.side == ProjectSide::Client
&& let Some(file) = project.files.first()
{
let file_path = mods_dir.join(&file.file_name);
if file_path.exists() {
fs::remove_file(file_path)?;
}
}
}
Ok(())
}
}
// Rule: Filter non-redistributable
/// Rule that strips mods whose license forbids redistribution (the
/// effect honors a per-profile opt-out).
pub struct FilterNonRedistributableRule;
impl Rule for FilterNonRedistributableRule {
  /// Always applies; the opt-out is evaluated inside the effect.
  fn matches(&self, _context: &RuleContext) -> bool {
    true
  }
  fn effects(&self) -> Vec<Box<dyn Effect>> {
    vec![Box::new(FilterNonRedistributableEffect)]
  }
}
/// Effect that strips mods whose license forbids redistribution.
pub struct FilterNonRedistributableEffect;
#[async_trait]
impl Effect for FilterNonRedistributableEffect {
  fn name(&self) -> &'static str {
    "Filtering non-redistributable mods"
  }
  /// Remove the primary file of every non-redistributable project from
  /// the exported `mods` directory, unless the profile explicitly allows
  /// bundling such files.
  async fn execute(&self, context: &RuleContext) -> Result<()> {
    // The profile may opt into keeping non-redistributable content.
    let keep_all = context
      .profile_config
      .as_ref()
      .is_some_and(|pc| pc.include_non_redistributable.unwrap_or(false));
    if keep_all {
      return Ok(());
    }
    let mods_dir = context.export_path.join("mods");
    let flagged = context
      .lockfile
      .projects
      .iter()
      .filter(|p| !p.redistributable);
    for project in flagged {
      if let Some(file) = project.files.first() {
        let file_path = mods_dir.join(&file.file_name);
        if file_path.exists() {
          fs::remove_file(file_path)?;
          log::info!("Filtered non-redistributable: {}", file.file_name);
        }
      }
    }
    Ok(())
  }
}
// Rule: Generate manifest
/// Rule that emits a platform-specific pack manifest during export.
pub struct GenerateManifestRule {
  platform: String,
}
impl GenerateManifestRule {
  /// Build a rule that generates a CurseForge `manifest.json`.
  pub fn curseforge() -> Self {
    Self::for_platform("curseforge")
  }
  /// Build a rule that generates a Modrinth `modrinth.index.json`.
  pub fn modrinth() -> Self {
    Self::for_platform("modrinth")
  }
  /// Shared constructor for the supported platform identifiers.
  fn for_platform(platform: &str) -> Self {
    Self {
      platform: platform.to_string(),
    }
  }
}
impl Rule for GenerateManifestRule {
  /// Always applies: manifest generation runs for every export that
  /// registers this rule.
  fn matches(&self, _context: &RuleContext) -> bool {
    true
  }
  fn effects(&self) -> Vec<Box<dyn Effect>> {
    vec![Box::new(GenerateManifestEffect {
      platform: self.platform.clone(),
    })]
  }
}
pub struct GenerateManifestEffect {
platform: String,
}
#[async_trait]
impl Effect for GenerateManifestEffect {
fn name(&self) -> &'static str {
"Generating manifest file"
}
async fn execute(&self, context: &RuleContext) -> Result<()> {
let (manifest, filename) = if self.platform == "curseforge" {
(generate_curseforge_manifest(context)?, "manifest.json")
} else if self.platform == "modrinth" {
(generate_modrinth_manifest(context)?, "modrinth.index.json")
} else {
return Ok(());
};
let manifest_path = context.export_path.join(filename);
fs::write(manifest_path, manifest)?;
Ok(())
}
}
/// Build the CurseForge `manifest.json` payload for the current export.
///
/// Only projects flagged for export that have a CurseForge platform ID
/// and at least one resolved file are included.
fn generate_curseforge_manifest(context: &RuleContext) -> Result<String> {
  use serde_json::json;
  let files: Vec<_> = context
    .lockfile
    .projects
    .iter()
    .filter(|p| p.export)
    .filter_map(|p| {
      p.get_platform_id("curseforge").and_then(|id| {
        p.files.first().map(|f| {
          json!({
            // NOTE(review): IDs that fail to parse silently become 0,
            // which yields an invalid manifest entry — confirm upstream
            // data always carries numeric IDs.
            "projectID": id.parse::<u32>().unwrap_or(0),
            "fileID": f.id.parse::<u32>().unwrap_or(0),
            "required": true
          })
        })
      })
    })
    .collect();
  // Avoid allocating a fallback String when a version is present.
  let mc_version = context
    .lockfile
    .mc_versions
    .first()
    .map_or("1.20.1", String::as_str);
  let manifest = json!({
    "minecraft": {
      "version": mc_version,
      "modLoaders": context.lockfile.loaders.iter().map(|(name, version)| {
        json!({
          "id": format!("{name}-{version}"),
          "primary": true
        })
      }).collect::<Vec<_>>()
    },
    "manifestType": "minecraftModpack",
    "manifestVersion": 1,
    "name": context.config.name,
    "version": context.config.version,
    "author": context.config.author.clone().unwrap_or_default(),
    "files": files,
    "overrides": "overrides"
  });
  Ok(serde_json::to_string_pretty(&manifest)?)
}
/// Build the Modrinth `modrinth.index.json` payload for the current
/// export.
///
/// Only exported projects available on Modrinth with at least one
/// resolved file are listed; each entry carries client/server environment
/// requirements derived from the project's side.
fn generate_modrinth_manifest(context: &RuleContext) -> Result<String> {
  use serde_json::json;
  let files: Vec<_> = context
    .lockfile
    .projects
    .iter()
    // The platform ID value itself was unused (`_id`); only availability
    // on Modrinth matters here, so filter on it directly.
    .filter(|p| p.export && p.get_platform_id("modrinth").is_some())
    .filter_map(|p| {
      p.files.first().map(|f| {
        let mut env = serde_json::Map::new();
        match p.side {
          crate::model::ProjectSide::Client => {
            env.insert("client".to_string(), json!("required"));
            env.insert("server".to_string(), json!("unsupported"));
          },
          crate::model::ProjectSide::Server => {
            env.insert("client".to_string(), json!("unsupported"));
            env.insert("server".to_string(), json!("required"));
          },
          crate::model::ProjectSide::Both => {
            env.insert("client".to_string(), json!("required"));
            env.insert("server".to_string(), json!("required"));
          },
        }
        json!({
          "path": format!("mods/{}", f.file_name),
          "hashes": f.hashes,
          "env": env,
          "downloads": [f.url.clone()],
          "fileSize": f.size
        })
      })
    })
    .collect();
  // Build dependencies dynamically based on loaders present
  let mut dependencies = serde_json::Map::new();
  // Avoid allocating a fallback String when a version is present.
  dependencies.insert(
    "minecraft".to_string(),
    json!(
      context
        .lockfile
        .mc_versions
        .first()
        .map_or("1.20.1", String::as_str)
    ),
  );
  for (loader_name, loader_version) in &context.lockfile.loaders {
    dependencies.insert(format!("{loader_name}-loader"), json!(loader_version));
  }
  let manifest = json!({
    "formatVersion": 1,
    "game": "minecraft",
    "versionId": context.config.version,
    "name": context.config.name,
    "summary": context.config.description.clone().unwrap_or_default(),
    "files": files,
    "dependencies": dependencies
  });
  Ok(serde_json::to_string_pretty(&manifest)?)
}
/// Recursively copy `source` (file or directory) to `dest`.
///
/// Parent directories of a file destination are created as needed.
/// Paths that are neither regular files nor directories are silently
/// ignored.
fn copy_recursive(
  source: &std::path::Path,
  dest: &std::path::Path,
) -> Result<()> {
  if source.is_dir() {
    fs::create_dir_all(dest)?;
    for entry in fs::read_dir(source)? {
      let entry = entry?;
      copy_recursive(&entry.path(), &dest.join(entry.file_name()))?;
    }
  } else if source.is_file() {
    if let Some(parent) = dest.parent() {
      fs::create_dir_all(parent)?;
    }
    fs::copy(source, dest)?;
  }
  Ok(())
}
// Rule: Filter projects by platform
/// Rule that prunes projects unavailable on the profile's target
/// platform.
pub struct FilterByPlatformRule;
impl Rule for FilterByPlatformRule {
  /// Applies only when the active profile requests a platform filter.
  fn matches(&self, context: &RuleContext) -> bool {
    context
      .profile_config
      .as_ref()
      .is_some_and(|pc| pc.filter_platform.is_some())
  }
  fn effects(&self) -> Vec<Box<dyn Effect>> {
    vec![Box::new(FilterByPlatformEffect)]
  }
}
/// Effect that removes exported files for projects that have no ID on
/// the profile's target platform.
pub struct FilterByPlatformEffect;
#[async_trait]
impl Effect for FilterByPlatformEffect {
  fn name(&self) -> &'static str {
    "Filtering projects by platform availability"
  }
  /// Delete the primary file of every project that is not available on
  /// the profile's target platform. A no-op when no filter is set.
  async fn execute(&self, context: &RuleContext) -> Result<()> {
    let Some(platform) = context
      .profile_config
      .as_ref()
      .and_then(|pc| pc.filter_platform.as_ref())
    else {
      return Ok(());
    };
    let mods_dir = context.export_path.join("mods");
    for project in &context.lockfile.projects {
      // Projects with an ID on the target platform are kept.
      if project.get_platform_id(platform).is_some() {
        continue;
      }
      let Some(file) = project.files.first() else {
        continue;
      };
      // Remove the file if it was copied into the export.
      let file_path = mods_dir.join(&file.file_name);
      if file_path.exists() {
        fs::remove_file(file_path)?;
        log::info!(
          "Filtered {} (not available on {})",
          file.file_name,
          platform
        );
      }
    }
    Ok(())
  }
}
// Unit tests for the export rules: rule matching and the precedence of
// profile-level configuration over the global config.
#[cfg(test)]
mod tests {
  use std::collections::HashMap;
  use super::*;
  use crate::{export::ProfileConfig, model::LockFile};
  // Build a minimal RuleContext fixture: empty project list, one fabric
  // loader, global override paths, and an optional profile config.
  fn create_test_context(profile_config: Option<ProfileConfig>) -> RuleContext {
    let mut loaders = HashMap::new();
    loaders.insert("fabric".to_string(), "0.15.0".to_string());
    RuleContext {
      lockfile: LockFile {
        target: None,
        projects: vec![],
        mc_versions: vec!["1.20.1".to_string()],
        loaders,
        lockfile_version: 1,
      },
      config: Config {
        name: "Test Pack".to_string(),
        version: "1.0.0".to_string(),
        description: None,
        author: None,
        overrides: vec!["overrides".to_string()],
        server_overrides: Some(vec!["server-overrides".to_string()]),
        client_overrides: Some(vec!["client-overrides".to_string()]),
        paths: HashMap::new(),
        projects: None,
        export_profiles: None,
      },
      profile_config,
      // Paths are never touched by these tests; no files are created.
      export_path: PathBuf::from("/tmp/export"),
      base_path: PathBuf::from("/tmp/base"),
      ui: None,
    }
  }
  // A profile with filter_platform set activates the platform filter.
  #[test]
  fn test_filter_by_platform_rule_matches_with_platform_filter() {
    let profile_config = ProfileConfig {
      filter_platform: Some("modrinth".to_string()),
      ..Default::default()
    };
    let context = create_test_context(Some(profile_config));
    let rule = FilterByPlatformRule;
    assert!(rule.matches(&context));
  }
  // No profile at all: the platform filter must stay inactive.
  #[test]
  fn test_filter_by_platform_rule_no_match_without_platform_filter() {
    let context = create_test_context(None);
    let rule = FilterByPlatformRule;
    assert!(!rule.matches(&context));
  }
  // A profile without filter_platform also leaves the filter inactive.
  #[test]
  fn test_filter_by_platform_rule_no_match_with_empty_profile_config() {
    let profile_config = ProfileConfig::default();
    let context = create_test_context(Some(profile_config));
    let rule = FilterByPlatformRule;
    assert!(!rule.matches(&context));
  }
  // Profile-level overrides must be visible through the context.
  #[test]
  fn test_copy_overrides_uses_profile_config() {
    let profile_config = ProfileConfig {
      overrides: Some(vec!["custom-overrides".to_string()]),
      ..Default::default()
    };
    let context = create_test_context(Some(profile_config));
    assert!(context.profile_config.is_some());
    assert_eq!(
      context
        .profile_config
        .as_ref()
        .unwrap()
        .overrides
        .as_ref()
        .unwrap(),
      &vec!["custom-overrides".to_string()]
    );
  }
  // The non-redistributable opt-in flows through the profile config.
  #[test]
  fn test_filter_non_redistributable_respects_profile_config() {
    let profile_config = ProfileConfig {
      include_non_redistributable: Some(true),
      ..Default::default()
    };
    let context = create_test_context(Some(profile_config));
    assert_eq!(
      context
        .profile_config
        .as_ref()
        .unwrap()
        .include_non_redistributable,
      Some(true)
    );
  }
  // The client-only opt-in flows through the profile config.
  #[test]
  fn test_filter_client_only_respects_profile_config() {
    let profile_config = ProfileConfig {
      include_client_only: Some(true),
      ..Default::default()
    };
    let context = create_test_context(Some(profile_config));
    assert_eq!(
      context.profile_config.as_ref().unwrap().include_client_only,
      Some(true)
    );
  }
  // Profile server overrides take precedence over global config values.
  #[test]
  fn test_server_overrides_uses_profile_config() {
    let profile_config = ProfileConfig {
      server_overrides: Some(vec!["custom-server-overrides".to_string()]),
      ..Default::default()
    };
    let context = create_test_context(Some(profile_config));
    let server_overrides = context
      .profile_config
      .as_ref()
      .unwrap()
      .get_server_overrides(context.config.server_overrides.as_ref());
    assert!(server_overrides.is_some());
    assert_eq!(server_overrides.unwrap(), &["custom-server-overrides"]);
  }
  // Without a profile, the global config remains the source of truth.
  #[test]
  fn test_profile_config_fallback_to_global() {
    let context = create_test_context(None);
    assert!(context.profile_config.is_none());
    assert_eq!(context.config.overrides, vec!["overrides"]);
  }
}

260
src/fetch.rs Normal file
View file

@ -0,0 +1,260 @@
use std::{
fs,
path::{Path, PathBuf},
};
use indicatif::{ProgressBar, ProgressStyle};
use reqwest::Client;
use crate::{
error::{PakkerError, Result},
model::{Config, LockFile, Project, ProjectFile},
utils::verify_hash,
};
/// High-level fetch facade; delegates the actual work to [`FileFetcher`].
pub struct Fetcher {
  /// Reusable HTTP client shared across downloads.
  client: Client,
  /// Root directory of the pack on disk.
  base_path: PathBuf,
}
/// Worker that performs the per-project file downloads and override sync.
pub struct FileFetcher {
  /// Reusable HTTP client shared across downloads.
  client: Client,
  /// Root directory of the pack on disk.
  base_path: PathBuf,
}
impl Fetcher {
  /// Create a fetcher rooted at `base_path` with a fresh HTTP client.
  pub fn new<P: AsRef<Path>>(base_path: P) -> Self {
    Self {
      client: Client::new(),
      base_path: base_path.as_ref().to_path_buf(),
    }
  }
  /// Download every exportable project file; delegates to
  /// [`FileFetcher::fetch_all`] with clones of the client and base path.
  pub async fn fetch_all(
    &self,
    lockfile: &LockFile,
    config: &Config,
  ) -> Result<()> {
    let fetcher = FileFetcher {
      client: self.client.clone(),
      base_path: self.base_path.clone(),
    };
    fetcher.fetch_all(lockfile, config).await
  }
  /// Alias for [`Fetcher::fetch_all`].
  pub async fn sync(&self, lockfile: &LockFile, config: &Config) -> Result<()> {
    self.fetch_all(lockfile, config).await
  }
}
impl FileFetcher {
  /// Fetch all exportable project files according to the lockfile, then
  /// sync override directories into the pack tree.
  pub async fn fetch_all(
    &self,
    lockfile: &LockFile,
    config: &Config,
  ) -> Result<()> {
    let exportable_projects: Vec<_> =
      lockfile.projects.iter().filter(|p| p.export).collect();
    let total = exportable_projects.len();
    let spinner = ProgressBar::new(total as u64);
    spinner.set_style(
      ProgressStyle::default_spinner()
        .template("{spinner:.green} {msg}")
        // Constant template string; failure here would be a programming
        // error, not a runtime condition.
        .expect("valid progress bar template"),
    );
    for (idx, project) in exportable_projects.iter().enumerate() {
      // Project names are keyed (presumably by platform/locale — the map
      // shape is defined elsewhere); fall back when empty.
      let name = project
        .name
        .values()
        .next()
        .map_or("unknown", std::string::String::as_str);
      spinner.set_message(format!("Fetching {} ({}/{})", name, idx + 1, total));
      self.fetch_project(project, lockfile, config).await?;
    }
    spinner.finish_with_message("All projects fetched");
    // Sync overrides only after every download succeeded.
    self.sync_overrides(config)?;
    Ok(())
  }
  /// Fetch the best matching file for a single project, skipping the
  /// download when a hash-valid copy already exists on disk.
  ///
  /// # Errors
  /// Returns an error when no compatible file exists, the download
  /// fails, or the downloaded file fails hash verification (the bad
  /// file is removed before returning).
  pub async fn fetch_project(
    &self,
    project: &Project,
    lockfile: &LockFile,
    config: &Config,
  ) -> Result<()> {
    // Select the best file for this project
    let file = self.select_best_file(project, lockfile)?;
    // Determine target path
    let target_path = self.get_target_path(project, file, config);
    // Skip re-downloading when the existing file matches its first
    // recorded hash.
    if target_path.exists()
      && let Some((algo, expected_hash)) = file.hashes.iter().next()
      && verify_hash(&target_path, algo, expected_hash)?
    {
      log::info!("File already exists and is valid: {}", file.file_name);
      return Ok(());
    }
    // Download file
    log::info!("Downloading: {}", file.file_name);
    self.download_file(&file.url, &target_path).await?;
    // Verify the fresh download; delete it on mismatch so a corrupt
    // file never lingers in the pack.
    if let Some((algo, expected_hash)) = file.hashes.iter().next()
      && !verify_hash(&target_path, algo, expected_hash)?
    {
      fs::remove_file(&target_path)?;
      return Err(PakkerError::HashMismatch {
        file: file.file_name.clone(),
        expected: expected_hash.clone(),
        actual: "mismatch".to_string(),
      });
    }
    log::info!("Successfully downloaded: {}", file.file_name);
    Ok(())
  }
  /// Select the best file for a project based on constraints:
  /// compatibility with the lockfile's MC versions/loaders first, then
  /// release type (release > beta > alpha), then publish date.
  fn select_best_file<'a>(
    &self,
    project: &'a Project,
    lockfile: &LockFile,
  ) -> Result<&'a ProjectFile> {
    let compatible_files: Vec<&ProjectFile> = project
      .files
      .iter()
      .filter(|f| {
        f.is_compatible(&lockfile.mc_versions, &lockfile.get_loader_names())
      })
      .collect();
    if compatible_files.is_empty() {
      return Err(PakkerError::FileNotFound(format!(
        "No compatible files for project: {:?}",
        project.name.values().next()
      )));
    }
    // Prefer release over beta over alpha; ties broken by newest date.
    let best = compatible_files
      .iter()
      .max_by_key(|f| {
        let type_priority = match f.release_type {
          crate::model::ReleaseType::Release => 3,
          crate::model::ReleaseType::Beta => 2,
          crate::model::ReleaseType::Alpha => 1,
        };
        (type_priority, &f.date_published)
      })
      // Safe: the empty case was rejected above.
      .expect("compatible_files is non-empty");
    Ok(best)
  }
  /// Get the on-disk target path for a project file: base path, then the
  /// configured or default directory for the project type, then an
  /// optional subpath, then the file name.
  fn get_target_path(
    &self,
    project: &Project,
    file: &ProjectFile,
    config: &Config,
  ) -> PathBuf {
    let mut path = self.base_path.clone();
    // Custom per-type path from config wins over the built-in default.
    if let Some(custom_path) = config.paths.get(&project.r#type.to_string()) {
      path.push(custom_path);
    } else {
      path.push(self.get_default_path(&project.r#type));
    }
    // Add subpath if specified
    if let Some(subpath) = &project.subpath {
      path.push(subpath);
    }
    path.push(&file.file_name);
    path
  }
  /// Default directory name for each project type.
  const fn get_default_path(
    &self,
    project_type: &crate::model::ProjectType,
  ) -> &str {
    match project_type {
      crate::model::ProjectType::Mod => "mods",
      crate::model::ProjectType::ResourcePack => "resourcepacks",
      crate::model::ProjectType::DataPack => "datapacks",
      crate::model::ProjectType::Shader => "shaderpacks",
      crate::model::ProjectType::World => "saves",
    }
  }
  /// Download a file from `url` to `target_path` via a temp-file rename
  /// so a partially written file is never left at the final path.
  async fn download_file(&self, url: &str, target_path: &Path) -> Result<()> {
    // Create parent directory
    if let Some(parent) = target_path.parent() {
      fs::create_dir_all(parent)?;
    }
    let response = self.client.get(url).send().await?;
    if !response.status().is_success() {
      return Err(PakkerError::DownloadFailed(url.to_string()));
    }
    // Whole body is buffered in memory before writing.
    let bytes = response.bytes().await?;
    // NOTE(review): with_extension replaces the final extension
    // ("a.jar" -> "a.tmp"), so two targets sharing a stem would collide;
    // downloads are sequential here, but confirm before parallelizing.
    let temp_path = target_path.with_extension("tmp");
    fs::write(&temp_path, bytes)?;
    fs::rename(temp_path, target_path)?;
    Ok(())
  }
  /// Sync override directories: copy each configured override tree into
  /// the pack base path.
  fn sync_overrides(&self, config: &Config) -> Result<()> {
    for override_path in &config.overrides {
      let source = self.base_path.join(override_path);
      if !source.exists() {
        continue;
      }
      // Copy override files to target locations
      self.copy_recursive(&source, &self.base_path)?;
    }
    Ok(())
  }
  /// Copy a file or directory tree recursively, creating destination
  /// parent directories as needed.
  fn copy_recursive(&self, source: &Path, dest: &Path) -> Result<()> {
    if source.is_file() {
      // Ensure the parent exists so top-level file copies cannot fail;
      // mirrors the exporter's free-standing copy_recursive.
      if let Some(parent) = dest.parent() {
        fs::create_dir_all(parent)?;
      }
      fs::copy(source, dest)?;
    } else if source.is_dir() {
      fs::create_dir_all(dest)?;
      for entry in fs::read_dir(source)? {
        let entry = entry?;
        let target = dest.join(entry.file_name());
        self.copy_recursive(&entry.path(), &target)?;
      }
    }
    Ok(())
  }
}

589
src/git/mod.rs Normal file
View file

@ -0,0 +1,589 @@
use std::path::Path;
use git2::{
Cred,
FetchOptions,
Oid,
RemoteCallbacks,
Repository,
ResetType,
build::RepoBuilder,
};
use crate::error::{PakkerError, Result};
/// Check if a directory is a Git repository.
///
/// Returns `true` when libgit2 can open a repository at `path`.
pub fn is_git_repository<P: AsRef<Path>>(path: P) -> bool {
  Repository::open(path).is_ok()
}
/// Get the URL of a remote.
///
/// # Errors
/// Fails when the remote does not exist or its URL is not valid UTF-8.
pub fn get_remote_url<P: AsRef<Path>>(
  path: P,
  remote_name: &str,
) -> Result<String> {
  let repo = Repository::open(path)?;
  let remote = repo.find_remote(remote_name).map_err(|e| {
    PakkerError::GitError(format!("Remote '{remote_name}' not found: {e}"))
  })?;
  match remote.url() {
    Some(url) => Ok(url.to_string()),
    None => Err(PakkerError::GitError(
      "Remote URL is not valid UTF-8".to_string(),
    )),
  }
}
/// Resolve the commit SHA for `ref_name`, or for HEAD when no ref is
/// given.
pub fn get_current_commit_sha<P: AsRef<Path>>(
  path: P,
  ref_name: Option<&str>,
) -> Result<String> {
  let repo = Repository::open(path)?;
  let commit = match ref_name {
    Some(name) => repo.revparse_single(name)?.peel_to_commit()?,
    None => repo.head()?.peel_to_commit()?,
  };
  Ok(commit.id().to_string())
}
/// Get the commit SHA for a specific ref (alias for compatibility).
///
/// Thin wrapper over [`get_current_commit_sha`] with a required ref.
pub fn get_commit_sha<P: AsRef<Path>>(
  path: P,
  ref_name: &str,
) -> Result<String> {
  get_current_commit_sha(path, Some(ref_name))
}
/// Clone a Git repository.
///
/// Authenticates via the SSH agent and reports transfer progress through
/// the optional callback. After cloning, HEAD is pointed at `ref_name`
/// when it resolves to a local branch (checked out), or to a tag/commit
/// (detached HEAD). If `ref_name` resolves to neither, the clone is
/// returned with its default HEAD unchanged.
///
/// # Errors
/// Fails when the target directory exists and is non-empty, when the
/// clone itself fails, or when HEAD cannot be repositioned onto a
/// resolvable ref.
pub fn clone_repository<P: AsRef<Path>>(
  url: &str,
  target_path: P,
  ref_name: &str,
  progress_callback: Option<
    Box<dyn FnMut(usize, usize, Option<usize>) + 'static>,
  >,
) -> Result<Repository> {
  let target_path = target_path.as_ref();
  // Refuse to clone into a non-empty directory (an empty existing
  // directory is fine).
  if target_path.exists() {
    let is_empty = target_path.read_dir()?.next().is_none();
    if !is_empty {
      return Err(PakkerError::GitError(format!(
        "Target directory is not empty: {}",
        target_path.display()
      )));
    }
  }
  let mut callbacks = RemoteCallbacks::new();
  // Setup SSH key authentication via the agent; "git" is the usual
  // username when the URL carries none.
  callbacks.credentials(|_url, username_from_url, _allowed_types| {
    let username = username_from_url.unwrap_or("git");
    Cred::ssh_key_from_agent(username)
  });
  // Setup progress callback if provided
  if let Some(mut progress_fn) = progress_callback {
    callbacks.transfer_progress(move |stats| {
      progress_fn(
        stats.received_objects(),
        stats.total_objects(),
        Some(stats.received_bytes()),
      );
      true
    });
  }
  let mut fetch_options = FetchOptions::new();
  fetch_options.remote_callbacks(callbacks);
  let mut builder = RepoBuilder::new();
  builder.fetch_options(fetch_options);
  // Perform the clone. Avoid forcing a branch at clone time because some
  // local repositories (or bare repos) may not expose the exact remote
  // tracking refs that libgit2 expects. We'll attempt to set the desired
  // ref after cloning when possible.
  let repo = builder.clone(url, target_path).map_err(|e| {
    PakkerError::GitError(format!("Failed to clone repository '{url}': {e}"))
  })?;
  // If a branch/ref name was requested, try to make HEAD point to it.
  // Prefer local branch refs (refs/heads/*), then tags, then raw rev-parse.
  let branch_ref = format!("refs/heads/{ref_name}");
  if repo.find_reference(&branch_ref).is_ok() {
    repo.set_head(&branch_ref).map_err(|e| {
      PakkerError::GitError(format!(
        "Cloned repository but failed to set HEAD to {branch_ref}: {e}"
      ))
    })?;
  } else if let Ok(obj) = repo.revparse_single(ref_name) {
    // Create a detached HEAD pointing to the commit/tag
    let commit = obj.peel_to_commit().map_err(|e| {
      PakkerError::GitError(format!(
        "Resolved ref '{ref_name}' but could not peel to commit: {e}"
      ))
    })?;
    repo.set_head_detached(commit.id()).map_err(|e| {
      PakkerError::GitError(format!(
        "Cloned repository but failed to set detached HEAD to {ref_name}: {e}"
      ))
    })?;
  }
  Ok(repo)
}
/// Fetch updates for `ref_name` from `remote_name`, authenticating via
/// the SSH agent and reporting transfer progress through the optional
/// callback.
pub fn fetch_updates<P: AsRef<Path>>(
  path: P,
  remote_name: &str,
  ref_name: &str,
  progress_callback: Option<
    Box<dyn FnMut(usize, usize, Option<usize>) + 'static>,
  >,
) -> Result<()> {
  let repo = Repository::open(path)?;
  let mut remote = repo.find_remote(remote_name).map_err(|e| {
    PakkerError::GitError(format!("Remote '{remote_name}' not found: {e}"))
  })?;
  let mut callbacks = RemoteCallbacks::new();
  // Authenticate with whatever identity the SSH agent offers.
  callbacks.credentials(|_url, username_from_url, _allowed_types| {
    Cred::ssh_key_from_agent(username_from_url.unwrap_or("git"))
  });
  // Forward transfer statistics to the caller's callback, if any.
  if let Some(mut report) = progress_callback {
    callbacks.transfer_progress(move |stats| {
      report(
        stats.received_objects(),
        stats.total_objects(),
        Some(stats.received_bytes()),
      );
      true
    });
  }
  let mut options = FetchOptions::new();
  options.remote_callbacks(callbacks);
  remote
    .fetch(&[ref_name], Some(&mut options), None)
    .map_err(|e| {
      PakkerError::GitError(format!("Failed to fetch updates: {e}"))
    })
}
/// Hard reset the working tree and index to `remote_name/ref_name`
/// (like `git reset --hard`).
pub fn reset_to_ref<P: AsRef<Path>>(
  path: P,
  remote_name: &str,
  ref_name: &str,
) -> Result<()> {
  let repo = Repository::open(path)?;
  // e.g. "origin/main"
  let full_ref = format!("{remote_name}/{ref_name}");
  let commit = repo
    .revparse_single(&full_ref)
    .map_err(|e| {
      PakkerError::GitError(format!("Failed to find ref '{full_ref}': {e}"))
    })?
    .peel_to_commit()
    .map_err(|e| {
      PakkerError::GitError(format!("Failed to resolve ref to commit: {e}"))
    })?;
  repo
    .reset(commit.as_object(), ResetType::Hard, None)
    .map_err(|e| PakkerError::GitError(format!("Failed to reset to ref: {e}")))
}
/// Determine whether `ref_name` names a branch, a tag, or a raw commit,
/// checked in that order of precedence.
pub fn resolve_ref_type<P: AsRef<Path>>(
  path: P,
  ref_name: &str,
) -> Result<crate::model::fork::RefType> {
  use crate::model::fork::RefType;
  let repo = Repository::open(path)?;
  // Branches first: local, then remote-tracking.
  let is_branch = repo.find_branch(ref_name, git2::BranchType::Local).is_ok()
    || repo.find_branch(ref_name, git2::BranchType::Remote).is_ok();
  if is_branch {
    return Ok(RefType::Branch);
  }
  // Tags live under refs/tags/.
  if repo
    .find_reference(&format!("refs/tags/{ref_name}"))
    .is_ok()
  {
    return Ok(RefType::Tag);
  }
  // Anything rev-parse can still resolve is treated as a commit.
  if repo.revparse_single(ref_name).is_ok() {
    return Ok(RefType::Commit);
  }
  Err(PakkerError::GitError(format!(
    "Could not resolve ref '{ref_name}' as branch, tag, or commit"
  )))
}
/// Get the primary remote URL for a repository at path. Prefer 'origin',
/// otherwise first remote with a URL.
pub fn get_primary_remote_url<P: AsRef<Path>>(path: P) -> Result<String> {
  let repo = Repository::open(path)?;
  // 'origin' wins whenever it exists and has a URL.
  if let Ok(origin) = repo.find_remote("origin")
    && let Some(url) = origin.url()
  {
    return Ok(url.to_string());
  }
  // Otherwise scan remotes in listing order and take the first usable
  // URL.
  let fallback = repo.remotes().ok().and_then(|names| {
    names.iter().flatten().find_map(|name| {
      repo
        .find_remote(name)
        .ok()
        .and_then(|remote| remote.url().map(str::to_string))
    })
  });
  fallback.ok_or_else(|| {
    PakkerError::GitError(
      "No remote with a valid URL found on repository".to_string(),
    )
  })
}
/// Version-control system detected for a working directory.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum VcsType {
  /// Git repository.
  Git,
  /// Jujutsu workspace.
  Jujutsu,
  /// No recognized VCS.
  None,
}
/// Detect the VCS type for a given path.
///
/// Jujutsu is probed first (higher priority), then Git. Detection shells
/// out to the respective CLI tools, so a missing binary simply counts as
/// "not that VCS".
pub fn detect_vcs_type<P: AsRef<Path>>(path: P) -> VcsType {
  let path = path.as_ref();
  if cli_probe_succeeds("jj", &["root"], path) {
    return VcsType::Jujutsu;
  }
  if cli_probe_succeeds("git", &["rev-parse", "--show-toplevel"], path) {
    return VcsType::Git;
  }
  VcsType::None
}
/// Run `program args…` in `dir` and report whether it exited
/// successfully. Spawn failures (e.g. the binary is not installed) count
/// as failure.
fn cli_probe_succeeds(program: &str, args: &[&str], dir: &Path) -> bool {
  std::process::Command::new(program)
    .args(args)
    .current_dir(dir)
    .output()
    .is_ok_and(|output| output.status.success())
}
/// Check whether the repository has uncommitted changes (working tree or
/// index).
///
/// Git repositories are inspected via libgit2 status entries; Jujutsu
/// workspaces shell out to `jj status`. Paths with no recognized VCS
/// report `false`.
pub fn repo_has_uncommitted_changes<P: AsRef<Path>>(path: P) -> Result<bool> {
  match detect_vcs_type(&path) {
    VcsType::Git => {
      let repo = Repository::open(path)?;
      let statuses = repo.statuses(None)?;
      // Any non-empty status flag (index or working tree) counts as
      // uncommitted.
      Ok(statuses.iter().any(|entry| !entry.status().is_empty()))
    },
    VcsType::Jujutsu => {
      // Use jj status to check for changes - look for "The working copy has no
      // changes"
      let output = std::process::Command::new("jj")
        .args(["status"])
        .current_dir(path)
        .output()
        .map_err(|e| {
          PakkerError::GitError(format!("Failed to run jj status: {e}"))
        })?;
      let output_str = String::from_utf8_lossy(&output.stdout);
      // NOTE(review): a failed `jj status` (non-zero exit, empty stdout)
      // also lacks this marker and is reported as "dirty" — confirm
      // that fail-dirty is the intended behavior.
      Ok(!output_str.contains("The working copy has no changes"))
    },
    VcsType::None => Ok(false),
  }
}
/// Attempt a lightweight fetch of a single ref from the named remote
/// into the repository at path.
pub fn fetch_remote_light<P: AsRef<Path>>(
  path: P,
  remote_name: &str,
  ref_name: &str,
) -> Result<()> {
  let repo = Repository::open(path)?;
  let mut remote = repo.find_remote(remote_name).map_err(|e| {
    PakkerError::GitError(format!("Remote '{remote_name}' not found: {e}"))
  })?;
  let mut callbacks = RemoteCallbacks::new();
  // SSH-agent based authentication, same as the other fetch helpers.
  callbacks.credentials(|_url, username_from_url, _allowed_types| {
    Cred::ssh_key_from_agent(username_from_url.unwrap_or("git"))
  });
  let mut options = FetchOptions::new();
  options.remote_callbacks(callbacks);
  // Fully-qualified refs are used verbatim; bare branch names are mapped
  // into the remote-tracking namespace.
  let refspec = if ref_name.starts_with("refs/") {
    ref_name.to_string()
  } else {
    format!("refs/heads/{ref_name}:refs/remotes/{remote_name}/{ref_name}")
  };
  remote
    .fetch(&[&refspec], Some(&mut options), None)
    .map_err(|e| {
      PakkerError::GitError(format!("Failed lightweight fetch: {e}"))
    })
}
/// Resolve a ref name to the Oid of the commit it points at.
pub fn get_ref_oid<P: AsRef<Path>>(path: P, ref_name: &str) -> Result<Oid> {
  let repo = Repository::open(path)?;
  let commit = repo
    .revparse_single(ref_name)
    .map_err(|e| {
      PakkerError::GitError(format!("Failed to resolve ref '{ref_name}': {e}"))
    })?
    .peel_to_commit()
    .map_err(|e| {
      PakkerError::GitError(format!(
        "Failed to peel ref '{ref_name}' to commit: {e}"
      ))
    })?;
  Ok(commit.id())
}
/// Count commits reachable from `oid` in `repo`.
///
/// Items the walk fails to resolve are counted like the manual loop this
/// replaces did (every yielded item, `Ok` or `Err`, increments the
/// total).
fn count_commits(repo: &Repository, oid: Oid) -> Result<usize> {
  let mut revwalk = repo.revwalk().map_err(|e| {
    PakkerError::GitError(format!(
      "Failed to create revwalk for counting commits: {e}"
    ))
  })?;
  revwalk.push(oid).map_err(|e| {
    PakkerError::GitError(format!(
      "Failed to start revwalk from oid {oid}: {e}"
    ))
  })?;
  // Iterator::count replaces the manual `for _ { count += 1 }` loop.
  Ok(revwalk.count())
}
/// Compute how many commits `local_ref` is ahead/behind `remote_ref`.
///
/// When the remote ref cannot be resolved the remote is treated as
/// empty: every commit reachable from the local ref counts as "ahead"
/// and "behind" is 0.
pub fn ahead_behind<P: AsRef<Path>>(
  path: P,
  local_ref: &str,
  remote_ref: &str,
) -> Result<(usize, usize)> {
  let repo = Repository::open(&path)?;
  // map_err + ? instead of the previous manual match/re-wrap.
  let local_oid = get_ref_oid(&path, local_ref).map_err(|e| {
    PakkerError::GitError(format!("Local ref not found '{local_ref}': {e}"))
  })?;
  match get_ref_oid(&path, remote_ref) {
    Ok(remote_oid) => {
      repo.graph_ahead_behind(local_oid, remote_oid).map_err(|e| {
        PakkerError::GitError(format!("Failed to compute ahead/behind: {e}"))
      })
    },
    // Remote ref missing — count commits reachable from local.
    Err(_) => Ok((count_commits(&repo, local_oid)?, 0)),
  }
}
/// Set the URL for a remote in the repository.
pub fn set_remote_url<P: AsRef<Path>>(
  path: P,
  remote_name: &str,
  url: &str,
) -> Result<()> {
  Repository::open(path)?
    .remote_set_url(remote_name, url)
    .map_err(|e| {
      PakkerError::GitError(format!("Failed to set remote URL: {e}"))
    })
}
// Integration-style tests using real on-disk repositories built with
// git2 in temporary directories.
#[cfg(test)]
mod tests {
  use std::{fs::File, io::Write};
  use git2::{Repository, Signature};
  use tempfile::tempdir;
  use super::*;
  // Create a bare repository to act as a push/clone target.
  fn init_bare_repo(path: &std::path::Path) -> Repository {
    Repository::init_bare(path).expect("init bare")
  }
  // Create a work repository with one committed file and `branch`
  // checked out at that commit.
  fn init_repo_with_commit(
    path: &std::path::Path,
    file_name: &str,
    content: &str,
    branch: &str,
  ) -> Repository {
    let repo = Repository::init(path).expect("init repo");
    let sig = Signature::now("Test", "test@example.com").unwrap();
    let mut index = repo.index().unwrap();
    let file_path = path.join(file_name);
    let mut f = File::create(&file_path).unwrap();
    writeln!(f, "{}", content).unwrap();
    drop(f);
    index.add_path(std::path::Path::new(file_name)).unwrap();
    let tree_id = index.write_tree().unwrap();
    // limit the scope of tree to avoid borrow while moving repo
    {
      let tree = repo.find_tree(tree_id).unwrap();
      let _commit_id = repo
        .commit(Some("HEAD"), &sig, &sig, "initial", &tree, &[])
        .unwrap();
    }
    // Create branch pointing at HEAD and set HEAD
    let head_oid = repo.refname_to_id("HEAD").unwrap();
    repo
      .branch(branch, &repo.find_commit(head_oid).unwrap(), true)
      .unwrap();
    repo.set_head(&format!("refs/heads/{}", branch)).unwrap();
    repo
  }
  // A freshly initialized repo with a commit is recognized as Git.
  #[test]
  fn test_is_git_repository_and_remote_url() {
    let tmp = tempdir().unwrap();
    let repo_path = tmp.path().join("repo");
    let _repo = init_repo_with_commit(&repo_path, "a.txt", "hello", "master");
    assert!(is_git_repository(&repo_path));
  }
  // Push to a bare remote, fetch back, and verify the tracking ref is
  // neither ahead of nor behind the local branch.
  #[test]
  fn test_fetch_remote_light_and_ahead_behind() {
    let tmp = tempdir().unwrap();
    let bare_path = tmp.path().join("bare.git");
    let _bare = init_bare_repo(&bare_path);
    let work_path = tmp.path().join("work");
    let repo = init_repo_with_commit(&work_path, "a.txt", "hello", "master");
    // Add bare remote and push
    repo.remote("origin", bare_path.to_str().unwrap()).unwrap();
    let mut remote = repo.find_remote("origin").unwrap();
    remote.connect(git2::Direction::Push).unwrap();
    remote
      .push(&["refs/heads/master:refs/heads/master"], None)
      .unwrap();
    // Ensure bare HEAD points to master
    let bare_repo = Repository::open(&bare_path).unwrap();
    bare_repo.set_head("refs/heads/master").unwrap();
    // Now test fetch_remote_light against the work repo (fetch from origin into
    // work should succeed)
    assert!(fetch_remote_light(&work_path, "origin", "master").is_ok());
    // Test ahead_behind with remote tracking ref
    let (ahead, behind) = ahead_behind(
      &work_path,
      "refs/heads/master",
      "refs/remotes/origin/master",
    )
    .unwrap();
    assert_eq!(ahead, 0);
    assert_eq!(behind, 0);
  }
  // Clone from a local bare repo, then rewrite origin to a network URL
  // and verify it round-trips through get_remote_url.
  #[test]
  fn test_clone_repository_and_origin_rewrite_integration() {
    let tmp = tempdir().unwrap();
    let bare_path = tmp.path().join("upstream.git");
    let _bare = init_bare_repo(&bare_path);
    let work_path = tmp.path().join("workrepo");
    let repo = init_repo_with_commit(&work_path, "b.txt", "hello2", "master");
    // Add remote upstream and push
    repo.remote("origin", bare_path.to_str().unwrap()).unwrap();
    let mut remote = repo.find_remote("origin").unwrap();
    remote.connect(git2::Direction::Push).unwrap();
    remote
      .push(&["refs/heads/master:refs/heads/master"], None)
      .unwrap();
    let bare_repo = Repository::open(&bare_path).unwrap();
    bare_repo.set_head("refs/heads/master").unwrap();
    // Now clone from the local path into a new dir
    let clone_target = tmp.path().join("clone_target");
    let _cloned = clone_repository(
      bare_path.to_str().unwrap(),
      &clone_target,
      "master",
      None,
    )
    .expect("clone");
    // After cloning from a local path, simulate rewriting origin to the
    // upstream network URL
    set_remote_url(&clone_target, "origin", "https://example.com/upstream.git")
      .unwrap();
    let url = get_remote_url(&clone_target, "origin").unwrap();
    assert_eq!(url, "https://example.com/upstream.git");
  }
}

1326
src/ipc.rs Normal file

File diff suppressed because it is too large Load diff

176
src/main.rs Normal file
View file

@ -0,0 +1,176 @@
mod cli;
mod error;
mod export;
mod fetch;
mod git;
mod ipc;
mod model;
mod platform;
mod rate_limiter;
mod resolver;
mod ui_utils;
mod utils;
use std::path::PathBuf;
use clap::Parser;
use cli::{Cli, Commands};
use error::PakkerError;
use crate::rate_limiter::RateLimiter;
/// CLI entry point.
///
/// Parses arguments with clap, configures `env_logger` from the
/// repeated `-v` flag (warn → info → debug → trace), then dispatches
/// to the handler for the chosen subcommand. All commands operate on
/// `pakker-lock.json` and `pakker.json` in the current directory.
#[tokio::main]
async fn main() -> Result<(), PakkerError> {
  let cli = Cli::parse();
  // Initialize logging based on verbosity level
  let log_level = match cli.verbose {
    0 => "warn",  // Default: only warnings and errors
    1 => "info",  // -v: info level
    2 => "debug", // -vv: debug level
    _ => "trace", // -vvv+: trace level (most verbose)
  };
  env_logger::Builder::from_env(
    env_logger::Env::default().default_filter_or(log_level),
  )
  .format_timestamp(None)
  .format_module_path(false)
  .init();
  // All state files are resolved relative to the invocation directory.
  let working_dir = PathBuf::from(".");
  let lockfile_path = working_dir.join("pakker-lock.json");
  let config_path = working_dir.join("pakker.json");
  // NOTE(review): this limiter is constructed but never passed to any
  // handler below — presumably handlers build their own; confirm.
  let _rate_limiter = std::sync::Arc::new(RateLimiter::new(None));
  // Each arm delegates to a `cli::commands::*` module, forwarding only
  // the paths/arguments that handler needs.
  match cli.command {
    Commands::Init(args) => {
      cli::commands::init::execute(args, &lockfile_path, &config_path).await
    },
    Commands::Import(args) => {
      cli::commands::import::execute(args, &lockfile_path, &config_path).await
    },
    Commands::Add(args) => {
      cli::commands::add::execute(args, &lockfile_path, &config_path).await
    },
    Commands::AddPrj(args) => {
      cli::commands::add_prj::execute(
        args.curseforge.clone(),
        args.modrinth.clone(),
        args.github.clone(),
        args.project_type,
        args.side,
        args.strategy,
        args.redistributable,
        args.subpath.clone(),
        args.aliases.clone(),
        args.export,
        args.no_deps,
        args.yes,
        &lockfile_path,
        &config_path,
      )
      .await
    },
    Commands::Rm(args) => {
      cli::commands::rm::execute(args, &lockfile_path, &config_path).await
    },
    Commands::Update(args) => {
      cli::commands::update::execute(args, &lockfile_path, &config_path).await
    },
    Commands::Ls(args) => cli::commands::ls::execute(args, &lockfile_path),
    Commands::Set(args) => {
      cli::commands::set::execute(args, &lockfile_path, &config_path).await
    },
    Commands::Link(args) => cli::commands::link::execute(args, &lockfile_path),
    Commands::Unlink(args) => {
      cli::commands::unlink::execute(args, &lockfile_path)
    },
    Commands::Diff(args) => cli::commands::diff::execute(args, &lockfile_path),
    Commands::Fetch(args) => {
      cli::commands::fetch::execute(args, &lockfile_path, &config_path).await
    },
    Commands::Sync(args) => {
      cli::commands::sync::execute(args, &lockfile_path, &config_path).await
    },
    Commands::Export(args) => {
      cli::commands::export::execute(args, &lockfile_path, &config_path).await
    },
    Commands::Remote(args) => cli::commands::remote::execute(args).await,
    Commands::RemoteUpdate(args) => {
      cli::commands::remote_update::execute(args).await
    },
    Commands::Status(args) => {
      cli::commands::status::execute(
        args.parallel,
        &lockfile_path,
        &config_path,
      )
      .await
    },
    Commands::Inspect(args) => {
      cli::commands::inspect::execute(
        args.projects,
        &lockfile_path,
        &config_path,
      )
      .await
    },
    // Credentials: `set` subcommand stores secrets; bare invocation
    // shows/deletes them depending on flags.
    Commands::Credentials(args) => {
      match &args.subcommand {
        Some(cli::CredentialsSubcommand::Set(set_args)) => {
          cli::commands::credentials_set::execute(
            set_args.cf_api_key.clone(),
            set_args.modrinth_token.clone(),
            set_args.gh_access_token.clone(),
          )
        },
        None => {
          cli::commands::credentials::execute(
            args.delete,
            args.delete_file,
            args.delete_keyring,
          )
        },
      }
    },
    // Cfg: `prj` subcommand edits per-project config; bare invocation
    // edits top-level pack metadata and paths.
    Commands::Cfg(args) => {
      match &args.subcommand {
        Some(cli::CfgSubcommand::Prj(prj_args)) => {
          cli::commands::cfg_prj::execute(
            &config_path,
            &lockfile_path,
            prj_args.project.clone(),
            prj_args.r#type.as_deref(),
            prj_args.side.as_deref(),
            prj_args.update_strategy.as_deref(),
            prj_args.redistributable,
            prj_args.subpath.clone(),
            prj_args.add_alias.clone(),
            prj_args.remove_alias.clone(),
            prj_args.export,
          )
        },
        None => {
          cli::commands::cfg::execute(
            &config_path,
            args.name.clone(),
            args.version.clone(),
            args.description.clone(),
            args.author.clone(),
            args.mods_path.clone(),
            args.resource_packs_path.clone(),
            args.data_packs_path.clone(),
            args.worlds_path.clone(),
            args.shaders_path.clone(),
          )
        },
      }
    },
    Commands::Fork(args) => {
      cli::commands::fork::execute(&args)?;
      Ok(())
    },
  }
}

23
src/model.rs Normal file
View file

@ -0,0 +1,23 @@
pub mod config;
pub mod credentials;
pub mod enums;
pub mod fork;
pub mod lockfile;
pub mod r#override;
pub mod project;
pub use config::Config;
pub use credentials::{
PakkerCredentialsFile,
ResolvedCredentials,
set_keyring_secret,
};
pub use enums::{
ProjectSide,
ProjectType,
ReleaseType,
Target,
UpdateStrategy,
};
pub use lockfile::LockFile;
pub use project::{Project, ProjectFile};

383
src/model/config.rs Normal file
View file

@ -0,0 +1,383 @@
use std::{collections::HashMap, path::Path};
use serde::{Deserialize, Serialize};
use super::enums::{ProjectSide, ProjectType, UpdateStrategy};
use crate::error::{PakkerError, Result};
// File name of the pack configuration inside the working directory.
const CONFIG_NAME: &str = "pakker.json";
// Pakker config wrapper - supports both Pakker (direct) and Pakku (wrapped)
// formats
//
// NOTE: `untagged` tries variants in declaration order, so the native
// Pakker layout (which requires `name`/`version`) is attempted before
// the wrapped Pakku layout.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum ConfigWrapper {
  // Native layout: config fields at the top level of the JSON.
  Pakker(Config),
  // Legacy layout: everything nested under a "pakku" key.
  Pakku { pakku: PakkerWrappedConfig },
}
/// Payload of the legacy wrapped ("pakku") config layout.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PakkerWrappedConfig {
  /// Upstream repository this pack was forked from, if any.
  pub parent: Option<ParentConfig>,
  #[serde(default)]
  pub parent_lock_hash: String,
  #[serde(default)]
  pub patches: Vec<serde_json::Value>,
  /// Per-project overrides keyed by project identifier.
  #[serde(default)]
  pub projects: HashMap<String, ProjectConfig>,
}
/// Upstream ("parent") repository metadata in the wrapped layout.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ParentConfig {
  /// Repository identifier; its last URL segment is used as a fallback
  /// pack name when converting Pakku configs.
  pub id: String,
  pub r#ref: String,
  pub ref_type: String,
  pub remote_name: String,
  #[serde(rename = "type")]
  pub type_: String,
  pub version: String,
}
/// Main pack configuration, persisted as `pakker.json`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Config {
  /// Pack name; must be non-empty (see `validate`).
  pub name: String,
  pub version: String,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub description: Option<String>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub author: Option<String>,
  /// Override directory names (defaults to `["overrides"]`).
  #[serde(default)]
  pub overrides: Vec<String>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub server_overrides: Option<Vec<String>>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub client_overrides: Option<Vec<String>>,
  // NOTE(review): key/value semantics of `paths` (project type →
  // output directory?) are defined by consumers — confirm.
  #[serde(default)]
  pub paths: HashMap<String, String>,
  /// Per-project overrides keyed by project identifier.
  #[serde(default)]
  pub projects: Option<HashMap<String, ProjectConfig>>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub export_profiles: Option<HashMap<String, crate::export::ProfileConfig>>,
}
impl Default for Config {
  /// An empty pack: blank name/version, the single conventional
  /// "overrides" folder, and an empty-but-present projects map.
  fn default() -> Self {
    Self {
      name: String::new(),
      version: String::new(),
      description: None,
      author: None,
      overrides: vec![String::from("overrides")],
      server_overrides: None,
      client_overrides: None,
      paths: HashMap::default(),
      projects: Some(HashMap::default()),
      export_profiles: None,
    }
  }
}
/// Per-project override entry. Every field is optional so that only
/// explicitly overridden settings are serialized.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct ProjectConfig {
  #[serde(rename = "type", skip_serializing_if = "Option::is_none")]
  pub r#type: Option<ProjectType>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub side: Option<ProjectSide>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub update_strategy: Option<UpdateStrategy>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub redistributable: Option<bool>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub subpath: Option<String>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub aliases: Option<Vec<String>>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub export: Option<bool>,
}
impl Config {
pub fn load<P: AsRef<Path>>(path: P) -> Result<Self> {
let path = path.as_ref().join(CONFIG_NAME);
let content =
std::fs::read_to_string(&path).map_err(PakkerError::IoError)?;
// Try to parse as ConfigWrapper (supports both Pakker and Pakku formats)
match serde_json::from_str::<ConfigWrapper>(&content) {
Ok(ConfigWrapper::Pakker(config)) => {
config.validate()?;
Ok(config)
},
Ok(ConfigWrapper::Pakku { pakku }) => {
// Convert Pakku format to Pakker format
// Pakku format doesn't have name/version, use parent repo info as
// fallback
let name = pakku
.parent
.as_ref()
.map(|p| {
// Extract repo name from URL
p.id
.split('/')
.next_back()
.unwrap_or(&p.id)
.trim_end_matches(".git")
.to_string()
})
.unwrap_or_else(|| "unknown".to_string());
let version = pakku
.parent
.as_ref()
.map_or_else(|| "unknown".to_string(), |p| p.version.clone());
Ok(Self {
name,
version,
description: None,
author: None,
overrides: vec!["overrides".to_string()],
server_overrides: None,
client_overrides: None,
paths: HashMap::new(),
projects: Some(pakku.projects),
export_profiles: None,
})
},
Err(e) => Err(PakkerError::InvalidConfigFile(e.to_string())),
}
}
pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<()> {
self.validate()?;
let path = path.as_ref().join(CONFIG_NAME);
// Write to temporary file first (atomic write)
let temp_path = path.with_extension("tmp");
let content = serde_json::to_string_pretty(self)
.map_err(PakkerError::SerializationError)?;
std::fs::write(&temp_path, content)?;
std::fs::rename(temp_path, path)?;
Ok(())
}
pub fn validate(&self) -> Result<()> {
if self.name.is_empty() {
return Err(PakkerError::InvalidConfigFile(
"Config name cannot be empty".to_string(),
));
}
Ok(())
}
}
#[cfg(test)]
mod tests {
  use tempfile::TempDir;
  use super::*;

  /// Minimal valid `Config` shared by the tests below; previously this
  /// eleven-field literal was duplicated in every test.
  fn base_config(name: &str, version: &str) -> Config {
    Config {
      name: name.to_string(),
      version: version.to_string(),
      description: None,
      author: None,
      overrides: vec!["overrides".to_string()],
      server_overrides: None,
      client_overrides: None,
      paths: HashMap::new(),
      projects: None,
      export_profiles: None,
    }
  }

  #[test]
  fn test_config_new() {
    let config = base_config("test-pack", "1.0.0");
    assert_eq!(config.name, "test-pack");
    assert_eq!(config.version, "1.0.0");
    assert_eq!(config.overrides, vec!["overrides"]);
    assert!(config.projects.is_none());
  }

  #[test]
  fn test_config_serialization() {
    // Round-trip through JSON preserves all populated fields.
    let mut config = base_config("test-pack", "1.0.0");
    config.description = Some("A test modpack".to_string());
    config.author = Some("Test Author".to_string());
    let json = serde_json::to_string(&config).unwrap();
    let deserialized: Config = serde_json::from_str(&json).unwrap();
    assert_eq!(deserialized.name, config.name);
    assert_eq!(deserialized.version, config.version);
    assert_eq!(deserialized.description, config.description);
    assert_eq!(deserialized.author, config.author);
  }

  #[test]
  fn test_config_save_and_load() {
    // save() then load() against a temp dir round-trips the config.
    let temp_dir = TempDir::new().unwrap();
    let mut config = base_config("test-pack", "1.0.0");
    config.description = Some("Test description".to_string());
    config.save(temp_dir.path()).unwrap();
    let loaded = Config::load(temp_dir.path()).unwrap();
    assert_eq!(loaded.name, config.name);
    assert_eq!(loaded.version, config.version);
    assert_eq!(loaded.description, config.description);
  }

  #[test]
  fn test_config_compatibility_with_pakku() {
    // Test basic config loading with projects
    let config = base_config("test-modpack", "1.0.0");
    assert_eq!(config.name, "test-modpack");
    assert_eq!(config.version, "1.0.0");
    assert!(config.projects.is_none());
  }

  #[test]
  fn test_config_wrapped_format() {
    // The wrapped (Pakku) payload serializes its projects map and
    // round-trips through JSON.
    let mut projects = HashMap::new();
    projects.insert("sodium".to_string(), ProjectConfig {
      r#type: Some(ProjectType::Mod),
      side: Some(ProjectSide::Client),
      ..ProjectConfig::default()
    });
    let wrapped = PakkerWrappedConfig {
      parent: None,
      parent_lock_hash: String::new(),
      patches: vec![],
      projects,
    };
    let json = serde_json::to_string(&wrapped).unwrap();
    assert!(json.contains("\"projects\""));
    let deserialized: PakkerWrappedConfig =
      serde_json::from_str(&json).unwrap();
    assert_eq!(deserialized.projects.len(), 1);
  }

  #[test]
  fn test_config_wrapped_format_old() {
    use crate::model::fork::{LocalConfig, LocalProjectConfig};
    let mut projects = HashMap::new();
    projects.insert("sodium".to_string(), LocalProjectConfig {
      version: None,
      r#type: Some(ProjectType::Mod),
      side: Some(ProjectSide::Client),
      update_strategy: None,
      redistributable: None,
      subpath: None,
      aliases: None,
      export: None,
    });
    let wrapped_inner = LocalConfig {
      parent: None,
      projects,
      parent_lock_hash: None,
      parent_config_hash: None,
      patches: vec![],
    };
    // Just verify we can create the struct
    assert_eq!(wrapped_inner.projects.len(), 1);
  }

  #[test]
  fn test_config_validate() {
    // A non-empty name validates; an empty name is rejected.
    assert!(base_config("test", "1.0.0").validate().is_ok());
    let invalid = base_config("", "1.0.0");
    assert!(invalid.validate().is_err());
  }
}
impl Config {
pub fn get_project_config(&self, identifier: &str) -> Option<&ProjectConfig> {
self.projects.as_ref()?.get(identifier)
}
pub fn set_project_config(
&mut self,
identifier: String,
config: ProjectConfig,
) {
if self.projects.is_none() {
self.projects = Some(HashMap::new());
}
if let Some(ref mut projects) = self.projects {
projects.insert(identifier, config);
}
}
}

290
src/model/credentials.rs Normal file
View file

@ -0,0 +1,290 @@
use std::{fs, path::PathBuf};
use serde::{Deserialize, Serialize};
use crate::error::{PakkerError, Result};
/// On-disk Pakker credentials (`~/.config/pakker/credentials.json`).
/// Absent secrets are omitted from the serialized JSON.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PakkerCredentialsFile {
  #[serde(skip_serializing_if = "Option::is_none")]
  pub curseforge_api_key: Option<String>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub modrinth_token: Option<String>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub github_access_token: Option<String>,
}
/// Legacy Pakku credentials file (`~/.pakku/credentials`), read-only
/// from Pakker's point of view. Note it has no Modrinth token field.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PakkerCompatCredentialsFile {
  #[serde(skip_serializing_if = "Option::is_none")]
  pub curseforge_api_key: Option<String>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub github_access_token: Option<String>,
}
/// Resolves the user's home directory from `$HOME` (Unix) or
/// `%USERPROFILE%` (Windows), erroring when neither is set.
fn home_dir() -> Result<PathBuf> {
  std::env::var("HOME")
    .or_else(|_| std::env::var("USERPROFILE"))
    .map(PathBuf::from)
    .map_err(|_| {
      PakkerError::InternalError(
        "Could not determine home directory".to_string(),
      )
    })
}
impl PakkerCredentialsFile {
  /// Pakker-owned credentials path: `~/.config/pakker/credentials.json`
  pub fn get_path() -> Result<PathBuf> {
    Ok(
      home_dir()?
        .join(".config")
        .join("pakker")
        .join("credentials.json"),
    )
  }

  /// Loads the credentials file; a missing file yields an empty
  /// default rather than an error.
  pub fn load() -> Result<Self> {
    let path = Self::get_path()?;
    if !path.exists() {
      return Ok(Self::default());
    }
    let content = fs::read_to_string(&path).map_err(|e| {
      PakkerError::InternalError(format!(
        "Failed to read Pakker credentials file: {e}"
      ))
    })?;
    serde_json::from_str(&content).map_err(|e| {
      PakkerError::InternalError(format!(
        "Failed to parse Pakker credentials file: {e}"
      ))
    })
  }

  /// Saves the credentials atomically (write a `.tmp` file, then
  /// rename it into place).
  ///
  /// On write/rename failure the temporary file is removed on a
  /// best-effort basis so no `credentials.tmp` is left behind.
  pub fn save(&self) -> Result<()> {
    let path = Self::get_path()?;
    if let Some(parent) = path.parent() {
      fs::create_dir_all(parent).map_err(|e| {
        PakkerError::InternalError(format!(
          "Failed to create pakker config directory: {e}"
        ))
      })?;
    }
    let content = serde_json::to_string_pretty(self).map_err(|e| {
      PakkerError::InternalError(format!(
        "Failed to serialize credentials: {e}"
      ))
    })?;
    let temp_path = path.with_extension("tmp");
    fs::write(&temp_path, content).map_err(|e| {
      // Clean up a partially written temp file; ignore cleanup errors.
      let _ = fs::remove_file(&temp_path);
      PakkerError::InternalError(format!(
        "Failed to write credentials file: {e}"
      ))
    })?;
    fs::rename(&temp_path, &path).map_err(|e| {
      let _ = fs::remove_file(&temp_path);
      PakkerError::InternalError(format!(
        "Failed to save credentials file: {e}"
      ))
    })?;
    Ok(())
  }

  /// Deletes the Pakker credentials file if it exists (idempotent).
  pub fn delete() -> Result<()> {
    let path = Self::get_path()?;
    if path.exists() {
      fs::remove_file(&path).map_err(|e| {
        PakkerError::InternalError(format!(
          "Failed to delete Pakker credentials file: {e}"
        ))
      })?;
    }
    Ok(())
  }
}
impl PakkerCompatCredentialsFile {
  /// Pakku credentials path: `~/.pakku/credentials`
  /// Read-only: Pakker must never delete or modify this file.
  pub fn get_path() -> Result<PathBuf> {
    let mut path = home_dir()?;
    path.push(".pakku");
    path.push("credentials");
    Ok(path)
  }

  /// Reads the legacy Pakku credentials, yielding an empty default
  /// when the file does not exist.
  pub fn load() -> Result<Self> {
    let path = Self::get_path()?;
    if !path.exists() {
      return Ok(Self::default());
    }
    let raw = fs::read_to_string(&path).map_err(|e| {
      PakkerError::InternalError(format!(
        "Failed to read Pakku credentials file: {e}"
      ))
    })?;
    serde_json::from_str(&raw).map_err(|e| {
      PakkerError::InternalError(format!(
        "Failed to parse Pakku credentials file: {e}"
      ))
    })
  }
}
/// Where a resolved secret came from (highest priority listed first).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum CredentialsSource {
  /// Environment variable.
  Env,
  /// OS keyring.
  Keyring,
  /// A credentials file on disk (Pakker's own, or the legacy Pakku
  /// file — both report this variant).
  PakkerFile,
}
/// Secrets resolved from all sources, each paired with its origin.
#[derive(Debug, Clone, Default)]
pub struct ResolvedCredentials {
  curseforge_api_key: Option<(String, CredentialsSource)>,
  modrinth_token: Option<(String, CredentialsSource)>,
  github_access_token: Option<(String, CredentialsSource)>,
}
impl ResolvedCredentials {
  /// Resolves every secret, consulting (in priority order) environment
  /// variables, the OS keyring, the Pakker credentials file, and the
  /// legacy Pakku file. File load failures are treated as "no file".
  pub fn load() -> Result<Self> {
    let pakker_file = PakkerCredentialsFile::load().ok();
    let pakku_file = PakkerCompatCredentialsFile::load().ok();
    let cf_pakker = pakker_file
      .as_ref()
      .and_then(|f| f.curseforge_api_key.clone());
    let cf_pakku = pakku_file
      .as_ref()
      .and_then(|f| f.curseforge_api_key.clone());
    let mr_pakker =
      pakker_file.as_ref().and_then(|f| f.modrinth_token.clone());
    let gh_pakker = pakker_file
      .as_ref()
      .and_then(|f| f.github_access_token.clone());
    let gh_pakku = pakku_file
      .as_ref()
      .and_then(|f| f.github_access_token.clone());
    Ok(Self {
      curseforge_api_key: resolve_secret(
        "PAKKER_CURSEFORGE_API_KEY",
        "curseforge_api_key",
        cf_pakker,
        cf_pakku,
      )?,
      modrinth_token: resolve_secret(
        "PAKKER_MODRINTH_TOKEN",
        "modrinth_token",
        mr_pakker,
        // The legacy Pakku file has no Modrinth token field.
        None,
      )?,
      github_access_token: resolve_secret(
        "PAKKER_GITHUB_TOKEN",
        "github_access_token",
        gh_pakker,
        gh_pakku,
      )?,
    })
  }

  /// The resolved CurseForge API key, if any.
  pub fn curseforge_api_key(&self) -> Option<&str> {
    match &self.curseforge_api_key {
      Some((value, _)) => Some(value.as_str()),
      None => None,
    }
  }

  /// The resolved Modrinth token, if any.
  pub fn modrinth_token(&self) -> Option<&str> {
    match &self.modrinth_token {
      Some((value, _)) => Some(value.as_str()),
      None => None,
    }
  }

  /// The resolved GitHub access token, if any.
  pub fn github_access_token(&self) -> Option<&str> {
    match &self.github_access_token {
      Some((value, _)) => Some(value.as_str()),
      None => None,
    }
  }

  /// Origin of the CurseForge key, if one was resolved.
  pub fn curseforge_source(&self) -> Option<CredentialsSource> {
    self.curseforge_api_key.as_ref().map(|&(_, source)| source)
  }

  /// Origin of the Modrinth token, if one was resolved.
  pub fn modrinth_source(&self) -> Option<CredentialsSource> {
    self.modrinth_token.as_ref().map(|&(_, source)| source)
  }

  /// Origin of the GitHub token, if one was resolved.
  pub fn github_source(&self) -> Option<CredentialsSource> {
    self.github_access_token.as_ref().map(|&(_, source)| source)
  }

  /// Removes all three Pakker secrets from the OS keyring.
  pub fn delete_keyring() -> Result<()> {
    for entry in
      ["curseforge_api_key", "modrinth_token", "github_access_token"]
    {
      delete_keyring_secret(entry)?;
    }
    Ok(())
  }
}
/// Resolves a single secret by priority: environment variable, then
/// OS keyring, then the Pakker file, then the legacy Pakku file.
/// Blank (whitespace-only) values at any level are treated as absent.
///
/// Env/keyring values are stored trimmed; file values are stored
/// as-is (trimming is only used for the emptiness check).
fn resolve_secret(
  env_key: &str,
  keyring_entry: &str,
  pakker_file_value: Option<String>,
  pakku_file_value: Option<String>,
) -> Result<Option<(String, CredentialsSource)>> {
  if let Ok(value) = std::env::var(env_key) {
    let trimmed = value.trim();
    if !trimmed.is_empty() {
      return Ok(Some((trimmed.to_string(), CredentialsSource::Env)));
    }
  }
  if let Ok(value) = get_keyring_secret(keyring_entry) {
    let trimmed = value.trim();
    if !trimmed.is_empty() {
      return Ok(Some((trimmed.to_string(), CredentialsSource::Keyring)));
    }
  }
  let from_file = pakker_file_value
    .filter(|v| !v.trim().is_empty())
    .or_else(|| pakku_file_value.filter(|v| !v.trim().is_empty()))
    .map(|v| (v, CredentialsSource::PakkerFile));
  Ok(from_file)
}
/// Reads a secret from the OS keyring under the "pakker" service.
fn get_keyring_secret(
  entry: &str,
) -> std::result::Result<String, keyring::Error> {
  keyring::Entry::new("pakker", entry)?.get_password()
}
/// Stores `value` in the OS keyring under the "pakker" service and
/// the given `entry` name, mapping keyring failures to internal
/// errors that name the entry.
pub fn set_keyring_secret(entry: &str, value: &str) -> Result<()> {
  let keyring_entry = keyring::Entry::new("pakker", entry).map_err(|e| {
    PakkerError::InternalError(format!(
      "Failed to access keyring entry {entry}: {e}"
    ))
  })?;
  keyring_entry.set_password(value).map_err(|e| {
    PakkerError::InternalError(format!(
      "Failed to store secret in keyring entry {entry}: {e}"
    ))
  })
}
fn delete_keyring_secret(entry: &str) -> Result<()> {
let e = keyring::Entry::new("pakker", entry).map_err(|e| {
PakkerError::InternalError(format!(
"Failed to access keyring entry {entry}: {e}"
))
})?;
match e.delete_credential() {
Ok(()) => Ok(()),
Err(keyring::Error::NoEntry) => Ok(()),
Err(e) => {
Err(PakkerError::InternalError(format!(
"Failed to delete keyring entry {entry}: {e}"
)))
},
}
}

156
src/model/enums.rs Normal file
View file

@ -0,0 +1,156 @@
use std::{fmt, str::FromStr};
use serde::{Deserialize, Serialize};
/// The kind of content a project provides.
///
/// Serialized names are SCREAMING_SNAKE_CASE via explicit per-variant
/// renames; the previous container-level `rename_all = "UPPERCASE"`
/// was dead (every variant overrides it) and misleading, so it has
/// been removed — serialization is unchanged.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum ProjectType {
  #[serde(rename = "MOD")]
  Mod,
  #[serde(rename = "RESOURCE_PACK")]
  ResourcePack,
  #[serde(rename = "DATA_PACK")]
  DataPack,
  #[serde(rename = "SHADER")]
  Shader,
  #[serde(rename = "WORLD")]
  World,
}
impl fmt::Display for ProjectType {
  /// Human-readable kebab-case name used in CLI output.
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    let name = match self {
      Self::Mod => "mod",
      Self::ResourcePack => "resource-pack",
      Self::DataPack => "data-pack",
      Self::Shader => "shader",
      Self::World => "world",
    };
    f.write_str(name)
  }
}
impl FromStr for ProjectType {
  type Err = String;

  /// Parses a project type case-insensitively. Accepts kebab-case,
  /// squashed ("resourcepack"), and — newly, backward-compatibly —
  /// snake_case ("resource_pack") spellings.
  fn from_str(s: &str) -> Result<Self, Self::Err> {
    match s.to_lowercase().replace('_', "-").as_str() {
      "mod" => Ok(Self::Mod),
      "resource-pack" | "resourcepack" => Ok(Self::ResourcePack),
      "data-pack" | "datapack" => Ok(Self::DataPack),
      "shader" => Ok(Self::Shader),
      "world" => Ok(Self::World),
      _ => Err(format!("Invalid project type: {s}")),
    }
  }
}
/// Which side of the game a project applies to.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum ProjectSide {
  #[serde(rename = "CLIENT")]
  Client,
  #[serde(rename = "SERVER")]
  Server,
  #[serde(rename = "BOTH")]
  Both,
}
impl FromStr for ProjectSide {
  type Err = String;

  /// Case-insensitive parse of "client"/"server"/"both".
  fn from_str(s: &str) -> Result<Self, Self::Err> {
    let lowered = s.to_lowercase();
    match lowered.as_str() {
      "client" => Ok(Self::Client),
      "server" => Ok(Self::Server),
      "both" => Ok(Self::Both),
      _ => Err(format!("Invalid project side: {s}")),
    }
  }
}
impl std::fmt::Display for ProjectSide {
  /// Uppercase display name, matching the serialized form.
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    let label = match self {
      Self::Client => "CLIENT",
      Self::Server => "SERVER",
      Self::Both => "BOTH",
    };
    f.write_str(label)
  }
}
/// How a project is treated when updating the pack.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum UpdateStrategy {
  #[serde(rename = "LATEST")]
  Latest,
  #[serde(rename = "NONE")]
  None,
}
impl FromStr for UpdateStrategy {
  type Err = String;

  /// Case-insensitive parse of "latest"/"none".
  fn from_str(s: &str) -> Result<Self, Self::Err> {
    let upper = s.to_uppercase();
    match upper.as_str() {
      "LATEST" => Ok(Self::Latest),
      "NONE" => Ok(Self::None),
      _ => Err(format!("Invalid update strategy: {s}")),
    }
  }
}
impl std::fmt::Display for UpdateStrategy {
  /// Uppercase display name, matching the serialized form.
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    let label = match self {
      Self::Latest => "LATEST",
      Self::None => "NONE",
    };
    f.write_str(label)
  }
}
/// Stability channel of a published project file.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ReleaseType {
  Release,
  Beta,
  Alpha,
}
impl FromStr for ReleaseType {
  type Err = String;

  /// Case-insensitive parse of "release"/"beta"/"alpha".
  fn from_str(s: &str) -> Result<Self, Self::Err> {
    let lowered = s.to_lowercase();
    match lowered.as_str() {
      "release" => Ok(Self::Release),
      "beta" => Ok(Self::Beta),
      "alpha" => Ok(Self::Alpha),
      _ => Err(format!("Invalid release type: {s}")),
    }
  }
}
impl fmt::Display for ReleaseType {
  /// Lowercase display name, matching the serialized form.
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    let label = match self {
      Self::Release => "release",
      Self::Beta => "beta",
      Self::Alpha => "alpha",
    };
    f.write_str(label)
  }
}
/// Distribution platform a pack targets.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum Target {
  CurseForge,
  Modrinth,
  Multiplatform,
}
impl std::str::FromStr for Target {
  type Err = String;

  /// Case-insensitive parse of "curseforge"/"modrinth"/"multiplatform".
  fn from_str(s: &str) -> Result<Self, Self::Err> {
    let lowered = s.to_lowercase();
    match lowered.as_str() {
      "curseforge" => Ok(Self::CurseForge),
      "modrinth" => Ok(Self::Modrinth),
      "multiplatform" => Ok(Self::Multiplatform),
      _ => Err(format!("Invalid target: {s}")),
    }
  }
}

480
src/model/fork.rs Normal file
View file

@ -0,0 +1,480 @@
use std::collections::HashMap;
use serde::{Deserialize, Serialize};
use sha2::{Sha256, digest::Digest};
use crate::model::enums::{ProjectSide, ProjectType, UpdateStrategy};
/// Fork integrity verification data
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ForkIntegrity {
  /// SHA256 hash of the parent lockfile content
  pub lockfile_hash: String,
  /// Git commit SHA of the parent
  pub commit_sha: String,
  /// Hash of the parent config (pakku.json)
  pub config_hash: String,
  /// Timestamp of verification, in seconds since the Unix epoch
  pub verified_at: u64,
}
impl ForkIntegrity {
  /// Builds an integrity record stamped with the current Unix time.
  pub fn new(
    lockfile_hash: String,
    commit_sha: String,
    config_hash: String,
  ) -> Self {
    use std::time::{SystemTime, UNIX_EPOCH};
    // A clock set before the epoch yields 0 rather than panicking.
    let now = SystemTime::now()
      .duration_since(UNIX_EPOCH)
      .map(|d| d.as_secs())
      .unwrap_or(0);
    Self {
      lockfile_hash,
      commit_sha,
      config_hash,
      verified_at: now,
    }
  }
}
/// Hex-encoded (lowercase) SHA-256 digest of `content`.
pub fn hash_content(content: &str) -> String {
  // `Digest::digest` is the one-shot form of update + finalize.
  format!("{:x}", Sha256::digest(content.as_bytes()))
}
/// Reference type for Git operations: the kind of ref a fork parent
/// is pinned to.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum RefType {
  Branch,
  Tag,
  Commit,
}
impl std::str::FromStr for RefType {
  type Err = String;

  /// Case-insensitive parse of "branch"/"tag"/"commit".
  fn from_str(s: &str) -> Result<Self, Self::Err> {
    let lowered = s.to_lowercase();
    match lowered.as_str() {
      "branch" => Ok(Self::Branch),
      "tag" => Ok(Self::Tag),
      "commit" => Ok(Self::Commit),
      _ => Err(format!("Invalid ref type: {s}")),
    }
  }
}
impl std::fmt::Display for RefType {
  /// Lowercase display name (serde form is SCREAMING_SNAKE_CASE).
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    let name = match self {
      Self::Branch => "branch",
      Self::Tag => "tag",
      Self::Commit => "commit",
    };
    f.write_str(name)
  }
}
/// Parent configuration for fork management
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ParentConfig {
  #[serde(rename = "type")]
  pub type_: String, // Always "git" for now
  pub id: String, // Git URL
  #[serde(skip_serializing_if = "Option::is_none")]
  pub version: Option<String>, // Current commit SHA
  #[serde(rename = "ref")]
  pub ref_: String, // Branch/tag/commit name
  pub ref_type: RefType,
  /// Git remote tracking the parent; defaults to "origin" when absent.
  #[serde(default = "default_remote_name")]
  pub remote_name: String,
}
/// Serde default for `ParentConfig::remote_name`.
fn default_remote_name() -> String {
  String::from("origin")
}
/// Local project configuration for overrides
///
/// Every field is optional so only explicitly overridden settings are
/// serialized.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LocalProjectConfig {
  #[serde(skip_serializing_if = "Option::is_none")]
  pub version: Option<String>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub r#type: Option<ProjectType>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub side: Option<ProjectSide>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub update_strategy: Option<UpdateStrategy>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub redistributable: Option<bool>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub subpath: Option<String>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub aliases: Option<Vec<String>>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub export: Option<bool>,
}
/// Local configuration stored in pakku.json under "pakku" section
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct LocalConfig {
  /// Upstream repository this pack was forked from, if any.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub parent: Option<ParentConfig>,
  /// Per-project local overrides keyed by project identifier.
  #[serde(default)]
  pub projects: HashMap<String, LocalProjectConfig>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub parent_lock_hash: Option<String>,
  #[serde(skip_serializing_if = "Option::is_none")]
  pub parent_config_hash: Option<String>,
  #[serde(default)]
  pub patches: Vec<String>,
}
impl LocalConfig {
  /// Whether this config tracks an upstream parent repository.
  pub const fn has_parent(&self) -> bool {
    self.parent.is_some()
  }

  /// Load `LocalConfig` from the "pakku" section of pakker.json (with
  /// fallback to pakku.json). A missing file or section yields the
  /// default (empty) config.
  pub fn load(
    dir: &std::path::Path,
  ) -> Result<Self, crate::error::PakkerError> {
    use std::fs;
    use crate::error::PakkerError;
    // Prefer the Pakker-native file name, fall back to Pakku's.
    let config_path = if dir.join("pakker.json").exists() {
      dir.join("pakker.json")
    } else {
      dir.join("pakku.json")
    };
    if !config_path.exists() {
      return Ok(Self::default());
    }
    let content = fs::read_to_string(&config_path).map_err(|e| {
      PakkerError::InvalidConfigFile(format!(
        "Failed to read {}: {}",
        config_path.display(),
        e
      ))
    })?;
    // Error messages previously hard-coded "pakku.json" even when the
    // file actually read was pakker.json; report the real path.
    let json_value: serde_json::Value = serde_json::from_str(&content)
      .map_err(|e| {
        PakkerError::InvalidConfigFile(format!(
          "Failed to parse {}: {}",
          config_path.display(),
          e
        ))
      })?;
    // The local config lives under the "pakku" key; other top-level
    // keys belong to the main Config and are ignored here.
    if let Some(pakku_section) = json_value.get("pakku") {
      let local_config: Self = serde_json::from_value(pakku_section.clone())
        .map_err(|e| {
          PakkerError::InvalidConfigFile(format!(
            "Failed to parse pakku section: {e}"
          ))
        })?;
      Ok(local_config)
    } else {
      Ok(Self::default())
    }
  }

  /// Save `LocalConfig` into the "pakku" section of pakker.json (note:
  /// always the Pakker-native file), preserving every other top-level
  /// key already present in that file.
  pub fn save(
    &self,
    dir: &std::path::Path,
  ) -> Result<(), crate::error::PakkerError> {
    use std::fs;
    use crate::error::PakkerError;
    let config_path = dir.join("pakker.json");
    // Read the existing file so unrelated keys survive the rewrite.
    let mut json_value: serde_json::Value = if config_path.exists() {
      let content = fs::read_to_string(&config_path).map_err(|e| {
        PakkerError::InvalidConfigFile(format!(
          "Failed to read {}: {}",
          config_path.display(),
          e
        ))
      })?;
      serde_json::from_str(&content).map_err(|e| {
        PakkerError::InvalidConfigFile(format!(
          "Failed to parse {}: {}",
          config_path.display(),
          e
        ))
      })?
    } else {
      serde_json::json!({})
    };
    // Update or create the "pakku" section.
    let local_config_json =
      serde_json::to_value(self).map_err(PakkerError::SerializationError)?;
    json_value["pakku"] = local_config_json;
    // Write back to file
    let content = serde_json::to_string_pretty(&json_value)
      .map_err(PakkerError::SerializationError)?;
    fs::write(&config_path, content).map_err(|e| {
      PakkerError::IoError(std::io::Error::other(format!(
        "Failed to write {}: {}",
        config_path.display(),
        e
      )))
    })?;
    Ok(())
  }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_ref_type_serde_serialization() {
  // Test that RefType serializes to uppercase screaming snake case
  let branch = RefType::Branch;
  let tag = RefType::Tag;
  let commit = RefType::Commit;
  assert_eq!(serde_json::to_string(&branch).unwrap(), "\"BRANCH\"");
  assert_eq!(serde_json::to_string(&tag).unwrap(), "\"TAG\"");
  assert_eq!(serde_json::to_string(&commit).unwrap(), "\"COMMIT\"");
}
#[test]
fn test_ref_type_clap_value_enum() {
  // Test that clap ValueEnum derives work correctly
  // NOTE(review): this only constructs the three variants; it does
  // not exercise any clap parsing — consider strengthening.
  let values: Vec<RefType> =
    vec![RefType::Branch, RefType::Tag, RefType::Commit];
  assert_eq!(values.len(), 3);
}
#[test]
fn test_parent_config_new() {
  // Field-by-field construction check with a non-default remote name.
  let config = ParentConfig {
    type_: "git".to_string(),
    id: "https://github.com/example/repo".to_string(),
    version: None,
    ref_: "main".to_string(),
    ref_type: RefType::Branch,
    remote_name: "upstream".to_string(),
  };
  assert_eq!(config.type_, "git");
  assert_eq!(config.id, "https://github.com/example/repo");
  assert_eq!(config.version, None);
  assert_eq!(config.ref_, "main");
  assert_eq!(config.ref_type, RefType::Branch);
  assert_eq!(config.remote_name, "upstream");
}
#[test]
fn test_parent_config_default_remote() {
  // NOTE(review): "origin" is set explicitly here, so this does not
  // actually exercise the serde `default_remote_name` default.
  let config = ParentConfig {
    type_: "git".to_string(),
    id: "https://github.com/example/repo".to_string(),
    version: None,
    ref_: "main".to_string(),
    ref_type: RefType::Branch,
    remote_name: "origin".to_string(),
  };
  assert_eq!(config.remote_name, "origin");
}
#[test]
fn test_parent_config_serde_roundtrip() {
  // Serialize-then-deserialize preserves every field, including a
  // populated version (commit SHA).
  let mut original = ParentConfig {
    type_: "git".to_string(),
    id: "https://github.com/example/repo.git".to_string(),
    version: None,
    ref_: "v1.0.0".to_string(),
    ref_type: RefType::Tag,
    remote_name: "origin".to_string(),
  };
  original.version = Some("abc123def456".to_string());
  let json = serde_json::to_string(&original).unwrap();
  let deserialized: ParentConfig = serde_json::from_str(&json).unwrap();
  assert_eq!(deserialized.type_, original.type_);
  assert_eq!(deserialized.id, original.id);
  assert_eq!(deserialized.version, original.version);
  assert_eq!(deserialized.ref_, original.ref_);
  assert_eq!(deserialized.ref_type, original.ref_type);
  assert_eq!(deserialized.remote_name, original.remote_name);
}
#[test]
fn test_local_project_config_default() {
  // An all-None override entry constructs and reads back as None.
  let config = LocalProjectConfig {
    version: None,
    r#type: None,
    side: None,
    update_strategy: None,
    redistributable: None,
    subpath: None,
    aliases: None,
    export: None,
  };
  assert_eq!(config.version, None);
  assert_eq!(config.side, None);
  assert_eq!(config.update_strategy, None);
  assert_eq!(config.redistributable, None);
}
#[test]
fn test_local_config_default() {
let config = LocalConfig {
parent: None,
projects: HashMap::new(),
parent_lock_hash: None,
parent_config_hash: None,
patches: vec![],
};
assert!(config.parent.is_none());
assert!(config.projects.is_empty());
assert!(config.patches.is_empty());
}
#[test]
fn test_local_config_has_parent_false() {
let config = LocalConfig {
parent: None,
projects: HashMap::new(),
parent_lock_hash: None,
parent_config_hash: None,
patches: vec![],
};
assert!(!config.has_parent());
}
#[test]
fn test_local_config_has_parent_true() {
let mut config = LocalConfig {
parent: None,
projects: HashMap::new(),
parent_lock_hash: None,
parent_config_hash: None,
patches: vec![],
};
config.parent = Some(ParentConfig {
type_: "git".to_string(),
id: "https://github.com/example/repo".to_string(),
version: None,
ref_: "main".to_string(),
ref_type: RefType::Branch,
remote_name: "origin".to_string(),
});
assert!(config.has_parent());
}
#[test]
fn test_local_config_projects_insertion() {
let mut config = LocalConfig {
parent: None,
projects: HashMap::new(),
parent_lock_hash: None,
parent_config_hash: None,
patches: vec![],
};
config
.projects
.insert("sodium".to_string(), LocalProjectConfig {
version: Some("0.5.0".to_string()),
r#type: None,
side: Some(ProjectSide::Both),
update_strategy: Some(UpdateStrategy::Latest),
redistributable: Some(true),
subpath: None,
aliases: None,
export: None,
});
assert_eq!(config.projects.len(), 1);
let project = config.projects.get("sodium").unwrap();
assert_eq!(project.version, Some("0.5.0".to_string()));
assert_eq!(project.side, Some(ProjectSide::Both));
}
#[test]
fn test_local_config_patches() {
let mut config = LocalConfig {
parent: None,
projects: HashMap::new(),
parent_lock_hash: None,
parent_config_hash: None,
patches: vec![],
};
config.patches.push("custom.patch".to_string());
config.patches.push("bugfix.patch".to_string());
assert_eq!(config.patches.len(), 2);
assert_eq!(config.patches[0], "custom.patch");
}
#[test]
fn test_local_config_serde_roundtrip() {
let mut config = LocalConfig {
parent: None,
projects: HashMap::new(),
parent_lock_hash: None,
parent_config_hash: None,
patches: vec![],
};
config.parent = Some(ParentConfig {
type_: "git".to_string(),
id: "https://github.com/example/repo.git".to_string(),
version: None,
ref_: "develop".to_string(),
ref_type: RefType::Branch,
remote_name: "origin".to_string(),
});
config.parent.as_mut().unwrap().version = Some("def456".to_string());
config
.projects
.insert("test-mod".to_string(), LocalProjectConfig {
version: Some("1.0.0".to_string()),
r#type: None,
side: Some(ProjectSide::Client),
update_strategy: None,
redistributable: Some(false),
subpath: None,
aliases: None,
export: None,
});
config.patches.push("test.patch".to_string());
config.parent_lock_hash = Some("hash123".to_string());
let json = serde_json::to_string(&config).unwrap();
let deserialized: LocalConfig = serde_json::from_str(&json).unwrap();
assert!(deserialized.parent.is_some());
let parent = deserialized.parent.unwrap();
assert_eq!(parent.id, "https://github.com/example/repo.git");
assert_eq!(parent.ref_, "develop");
assert_eq!(parent.ref_type, RefType::Branch);
assert_eq!(parent.version, Some("def456".to_string()));
assert_eq!(deserialized.projects.len(), 1);
assert!(deserialized.projects.contains_key("test-mod"));
assert_eq!(deserialized.patches.len(), 1);
assert_eq!(deserialized.parent_lock_hash, Some("hash123".to_string()));
}
}

622
src/model/lockfile.rs Normal file
View file

@ -0,0 +1,622 @@
use std::{collections::HashMap, path::Path};
use serde::{Deserialize, Serialize};
use super::{enums::Target, project::Project};
use crate::error::{PakkerError, Result};
#[cfg(test)]
mod tests {
  use tempfile::TempDir;
  use super::*;
  use crate::model::enums::{ProjectSide, ProjectType, UpdateStrategy};

  /// Build a minimal valid project with the given pakku id and slug,
  /// registered on the "modrinth" platform.
  fn create_test_project(pakku_id: &str, slug: &str) -> Project {
    use std::collections::HashSet;
    let mut name_map = HashMap::new();
    name_map.insert("modrinth".to_string(), slug.to_string());
    let mut id_map = HashMap::new();
    id_map.insert("modrinth".to_string(), pakku_id.to_string());
    Project {
      pakku_id: Some(pakku_id.to_string()),
      pakku_links: HashSet::new(),
      r#type: ProjectType::Mod,
      side: ProjectSide::Both,
      slug: name_map.clone(),
      name: name_map.clone(),
      id: id_map,
      update_strategy: UpdateStrategy::Latest,
      redistributable: true,
      subpath: None,
      aliases: HashSet::new(),
      export: true,
      files: vec![],
    }
  }

  /// Build a valid, empty lockfile targeting Modrinth with Minecraft
  /// 1.20.1 and the fabric loader; tests tweak it from there.
  fn base_lockfile() -> LockFile {
    let mut loaders = HashMap::new();
    loaders.insert("fabric".to_string(), "0.15.0".to_string());
    LockFile {
      target: Some(Target::Modrinth),
      mc_versions: vec!["1.20.1".to_string()],
      loaders,
      projects: Vec::new(),
      lockfile_version: 1,
    }
  }

  #[test]
  fn test_lockfile_new() {
    let lockfile = base_lockfile();
    assert_eq!(lockfile.target, Some(Target::Modrinth));
    assert_eq!(lockfile.mc_versions, vec!["1.20.1".to_string()]);
    assert_eq!(
      lockfile.loaders.get("fabric"),
      Some(&"0.15.0".to_string())
    );
    assert_eq!(lockfile.projects.len(), 0);
    assert_eq!(lockfile.lockfile_version, 1);
  }

  #[test]
  fn test_lockfile_serialization() {
    // NOTE(review): despite the name, this exercises add_project /
    // find_project rather than serde; name kept for history.
    let mut lockfile = base_lockfile();
    lockfile.add_project(create_test_project("test-id", "test-slug"));
    let found = lockfile.find_project("test-id");
    assert!(found.is_some());
    assert_eq!(found.unwrap().pakku_id, Some("test-id".to_string()));
    assert!(lockfile.find_project("nonexistent").is_none());
  }

  #[test]
  fn test_lockfile_find_project_by_platform_id() {
    let mut lockfile = base_lockfile();
    lockfile.add_project(create_test_project("platform-123", "test-slug"));
    let found =
      lockfile.find_project_by_platform_id("modrinth", "platform-123");
    assert!(found.is_some());
    assert_eq!(
      found.unwrap().id.get("modrinth"),
      Some(&"platform-123".to_string())
    );
  }

  #[test]
  fn test_lockfile_get_loader_names() {
    let mut lockfile = base_lockfile();
    lockfile
      .loaders
      .insert("forge".to_string(), "47.1.0".to_string());
    let mut loader_names = lockfile.get_loader_names();
    loader_names.sort();
    assert_eq!(loader_names, vec!["fabric", "forge"]);
  }

  #[test]
  fn test_lockfile_save_and_load() {
    let temp_dir = TempDir::new().unwrap();
    let mut lockfile = base_lockfile();
    lockfile.add_project(create_test_project("test-mod", "test-slug"));
    lockfile.save(temp_dir.path()).unwrap();
    let loaded = LockFile::load(temp_dir.path()).unwrap();
    assert_eq!(loaded.target, lockfile.target);
    assert_eq!(loaded.mc_versions, lockfile.mc_versions);
    assert_eq!(loaded.projects.len(), 1);
  }

  #[test]
  fn test_lockfile_compatibility_with_pakku() {
    // Test that we can parse a Pakku-generated lockfile
    let pakku_json = r#"{
      "target": "modrinth",
      "mc_versions": ["1.20.1"],
      "loaders": {"fabric": "0.15.0"},
      "projects": [
        {
          "pakku_id": "fabric-api",
          "type": "MOD",
          "side": "BOTH",
          "slug": {
            "modrinth": "fabric-api"
          },
          "name": {
            "modrinth": "Fabric API"
          },
          "id": {
            "modrinth": "P7dR8mSH"
          },
          "updateStrategy": "LATEST",
          "redistributable": true,
          "files": [],
          "pakku_links": []
        }
      ],
      "lockfile_version": 1
    }"#;
    let lockfile: LockFile = serde_json::from_str(pakku_json).unwrap();
    assert_eq!(lockfile.target, Some(Target::Modrinth));
    assert_eq!(lockfile.mc_versions, vec!["1.20.1"]);
    assert_eq!(lockfile.projects.len(), 1);
  }

  #[test]
  fn test_lockfile_validation_invalid_version() {
    // A lockfile with an unsupported schema version must fail to load.
    let temp_dir = TempDir::new().unwrap();
    let lockfile_path = temp_dir.path().join("pakku-lock.json");
    let invalid_json = r#"{
      "target": "modrinth",
      "mc_versions": ["1.20.1"],
      "loaders": {"fabric": "0.15.0"},
      "projects": [],
      "lockfile_version": 999
    }"#;
    std::fs::write(&lockfile_path, invalid_json).unwrap();
    let result = LockFile::load(temp_dir.path());
    assert!(result.is_err());
  }

  #[test]
  fn test_lockfile_validation_duplicate_pakku_ids() {
    // Duplicate pakku_ids must fail validation.
    let mut lockfile = base_lockfile();
    lockfile.add_project(create_test_project("duplicate-id", "slug1"));
    lockfile.add_project(create_test_project("duplicate-id", "slug2"));
    assert!(lockfile.validate().is_err());
  }

  #[test]
  fn test_lockfile_atomic_write() {
    // save must leave no temp file behind and produce the real file.
    let temp_dir = TempDir::new().unwrap();
    let lockfile = base_lockfile();
    lockfile.save(temp_dir.path()).unwrap();
    assert!(!temp_dir.path().join("pakku-lock.tmp").exists());
    assert!(temp_dir.path().join("pakku-lock.json").exists());
  }

  #[test]
  fn test_lockfile_sort_projects() {
    // Projects are sorted alphabetically by display name.
    let mut lockfile = base_lockfile();
    lockfile.add_project(create_test_project("zebra-mod", "zebra"));
    lockfile.add_project(create_test_project("alpha-mod", "alpha"));
    lockfile.add_project(create_test_project("middle-mod", "middle"));
    lockfile.sort_projects();
    assert_eq!(lockfile.projects[0].pakku_id, Some("alpha-mod".to_string()));
    assert_eq!(
      lockfile.projects[1].pakku_id,
      Some("middle-mod".to_string())
    );
    assert_eq!(lockfile.projects[2].pakku_id, Some("zebra-mod".to_string()));
  }

  #[test]
  fn test_lockfile_find_project_mut() {
    // Mutations through find_project_mut are visible via get_project.
    let mut lockfile = base_lockfile();
    lockfile.add_project(create_test_project("test-id", "test-slug"));
    if let Some(project) = lockfile.find_project_mut("test-id") {
      project.redistributable = false;
    }
    let found = lockfile.get_project("test-id").unwrap();
    assert!(!found.redistributable);
  }

  #[test]
  fn test_lockfile_multiple_loaders() {
    let mut lockfile = base_lockfile();
    lockfile
      .loaders
      .insert("forge".to_string(), "47.1.0".to_string());
    lockfile
      .loaders
      .insert("quilt".to_string(), "0.20.0".to_string());
    let loader_names = lockfile.get_loader_names();
    assert_eq!(loader_names.len(), 3);
    assert!(loader_names.contains(&"fabric".to_string()));
    assert!(loader_names.contains(&"forge".to_string()));
    assert!(loader_names.contains(&"quilt".to_string()));
  }

  #[test]
  fn test_lockfile_multiple_mc_versions() {
    let mc_versions = vec![
      "1.20.1".to_string(),
      "1.20.2".to_string(),
      "1.20.4".to_string(),
    ];
    let lockfile = LockFile {
      mc_versions: mc_versions.clone(),
      ..base_lockfile()
    };
    assert_eq!(lockfile.mc_versions, mc_versions);
  }

  #[test]
  fn test_lockfile_roundtrip_preserves_data() {
    // save/load roundtrip must preserve all fields.
    let temp_dir = TempDir::new().unwrap();
    let mut lockfile = base_lockfile();
    lockfile
      .loaders
      .insert("forge".to_string(), "47.1.0".to_string());
    lockfile.mc_versions = vec!["1.20.1".to_string(), "1.20.4".to_string()];
    lockfile.add_project(create_test_project("mod1", "slug1"));
    lockfile.add_project(create_test_project("mod2", "slug2"));
    lockfile.save(temp_dir.path()).unwrap();
    let loaded = LockFile::load(temp_dir.path()).unwrap();
    assert_eq!(loaded.target, Some(Target::Modrinth));
    assert_eq!(loaded.mc_versions, lockfile.mc_versions);
    assert_eq!(loaded.loaders, lockfile.loaders);
    assert_eq!(loaded.projects.len(), 2);
    assert_eq!(loaded.lockfile_version, 1);
  }

  #[test]
  fn test_lockfile_remove_nonexistent_project() {
    let mut lockfile = base_lockfile();
    assert!(lockfile.remove_project("nonexistent-id").is_none());
  }

  #[test]
  fn test_lockfile_empty_projects_list() {
    // An empty projects list is valid.
    let lockfile = base_lockfile();
    assert_eq!(lockfile.projects.len(), 0);
    assert!(lockfile.validate().is_ok());
  }

  #[test]
  fn test_lockfile_pretty_json_format() {
    // Saved JSON must be pretty-printed.
    let temp_dir = TempDir::new().unwrap();
    base_lockfile().save(temp_dir.path()).unwrap();
    let content =
      std::fs::read_to_string(temp_dir.path().join("pakku-lock.json")).unwrap();
    assert!(content.contains('\n'));
    assert!(content.contains("  ")); // Indentation
  }

  #[test]
  fn test_lockfile_missing_file() {
    // Loading from a non-existent directory must fail.
    let temp_dir = TempDir::new().unwrap();
    let nonexistent = temp_dir.path().join("nonexistent");
    let result = LockFile::load(&nonexistent);
    assert!(result.is_err());
  }

  #[test]
  fn test_lockfile_corrupted_json() {
    // Corrupted JSON must surface as an error, not a panic.
    let temp_dir = TempDir::new().unwrap();
    let lockfile_path = temp_dir.path().join("pakku-lock.json");
    std::fs::write(&lockfile_path, "not valid json {[}").unwrap();
    let result = LockFile::load(temp_dir.path());
    assert!(result.is_err());
  }
}
/// Lockfile schema version this implementation reads and writes;
/// `LockFile::validate` rejects any other value.
const LOCKFILE_VERSION: u32 = 1;
/// File name of the lockfile inside a modpack directory.
const LOCKFILE_NAME: &str = "pakku-lock.json";
/// In-memory representation of `pakku-lock.json`: the resolved state
/// of a modpack (target platform, MC versions, loaders, projects).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LockFile {
  /// Distribution platform (e.g. Modrinth); omitted from JSON if unset.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub target: Option<Target>,
  /// Minecraft versions the pack supports; `validate` requires >= 1.
  pub mc_versions: Vec<String>,
  /// Loader name -> loader version; `validate` requires >= 1 entry.
  pub loaders: HashMap<String, String>,
  /// Resolved projects; kept sorted by display name after load/add.
  pub projects: Vec<Project>,
  /// Schema version; must equal `LOCKFILE_VERSION` to pass validation.
  pub lockfile_version: u32,
}
impl LockFile {
  /// Load and validate the lockfile from directory `path` (which must
  /// contain `pakku-lock.json`).
  ///
  /// # Errors
  ///
  /// `IoError` when the file cannot be read, `InvalidLockFile` when it
  /// cannot be parsed or fails validation.
  pub fn load<P: AsRef<Path>>(path: P) -> Result<Self> {
    Self::load_with_validation(path, true)
  }

  /// Load the lockfile, optionally skipping [`LockFile::validate`].
  ///
  /// Projects are always re-sorted after loading so the in-memory
  /// order is deterministic regardless of on-disk order.
  pub fn load_with_validation<P: AsRef<Path>>(
    path: P,
    validate: bool,
  ) -> Result<Self> {
    let path = path.as_ref().join(LOCKFILE_NAME);
    let content =
      std::fs::read_to_string(&path).map_err(PakkerError::IoError)?;
    let mut lockfile: Self = serde_json::from_str(&content)
      .map_err(|e| PakkerError::InvalidLockFile(e.to_string()))?;
    if validate {
      lockfile.validate()?;
    }
    lockfile.sort_projects();
    Ok(lockfile)
  }

  /// Validate `self`, then write it into directory `path`.
  ///
  /// # Errors
  ///
  /// Validation errors, `SerializationError`, or `IoError` from the
  /// underlying write.
  pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<()> {
    self.validate()?;
    self.write_to(path.as_ref())
  }

  /// Write the lockfile without running validation first. Useful for
  /// persisting intermediate (possibly incomplete) state.
  pub fn save_without_validation<P: AsRef<Path>>(&self, path: P) -> Result<()> {
    self.write_to(path.as_ref())
  }

  /// Serialize `self` and atomically persist it as `pakku-lock.json`
  /// in `dir`: the JSON is first written to `pakku-lock.tmp` and then
  /// renamed over the final file, so readers never observe a
  /// half-written lockfile.
  fn write_to(&self, dir: &Path) -> Result<()> {
    let final_path = dir.join(LOCKFILE_NAME);
    let tmp_path = dir.join("pakku-lock.tmp");
    let content = serde_json::to_string_pretty(self)
      .map_err(PakkerError::SerializationError)?;
    std::fs::write(&tmp_path, content).map_err(PakkerError::IoError)?;
    // rename on the same filesystem atomically replaces the target.
    std::fs::rename(&tmp_path, &final_path).map_err(PakkerError::IoError)
  }

  /// Check structural invariants: supported schema version, at least
  /// one MC version and loader, and unique pakku IDs.
  ///
  /// # Errors
  ///
  /// `InvalidLockFile` describing the first violated invariant.
  pub fn validate(&self) -> Result<()> {
    if self.lockfile_version != LOCKFILE_VERSION {
      return Err(PakkerError::InvalidLockFile(format!(
        "Unsupported lockfile version: {}",
        self.lockfile_version
      )));
    }
    if self.mc_versions.is_empty() {
      return Err(PakkerError::InvalidLockFile(
        "At least one Minecraft version is required".to_string(),
      ));
    }
    if self.loaders.is_empty() {
      return Err(PakkerError::InvalidLockFile(
        "At least one loader is required".to_string(),
      ));
    }
    // Check for unique pakku IDs
    let mut seen_ids = std::collections::HashSet::new();
    for project in &self.projects {
      if let Some(ref pakku_id) = project.pakku_id
        && !seen_ids.insert(pakku_id)
      {
        return Err(PakkerError::InvalidLockFile(format!(
          "Duplicate pakku ID: {pakku_id}"
        )));
      }
    }
    Ok(())
  }

  /// Sort projects case-insensitively by display name so output files
  /// diff cleanly between runs.
  pub fn sort_projects(&mut self) {
    self.projects.sort_by(|a, b| {
      a.get_name()
        .to_lowercase()
        .cmp(&b.get_name().to_lowercase())
    });
  }

  /// Add a project and keep the list sorted. Routes through
  /// [`LockFile::sort_projects`] so insertion uses the same
  /// case-insensitive order as everywhere else (the old code sorted
  /// case-sensitively here, disagreeing with `sort_projects`).
  pub fn add_project(&mut self, project: Project) {
    self.projects.push(project);
    self.sort_projects();
  }

  /// Look up a project by its pakku ID (alias of [`LockFile::find_project`]).
  pub fn get_project(&self, pakku_id: &str) -> Option<&Project> {
    self.find_project(pakku_id)
  }

  /// Names of all configured loaders (order unspecified).
  pub fn get_loader_names(&self) -> Vec<String> {
    self.loaders.keys().cloned().collect()
  }

  /// Remove and return the project with the given pakku ID, or `None`
  /// if no such project exists.
  pub fn remove_project(&mut self, pakku_id: &str) -> Option<Project> {
    self
      .projects
      .iter()
      .position(|p| p.pakku_id.as_deref() == Some(pakku_id))
      .map(|pos| self.projects.remove(pos))
  }

  /// Find a project by its pakku ID.
  pub fn find_project(&self, pakku_id: &str) -> Option<&Project> {
    self
      .projects
      .iter()
      .find(|p| p.pakku_id.as_deref() == Some(pakku_id))
  }

  /// Find a project by its pakku ID, mutably.
  pub fn find_project_mut(&mut self, pakku_id: &str) -> Option<&mut Project> {
    self
      .projects
      .iter_mut()
      .find(|p| p.pakku_id.as_deref() == Some(pakku_id))
  }

  /// Find a project by its platform-specific ID (e.g. the Modrinth
  /// project ID), keyed by platform name.
  pub fn find_project_by_platform_id(
    &self,
    platform: &str,
    id: &str,
  ) -> Option<&Project> {
    self
      .projects
      .iter()
      .find(|p| p.id.get(platform).is_some_and(|pid| pid == id))
  }
}

1
src/model/override.rs Normal file
View file

@ -0,0 +1 @@

439
src/model/project.rs Normal file
View file

@ -0,0 +1,439 @@
use std::collections::{HashMap, HashSet};
use serde::{Deserialize, Serialize};
use super::enums::{ProjectSide, ProjectType, ReleaseType, UpdateStrategy};
/// A single resolved project (mod, resource pack, …) in the lockfile,
/// possibly known under different identifiers on multiple platforms.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Project {
  /// Pakku-internal identifier; omitted from JSON when absent.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub pakku_id: Option<String>,
  /// pakku IDs of linked projects (e.g. dependencies).
  #[serde(skip_serializing_if = "HashSet::is_empty", default)]
  pub pakku_links: HashSet<String>,
  /// Project category; serialized under the JSON key "type".
  #[serde(rename = "type")]
  pub r#type: ProjectType,
  /// Which side needs this project; defaults to `Both` when missing.
  #[serde(default = "default_side")]
  pub side: ProjectSide,
  /// Platform name -> slug on that platform.
  pub slug: HashMap<String, String>,
  /// Platform name -> display name on that platform.
  pub name: HashMap<String, String>,
  /// Platform name -> platform-specific project ID.
  pub id: HashMap<String, String>,
  /// How updates are chosen; `Latest` is the default and is omitted
  /// from JSON when still at that default.
  #[serde(
    default = "default_update_strategy",
    skip_serializing_if = "is_default_update_strategy"
  )]
  pub update_strategy: UpdateStrategy,
  /// Whether the project may be redistributed; defaults to `true` and
  /// is omitted from JSON at that default.
  #[serde(
    default = "default_redistributable",
    skip_serializing_if = "is_default_redistributable"
  )]
  pub redistributable: bool,
  /// Optional install subdirectory override.
  #[serde(skip_serializing_if = "Option::is_none")]
  pub subpath: Option<String>,
  /// Extra user-provided names that `matches_input` accepts.
  #[serde(default, skip_serializing_if = "HashSet::is_empty")]
  pub aliases: HashSet<String>,
  /// Whether the project is included in exports; defaults to `true`
  /// and is omitted from JSON at that default.
  #[serde(
    default = "default_export",
    skip_serializing_if = "is_default_export"
  )]
  pub export: bool,
  /// Candidate files; `select_file` narrows this to the single best.
  pub files: Vec<ProjectFile>,
}
// Serde default/skip helpers for `Project`. The `default_*` functions
// supply values for fields missing from JSON; the matching
// `is_default_*` predicates suppress serialization when a field still
// holds its default, keeping the lockfile minimal.

/// Default for `Project::export`.
const fn default_export() -> bool {
  true
}
/// Default for `Project::side`.
const fn default_side() -> ProjectSide {
  ProjectSide::Both
}
/// Default for `Project::update_strategy`.
const fn default_update_strategy() -> UpdateStrategy {
  UpdateStrategy::Latest
}
/// Default for `Project::redistributable`.
const fn default_redistributable() -> bool {
  true
}
/// True when `update_strategy` is still the default (`Latest`).
const fn is_default_update_strategy(strategy: &UpdateStrategy) -> bool {
  matches!(strategy, UpdateStrategy::Latest)
}
/// True when `redistributable` is still the default (`true`).
const fn is_default_redistributable(redistributable: &bool) -> bool {
  *redistributable
}
/// True when `export` is still the default (`true`).
const fn is_default_export(export: &bool) -> bool {
  *export
}
impl Project {
pub fn new(pakku_id: String, typ: ProjectType, side: ProjectSide) -> Self {
Self {
pakku_id: Some(pakku_id),
pakku_links: HashSet::new(),
r#type: typ,
side,
slug: HashMap::new(),
name: HashMap::new(),
id: HashMap::new(),
update_strategy: UpdateStrategy::Latest,
redistributable: true,
subpath: None,
aliases: HashSet::new(),
export: true,
files: Vec::new(),
}
}
pub fn get_platform_id(&self, platform: &str) -> Option<&String> {
self.id.get(platform)
}
pub fn get_name(&self) -> String {
self.name.values().next().cloned().unwrap_or_else(|| {
self
.pakku_id
.clone()
.unwrap_or_else(|| "unknown".to_string())
})
}
pub fn matches_input(&self, input: &str) -> bool {
// Check pakku_id
if let Some(ref pakku_id) = self.pakku_id
&& pakku_id == input
{
return true;
}
// Check slugs
if self.slug.values().any(|s| s == input) {
return true;
}
// Check names (case-insensitive)
if self.name.values().any(|n| n.eq_ignore_ascii_case(input)) {
return true;
}
// Check IDs
if self.id.values().any(|i| i == input) {
return true;
}
// Check aliases
if self.aliases.contains(input) {
return true;
}
false
}
pub fn add_platform(
&mut self,
platform: String,
id: String,
slug: String,
name: String,
) {
self.id.insert(platform.clone(), id);
self.slug.insert(platform.clone(), slug);
self.name.insert(platform, name);
}
pub fn merge(&mut self, other: Self) {
// Merge platform identifiers
for (platform, id) in other.id {
self.id.entry(platform.clone()).or_insert(id);
}
for (platform, slug) in other.slug {
self.slug.entry(platform.clone()).or_insert(slug);
}
for (platform, name) in other.name {
self.name.entry(platform).or_insert(name);
}
// Merge pakku links
self.pakku_links.extend(other.pakku_links);
// Merge files
for file in other.files {
if !self.files.iter().any(|f| f.id == file.id) {
self.files.push(file);
}
}
// Merge aliases
self.aliases.extend(other.aliases);
}
pub fn select_file(
&mut self,
mc_versions: &[String],
loaders: &[String],
) -> crate::error::Result<()> {
// Filter compatible files
let compatible_files: Vec<_> = self
.files
.iter()
.filter(|f| f.is_compatible(mc_versions, loaders))
.collect();
if compatible_files.is_empty() {
return Err(crate::error::PakkerError::FileSelectionError(format!(
"No compatible files found for {}",
self.get_name()
)));
}
// Sort by release type (release > beta > alpha) and date
let mut sorted_files = compatible_files.clone();
sorted_files.sort_by(|a, b| {
use super::enums::ReleaseType;
let type_order = |rt: &ReleaseType| {
match rt {
ReleaseType::Release => 0,
ReleaseType::Beta => 1,
ReleaseType::Alpha => 2,
}
};
type_order(&a.release_type)
.cmp(&type_order(&b.release_type))
.then_with(|| b.date_published.cmp(&a.date_published))
});
// Keep only the best file
if let Some(best_file) = sorted_files.first() {
self.files = vec![(*best_file).clone()];
}
Ok(())
}
}
/// A single downloadable artifact belonging to a [`Project`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectFile {
  /// File category; serialized under the JSON key "type".
  #[serde(rename = "type")]
  pub file_type: String,
  /// On-disk file name (e.g. "sodium-0.5.0.jar").
  pub file_name: String,
  /// Minecraft versions this file supports.
  pub mc_versions: Vec<String>,
  /// Loaders this file supports; empty means loader-agnostic.
  #[serde(default)]
  pub loaders: Vec<String>,
  /// Stability channel (release/beta/alpha).
  pub release_type: ReleaseType,
  /// Download URL.
  pub url: String,
  /// Platform-specific file ID.
  pub id: String,
  /// Platform-specific ID of the owning project.
  pub parent_id: String,
  /// Hash algorithm name -> hex digest, for download verification.
  pub hashes: HashMap<String, String>,
  /// File IDs of required dependencies.
  pub required_dependencies: Vec<String>,
  /// File size in bytes.
  pub size: u64,
  /// Publication timestamp as an ISO-8601 string.
  pub date_published: String,
}
impl ProjectFile {
  /// Whether this file works with any of the requested Minecraft
  /// versions AND any of the requested loaders.
  ///
  /// The loader check passes when the file declares no loaders at all
  /// (loader-agnostic), when one of its loaders appears in `loaders`,
  /// or when one of its loaders is a platform-agnostic special loader
  /// ("minecraft", "iris", "optifine", "datapack").
  pub fn is_compatible(
    &self,
    mc_versions: &[String],
    loaders: &[String],
  ) -> bool {
    const VALID_LOADERS: &[&str] =
      &["minecraft", "iris", "optifine", "datapack"];
    // Guard: at least one supported MC version must be requested.
    if !self.mc_versions.iter().any(|v| mc_versions.contains(v)) {
      return false;
    }
    // No declared loaders means the file is loader-agnostic.
    if self.loaders.is_empty() {
      return true;
    }
    self
      .loaders
      .iter()
      .any(|l| loaders.contains(l) || VALID_LOADERS.contains(&l.as_str()))
  }
}
#[cfg(test)]
mod tests {
  use super::*;

  /// Build a `ProjectFile` fixture; only the fields that vary between
  /// tests are parameters, everything else gets a fixed dummy value.
  fn make_file(
    file_name: &str,
    mc_versions: &[&str],
    loaders: &[&str],
    release_type: ReleaseType,
    id: &str,
    date_published: &str,
  ) -> ProjectFile {
    ProjectFile {
      file_type: "mod".to_string(),
      file_name: file_name.to_string(),
      mc_versions: mc_versions.iter().map(ToString::to_string).collect(),
      loaders: loaders.iter().map(ToString::to_string).collect(),
      release_type,
      url: format!("https://example.com/{file_name}"),
      id: id.to_string(),
      parent_id: "mod123".to_string(),
      hashes: HashMap::new(),
      required_dependencies: vec![],
      size: 1024,
      date_published: date_published.to_string(),
    }
  }

  #[test]
  fn test_project_new() {
    let project =
      Project::new("test-id".to_string(), ProjectType::Mod, ProjectSide::Both);
    assert_eq!(project.pakku_id, Some("test-id".to_string()));
    assert_eq!(project.r#type, ProjectType::Mod);
    assert_eq!(project.side, ProjectSide::Both);
    assert!(project.pakku_links.is_empty());
    assert!(project.files.is_empty());
  }

  #[test]
  fn test_project_serialization() {
    let mut project =
      Project::new("test-id".to_string(), ProjectType::Mod, ProjectSide::Both);
    project
      .slug
      .insert("modrinth".to_string(), "test-slug".to_string());
    project
      .name
      .insert("modrinth".to_string(), "Test Mod".to_string());
    project
      .id
      .insert("modrinth".to_string(), "abc123".to_string());
    let json = serde_json::to_string(&project).unwrap();
    let deserialized: Project = serde_json::from_str(&json).unwrap();
    assert_eq!(deserialized.pakku_id, project.pakku_id);
    assert_eq!(deserialized.r#type, project.r#type);
    assert_eq!(deserialized.side, project.side);
    assert_eq!(
      deserialized.slug.get("modrinth"),
      Some(&"test-slug".to_string())
    );
  }

  #[test]
  fn test_project_file_is_compatible_with_empty_loaders() {
    // A file declaring no loaders is loader-agnostic and must match.
    let file = make_file(
      "test.jar",
      &["1.20.1"],
      &[],
      ReleaseType::Release,
      "file123",
      "2024-01-01T00:00:00Z",
    );
    let lockfile_mc = vec!["1.20.1".to_string()];
    let lockfile_loaders = vec!["fabric".to_string()];
    assert!(file.is_compatible(&lockfile_mc, &lockfile_loaders));
  }

  #[test]
  fn test_project_file_is_compatible_with_matching_loaders() {
    let file = make_file(
      "test.jar",
      &["1.20.1"],
      &["fabric"],
      ReleaseType::Release,
      "file123",
      "2024-01-01T00:00:00Z",
    );
    let lockfile_mc = vec!["1.20.1".to_string()];
    let lockfile_loaders = vec!["fabric".to_string()];
    assert!(file.is_compatible(&lockfile_mc, &lockfile_loaders));
  }

  #[test]
  fn test_project_file_is_compatible_with_valid_loaders() {
    // Platform-agnostic loaders are accepted regardless of the
    // requested loader list.
    for loader in ["minecraft", "iris", "optifine", "datapack"] {
      let file = make_file(
        "test.jar",
        &["1.20.1"],
        &[loader],
        ReleaseType::Release,
        "file123",
        "2024-01-01T00:00:00Z",
      );
      let lockfile_mc = vec!["1.20.1".to_string()];
      let lockfile_loaders = vec!["fabric".to_string()];
      assert!(
        file.is_compatible(&lockfile_mc, &lockfile_loaders),
        "Failed for valid loader: {}",
        loader
      );
    }
  }

  #[test]
  fn test_project_file_incompatible() {
    // Wrong MC version and wrong loader: must be rejected.
    let file = make_file(
      "test.jar",
      &["1.19.4"],
      &["forge"],
      ReleaseType::Release,
      "file123",
      "2024-01-01T00:00:00Z",
    );
    let lockfile_mc = vec!["1.20.1".to_string()];
    let lockfile_loaders = vec!["fabric".to_string()];
    assert!(!file.is_compatible(&lockfile_mc, &lockfile_loaders));
  }

  #[test]
  fn test_project_select_file() {
    let mut project =
      Project::new("test-id".to_string(), ProjectType::Mod, ProjectSide::Both);
    // A newer alpha vs an older release: the release must win, since
    // stability outranks recency.
    project.files.push(make_file(
      "alpha.jar",
      &["1.20.1"],
      &["fabric"],
      ReleaseType::Alpha,
      "file1",
      "2024-01-03T00:00:00Z",
    ));
    project.files.push(make_file(
      "release.jar",
      &["1.20.1"],
      &["fabric"],
      ReleaseType::Release,
      "file2",
      "2024-01-01T00:00:00Z",
    ));
    let lockfile_mc = vec!["1.20.1".to_string()];
    let lockfile_loaders = vec!["fabric".to_string()];
    let result = project.select_file(&lockfile_mc, &lockfile_loaders);
    assert!(result.is_ok());
    // select_file keeps exactly one file — the stable release.
    assert_eq!(project.files.len(), 1);
    assert_eq!(project.files[0].id, "file2");
  }
}

102
src/platform.rs Normal file
View file

@ -0,0 +1,102 @@
mod curseforge;
mod github;
mod modrinth;
mod traits;
use std::sync::Arc;
pub use curseforge::CurseForgePlatform;
pub use github::GitHubPlatform;
pub use modrinth::ModrinthPlatform;
use once_cell::sync::Lazy;
pub use traits::PlatformClient;
use crate::{error::Result, rate_limiter::RateLimiter};
/// Process-wide rate limiter shared by every client built through
/// [`create_platform`]; `wait_for` is keyed by platform name, so each
/// platform appears to be throttled independently — confirm against
/// `RateLimiter`'s implementation.
static RATE_LIMITER: Lazy<Arc<RateLimiter>> =
  Lazy::new(|| Arc::new(RateLimiter::new(None)));
pub fn create_platform(
platform: &str,
api_key: Option<String>,
) -> Result<Box<dyn PlatformClient>> {
let client = create_client(platform, api_key)?;
let platform_name = platform.to_string();
Ok(Box::new(RateLimitedPlatform {
platform: client,
rate_limiter: RATE_LIMITER.clone(),
platform_name,
}))
}
fn create_client(
platform: &str,
api_key: Option<String>,
) -> Result<Box<dyn PlatformClient>> {
match platform {
"modrinth" => Ok(Box::new(ModrinthPlatform::new())),
"curseforge" => Ok(Box::new(CurseForgePlatform::new(api_key))),
"github" => Ok(Box::new(GitHubPlatform::new(api_key))),
_ => {
Err(crate::error::PakkerError::ConfigError(format!(
"Unknown platform: {platform}"
)))
},
}
}
/// Decorator that delegates every call to an inner [`PlatformClient`]
/// after waiting on the shared rate limiter.
struct RateLimitedPlatform {
  /// Wrapped client that performs the real requests.
  platform: Box<dyn PlatformClient>,
  /// Shared limiter consulted before each delegated call.
  rate_limiter: Arc<RateLimiter>,
  /// Key selecting the per-platform rate bucket.
  platform_name: String,
}
// Forwarding impl: each method blocks on the shared per-platform rate
// limiter, then delegates to the wrapped client unchanged.
#[async_trait::async_trait]
impl PlatformClient for RateLimitedPlatform {
  async fn request_project(
    &self,
    identifier: &str,
    mc_versions: &[String],
    loaders: &[String],
  ) -> Result<crate::model::Project> {
    self.rate_limiter.wait_for(&self.platform_name).await;
    self
      .platform
      .request_project(identifier, mc_versions, loaders)
      .await
  }
  async fn request_project_files(
    &self,
    project_id: &str,
    mc_versions: &[String],
    loaders: &[String],
  ) -> Result<Vec<crate::model::ProjectFile>> {
    self.rate_limiter.wait_for(&self.platform_name).await;
    self
      .platform
      .request_project_files(project_id, mc_versions, loaders)
      .await
  }
  async fn request_project_with_files(
    &self,
    identifier: &str,
    mc_versions: &[String],
    loaders: &[String],
  ) -> Result<crate::model::Project> {
    self.rate_limiter.wait_for(&self.platform_name).await;
    self
      .platform
      .request_project_with_files(identifier, mc_versions, loaders)
      .await
  }
  async fn lookup_by_hash(
    &self,
    hash: &str,
  ) -> Result<Option<crate::model::Project>> {
    self.rate_limiter.wait_for(&self.platform_name).await;
    self.platform.lookup_by_hash(hash).await
  }
}

383
src/platform/curseforge.rs Normal file
View file

@ -0,0 +1,383 @@
use std::collections::HashMap;
use async_trait::async_trait;
use reqwest::Client;
use serde::{Deserialize, Serialize};
use super::traits::PlatformClient;
use crate::{
error::{PakkerError, Result},
model::{Project, ProjectFile, ProjectSide, ProjectType, ReleaseType},
utils::generate_pakku_id,
};
const CURSEFORGE_API_BASE: &str = "https://api.curseforge.com/v1";
const LOADER_VERSION_TYPE_ID: i32 = 68441;
/// CurseForge API client; every request requires an API key.
pub struct CurseForgePlatform {
  /// Shared HTTP client (connection pooling).
  client: Client,
  /// Key sent as `x-api-key`; absence becomes an error at request time.
  api_key: Option<String>,
}
impl CurseForgePlatform {
pub fn new(api_key: Option<String>) -> Self {
Self {
client: Client::new(),
api_key,
}
}
fn get_headers(&self) -> Result<reqwest::header::HeaderMap> {
let mut headers = reqwest::header::HeaderMap::new();
if let Some(api_key) = &self.api_key {
headers.insert(
"x-api-key",
reqwest::header::HeaderValue::from_str(api_key).map_err(|_| {
PakkerError::ConfigError("Invalid API key".to_string())
})?,
);
} else {
return Err(PakkerError::ConfigError(
"CurseForge API key required".to_string(),
));
}
Ok(headers)
}
const fn map_class_id(class_id: u32) -> ProjectType {
match class_id {
6 => ProjectType::Mod,
12 => ProjectType::ResourcePack,
6945 => ProjectType::DataPack,
6552 => ProjectType::Shader,
17 => ProjectType::World,
_ => ProjectType::Mod,
}
}
const fn map_release_type(release_type: u32) -> ReleaseType {
match release_type {
1 => ReleaseType::Release,
2 => ReleaseType::Beta,
3 => ReleaseType::Alpha,
_ => ReleaseType::Release,
}
}
fn convert_project(&self, cf_project: CurseForgeProject) -> Project {
let pakku_id = generate_pakku_id();
let project_type = Self::map_class_id(cf_project.class_id.unwrap_or(6));
let mut project = Project::new(pakku_id, project_type, ProjectSide::Both);
project.add_platform(
"curseforge".to_string(),
cf_project.id.to_string(),
cf_project.slug.clone(),
cf_project.name,
);
project.redistributable = false;
project
}
fn convert_file(
&self,
cf_file: CurseForgeFile,
project_id: &str,
) -> ProjectFile {
let mut hashes = HashMap::new();
for hash in cf_file.hashes {
hashes.insert(hash.algo.to_lowercase(), hash.value.clone());
}
let mc_versions: Vec<String> = cf_file.game_versions.clone();
// Extract loaders from sortableGameVersions with LOADER_VERSION_TYPE_ID
let loaders: Vec<String> = cf_file
.sortable_game_versions
.iter()
.filter(|v| v.game_version_type_id == Some(LOADER_VERSION_TYPE_ID))
.map(|v| v.game_version_name.to_lowercase())
.collect();
ProjectFile {
file_type: "mod".to_string(),
file_name: cf_file.file_name.clone(),
mc_versions,
loaders,
release_type: Self::map_release_type(cf_file.release_type.unwrap_or(1)),
url: cf_file.download_url.clone().unwrap_or_else(|| {
format!(
"https://edge.forgecdn.net/files/{}/{}/{}",
cf_file.id / 1000,
cf_file.id % 1000,
cf_file.file_name
)
}),
id: cf_file.id.to_string(),
parent_id: project_id.to_string(),
hashes,
required_dependencies: cf_file
.dependencies
.iter()
.filter(|d| d.relation_type == 3)
.map(|d| d.mod_id.to_string())
.collect(),
size: cf_file.file_length,
date_published: cf_file.file_date.clone(),
}
}
async fn search_project_by_slug(
&self,
slug: &str,
) -> Result<CurseForgeProject> {
let url =
format!("{CURSEFORGE_API_BASE}/mods/search?gameId=432&slug={slug}");
let response = self
.client
.get(&url)
.headers(self.get_headers()?)
.send()
.await?;
if !response.status().is_success() {
return Err(PakkerError::ProjectNotFound(slug.to_string()));
}
let result: CurseForgeSearchResponse = response.json().await?;
result
.data
.into_iter()
.find(|p| p.slug == slug)
.ok_or_else(|| PakkerError::ProjectNotFound(slug.to_string()))
}
}
#[async_trait]
impl PlatformClient for CurseForgePlatform {
  /// Fetch a project by numeric mod id, falling back to a slug search
  /// when the identifier is not numeric or the id lookup fails.
  async fn request_project(
    &self,
    identifier: &str,
    _mc_versions: &[String],
    _loaders: &[String],
  ) -> Result<Project> {
    if let Ok(mod_id) = identifier.parse::<u32>() {
      let url = format!("{CURSEFORGE_API_BASE}/mods/{mod_id}");
      let response = self
        .client
        .get(&url)
        .headers(self.get_headers()?)
        .send()
        .await?;
      if response.status().is_success() {
        let result: CurseForgeProjectResponse = response.json().await?;
        return Ok(self.convert_project(result.data));
      }
    }
    let cf_project = self.search_project_by_slug(identifier).await?;
    Ok(self.convert_project(cf_project))
  }
  /// List a project's files, asking the API to pre-filter by game
  /// version and loader.
  async fn request_project_files(
    &self,
    project_id: &str,
    mc_versions: &[String],
    loaders: &[String],
  ) -> Result<Vec<ProjectFile>> {
    let mut url = format!("{CURSEFORGE_API_BASE}/mods/{project_id}/files");
    // Add query parameters for server-side filtering (Pakku-compatible)
    let mut query_params = Vec::new();
    // Add gameVersionTypeId for each MC version (requires lookup)
    if !mc_versions.is_empty() {
      // Fetch game version type IDs
      // Add MC version gameVersionTypeId = 73250 for Minecraft versions
      // NOTE(review): repeating `gameVersion` once per version assumes
      // the endpoint accepts multiple values — confirm against the
      // CurseForge API docs.
      for mc_version in mc_versions {
        query_params.push(("gameVersion", mc_version.clone()));
      }
      query_params.push(("gameVersionTypeId", "73250".to_string()));
    }
    // Add mod loader types
    if !loaders.is_empty() {
      let loader_str = loaders.join(",");
      query_params.push(("modLoaderTypes", loader_str));
    }
    // Values are appended verbatim (no percent-encoding); current
    // values are version strings and loader names, which are URL-safe.
    if !query_params.is_empty() {
      let query_string = query_params
        .iter()
        .map(|(k, v)| format!("{k}={v}"))
        .collect::<Vec<_>>()
        .join("&");
      url = format!("{url}?{query_string}");
    }
    let response = self
      .client
      .get(&url)
      .headers(self.get_headers()?)
      .send()
      .await?;
    if !response.status().is_success() {
      return Err(PakkerError::ProjectNotFound(project_id.to_string()));
    }
    let result: CurseForgeFilesResponse = response.json().await?;
    let files: Vec<ProjectFile> = result
      .data
      .into_iter()
      .map(|f| self.convert_file(f, project_id))
      .collect();
    Ok(files)
  }
  /// Fetch a project and populate its `files` in one call.
  async fn request_project_with_files(
    &self,
    identifier: &str,
    mc_versions: &[String],
    loaders: &[String],
  ) -> Result<Project> {
    let mut project = self
      .request_project(identifier, mc_versions, loaders)
      .await?;
    let project_id = project
      .get_platform_id("curseforge")
      .ok_or_else(|| {
        PakkerError::InternalError("Missing curseforge ID".to_string())
      })?
      .clone();
    let files = self
      .request_project_files(&project_id, mc_versions, loaders)
      .await?;
    project.files = files;
    Ok(project)
  }
  /// Resolve a file fingerprint to its project; `Ok(None)` when the
  /// fingerprint is unknown or the endpoint returns an error status.
  async fn lookup_by_hash(&self, hash: &str) -> Result<Option<Project>> {
    // CurseForge uses Murmur2 hash for file fingerprints
    let fingerprint = hash
      .parse::<u32>()
      .map_err(|_| PakkerError::InvalidHash(hash.to_string()))?;
    let url = format!("{CURSEFORGE_API_BASE}/fingerprints");
    let response = self
      .client
      .post(&url)
      .headers(self.get_headers()?)
      .json(&serde_json::json!({
        "fingerprints": [fingerprint]
      }))
      .send()
      .await?;
    if !response.status().is_success() {
      return Ok(None);
    }
    let response_data: serde_json::Value = response.json().await?;
    // Walk `data.exactMatches[0].file.modId` out of the raw JSON.
    if let Some(matches) = response_data["data"]["exactMatches"].as_array()
      && let Some(first_match) = matches.first()
      && let Some(file) = first_match["file"].as_object()
    {
      let mod_id = file["modId"]
        .as_u64()
        .ok_or_else(|| {
          PakkerError::InvalidResponse("Missing modId".to_string())
        })?
        .to_string();
      return self
        .request_project_with_files(&mod_id, &[], &[])
        .await
        .map(Some);
    }
    Ok(None)
  }
}
// CurseForge API models
/// Subset of the CurseForge mod object this client consumes.
/// `rename_all` replaces the former per-field rename of `class_id`;
/// single-word fields are unaffected by camelCase.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
struct CurseForgeProject {
  id: u32,
  name: String,
  slug: String,
  /// Class id distinguishing mods/resource packs/etc.; absent for some
  /// results.
  class_id: Option<u32>,
}
/// One entry of a file's `sortableGameVersions`; `rename_all` replaces
/// the former per-field renames with identical wire names.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
struct CurseForgeGameVersion {
  game_version_name: String,
  /// Type id; `LOADER_VERSION_TYPE_ID` marks mod-loader entries.
  game_version_type_id: Option<i32>,
}
/// File hash entry: algorithm name plus hex digest.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct CurseForgeHash {
  algo: String,
  value: String,
}
/// File dependency edge; `rename_all` replaces the former per-field
/// renames with identical wire names.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
struct CurseForgeDependency {
  mod_id: u32,
  /// Numeric relation kind; 3 is treated as "required" by the caller.
  relation_type: u32,
}
/// Envelope for a single-project response (`{ "data": {...} }`).
#[derive(Debug, Clone, Deserialize, Serialize)]
struct CurseForgeProjectResponse {
  data: CurseForgeProject,
}
/// Subset of the CurseForge file object this client consumes.
/// `rename_all` replaces the former per-field renames; `id`, `hashes`
/// and `dependencies` are single words and keep their names.
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
struct CurseForgeFile {
  id: u32,
  file_name: String,
  /// Absent when the author disabled third-party distribution; the
  /// converter then reconstructs a CDN URL from the file id.
  download_url: Option<String>,
  game_versions: Vec<String>,
  sortable_game_versions: Vec<CurseForgeGameVersion>,
  release_type: Option<u32>,
  /// File size in bytes.
  file_length: u64,
  file_date: String,
  hashes: Vec<CurseForgeHash>,
  dependencies: Vec<CurseForgeDependency>,
}
/// Envelope for the file-list response (`{ "data": [...] }`).
#[derive(Debug, Clone, Deserialize, Serialize)]
struct CurseForgeFilesResponse {
  data: Vec<CurseForgeFile>,
}
/// Envelope for the search response (`{ "data": [...] }`).
#[derive(Debug, Clone, Deserialize, Serialize)]
struct CurseForgeSearchResponse {
  data: Vec<CurseForgeProject>,
}

580
src/platform/github.rs Normal file
View file

@ -0,0 +1,580 @@
use std::collections::HashMap;
use async_trait::async_trait;
use regex::Regex;
use reqwest::Client;
use serde::{Deserialize, Serialize};
use super::traits::PlatformClient;
use crate::{
error::{PakkerError, Result},
model::{Project, ProjectFile, ProjectSide, ProjectType, ReleaseType},
utils::generate_pakku_id,
};
const GITHUB_API_BASE: &str = "https://api.github.com";
/// GitHub releases client; works unauthenticated, but a token raises
/// rate limits and enables code search.
pub struct GitHubPlatform {
  /// Shared HTTP client (connection pooling).
  client: Client,
  /// Optional bearer token for the `Authorization` header.
  token: Option<String>,
}
impl GitHubPlatform {
  /// Create a client; `token` is optional (anonymous access works with
  /// lower rate limits).
  pub fn new(token: Option<String>) -> Self {
    Self {
      client: Client::new(),
      token,
    }
  }
  /// Build request headers: a static `User-Agent` (required by the
  /// GitHub API) plus an optional bearer token.
  ///
  /// # Errors
  /// `ConfigError` when the token is not a valid HTTP header value.
  fn get_headers(&self) -> Result<reqwest::header::HeaderMap> {
    let mut headers = reqwest::header::HeaderMap::new();
    headers.insert(
      reqwest::header::USER_AGENT,
      reqwest::header::HeaderValue::from_static("Pakker"),
    );
    if let Some(token) = &self.token {
      headers.insert(
        reqwest::header::AUTHORIZATION,
        reqwest::header::HeaderValue::from_str(&format!("Bearer {token}"))
          .map_err(|_| {
            PakkerError::ConfigError("Invalid GitHub token".to_string())
          })?,
      );
    }
    Ok(headers)
  }
  /// Normalize a repository identifier to `(owner, repo)`.
  ///
  /// # Errors
  /// `InvalidInput` when fewer than two `/`-separated parts remain
  /// after stripping known prefixes.
  fn parse_repo_identifier(identifier: &str) -> Result<(String, String)> {
    // Expected formats:
    // - "owner/repo"
    // - "github:owner/repo"
    // - "https://github.com/owner/repo"
    let identifier = identifier
      .trim_start_matches("github:")
      .trim_start_matches("https://github.com/")
      .trim_start_matches("http://github.com/")
      .trim_end_matches(".git");
    let parts: Vec<&str> = identifier.split('/').collect();
    if parts.len() >= 2 {
      Ok((parts[0].to_string(), parts[1].to_string()))
    } else {
      Err(PakkerError::InvalidInput(format!(
        "Invalid GitHub repository identifier: {identifier}"
      )))
    }
  }
  /// Build a [`Project`] from a release, using the release name (or the
  /// repo name as fallback) as the display title. The release assets
  /// themselves are converted separately.
  fn convert_release(
    &self,
    owner: &str,
    repo: &str,
    release: GitHubRelease,
  ) -> Project {
    let pakku_id = generate_pakku_id();
    // GitHub carries no side/type metadata, so default to Mod / Both.
    let mut project =
      Project::new(pakku_id, ProjectType::Mod, ProjectSide::Both);
    let repo_full = format!("{owner}/{repo}");
    project.add_platform(
      "github".to_string(),
      repo_full.clone(),
      repo_full,
      release.name.unwrap_or_else(|| repo.to_string()),
    );
    project
  }
}
// Helper functions for extracting metadata from GitHub releases
fn extract_mc_versions(tag: &str, asset_name: &str) -> Vec<String> {
let re = Regex::new(r"(?:^|[^\d.])(\d+\.\d+(?:\.\d+)?)(?:[^\d]|$)").unwrap();
let mut versions = Vec::new();
log::debug!("Extracting MC versions from tag='{tag}', asset='{asset_name}'");
for text in &[tag, asset_name] {
for cap in re.captures_iter(text) {
if let Some(version) = cap.get(1) {
let v = version.as_str().to_string();
if !versions.contains(&v) {
log::debug!(" Found MC version: {v}");
versions.push(v);
}
}
}
}
log::debug!("Extracted MC versions: {versions:?}");
versions
}
/// Detect mod loaders mentioned in a release tag or asset file name.
/// Matching is case-insensitive; results are pushed in the fixed order
/// fabric, forge, neoforge, quilt.
fn extract_loaders(tag: &str, asset_name: &str) -> Vec<String> {
  let haystack = format!("{} {}", tag.to_lowercase(), asset_name.to_lowercase());
  log::debug!("Extracting loaders from: '{haystack}'");
  let mut found = Vec::new();
  for loader in ["fabric", "forge", "neoforge", "quilt"] {
    // "forge" would also match inside "neoforge", so it needs an
    // explicit exclusion.
    let hit = if loader == "forge" {
      haystack.contains("forge") && !haystack.contains("neoforge")
    } else {
      haystack.contains(loader)
    };
    if hit {
      log::debug!(" Found loader: {loader}");
      found.push(loader.to_string());
    }
  }
  log::debug!("Extracted loaders: {found:?}");
  found
}
/// Infer the project type from an asset file name and the repository
/// name; first matching category wins, defaulting to `Mod`.
/// Note the keyword lists differ between the asset name and the repo
/// name (e.g. "resource-pack" and "save" are only checked in the asset
/// name).
fn detect_project_type(asset_name: &str, repo_name: &str) -> ProjectType {
  let name = asset_name.to_lowercase();
  let repo = repo_name.to_lowercase();
  let contains_any =
    |text: &str, keys: &[&str]| keys.iter().any(|k| text.contains(k));
  if contains_any(&name, &["resourcepack", "resource-pack", "texture"])
    || contains_any(&repo, &["resourcepack", "texture"])
  {
    ProjectType::ResourcePack
  } else if contains_any(&name, &["datapack", "data-pack"])
    || contains_any(&repo, &["datapack"])
  {
    ProjectType::DataPack
  } else if contains_any(&name, &["shader"]) || contains_any(&repo, &["shader"])
  {
    ProjectType::Shader
  } else if contains_any(&name, &["world", "save"])
    || contains_any(&repo, &["world"])
  {
    ProjectType::World
  } else {
    // Default to mod (covers plain .jar files).
    ProjectType::Mod
  }
}
impl GitHubPlatform {
  /// Convert a release asset into a [`ProjectFile`], inferring MC
  /// versions, loaders and file type from the tag and asset name.
  fn convert_asset(
    &self,
    asset: GitHubAsset,
    release: &GitHubRelease,
    repo_id: &str,
    repo_name: &str,
  ) -> ProjectFile {
    // The GitHub release API exposes no file hashes.
    let hashes = HashMap::new();
    // Extract MC versions and loaders from tag and asset name
    let mc_versions = extract_mc_versions(&release.tag_name, &asset.name);
    let loaders = extract_loaders(&release.tag_name, &asset.name);
    // Detect project type from asset name and repo
    let file_type = match detect_project_type(&asset.name, repo_name) {
      ProjectType::Mod => "mod",
      ProjectType::ResourcePack => "resourcepack",
      ProjectType::DataPack => "datapack",
      ProjectType::Shader => "shader",
      ProjectType::World => "world",
    };
    ProjectFile {
      file_type: file_type.to_string(),
      file_name: asset.name.clone(),
      mc_versions,
      loaders,
      // GitHub only distinguishes prerelease vs. release; prereleases
      // are mapped to Beta.
      release_type: if release.prerelease {
        ReleaseType::Beta
      } else {
        ReleaseType::Release
      },
      url: asset.browser_download_url.clone(),
      id: asset.id.to_string(),
      parent_id: repo_id.to_string(),
      hashes,
      // No dependency metadata exists for GitHub releases.
      required_dependencies: vec![],
      size: asset.size,
      date_published: release.published_at.clone().unwrap_or_default(),
    }
  }
  /// Fetch the repository's latest release via `/releases/latest`.
  ///
  /// # Errors
  /// `ProjectNotFound` on any non-success status (including repos with
  /// no releases).
  async fn get_latest_release(
    &self,
    owner: &str,
    repo: &str,
  ) -> Result<GitHubRelease> {
    let url = format!("{GITHUB_API_BASE}/repos/{owner}/{repo}/releases/latest");
    let response = self
      .client
      .get(&url)
      .headers(self.get_headers()?)
      .send()
      .await?;
    if !response.status().is_success() {
      return Err(PakkerError::ProjectNotFound(format!("{owner}/{repo}")));
    }
    let release: GitHubRelease = response.json().await?;
    Ok(release)
  }
  /// Fetch the repository's releases.
  ///
  /// NOTE(review): only the first page is fetched (no pagination), so
  /// repositories with many releases are truncated — confirm this is
  /// acceptable.
  async fn get_all_releases(
    &self,
    owner: &str,
    repo: &str,
  ) -> Result<Vec<GitHubRelease>> {
    let url = format!("{GITHUB_API_BASE}/repos/{owner}/{repo}/releases");
    let response = self
      .client
      .get(&url)
      .headers(self.get_headers()?)
      .send()
      .await?;
    if !response.status().is_success() {
      return Err(PakkerError::ProjectNotFound(format!("{owner}/{repo}")));
    }
    let releases: Vec<GitHubRelease> = response.json().await?;
    Ok(releases)
  }
}
#[async_trait]
impl PlatformClient for GitHubPlatform {
  /// Build a project from the repo's latest release; version/loader
  /// filters are ignored (GitHub has no such metadata).
  async fn request_project(
    &self,
    identifier: &str,
    _mc_versions: &[String],
    _loaders: &[String],
  ) -> Result<Project> {
    let (owner, repo) = Self::parse_repo_identifier(identifier)?;
    let release = self.get_latest_release(&owner, &repo).await?;
    Ok(self.convert_release(&owner, &repo, release))
  }
  /// Collect `.jar`/`.zip` assets across all fetched releases.
  async fn request_project_files(
    &self,
    project_id: &str,
    _mc_versions: &[String],
    _loaders: &[String],
  ) -> Result<Vec<ProjectFile>> {
    let (owner, repo) = Self::parse_repo_identifier(project_id)?;
    let releases = self.get_all_releases(&owner, &repo).await?;
    let mut files = Vec::new();
    for release in releases {
      for asset in &release.assets {
        // Filter for .jar files (mods) or .zip files (modpacks)
        if asset.name.ends_with(".jar") || asset.name.ends_with(".zip") {
          let file =
            self.convert_asset(asset.clone(), &release, project_id, &repo);
          files.push(file);
        }
      }
    }
    Ok(files)
  }
  /// Fetch a project and populate its `files` in one call.
  async fn request_project_with_files(
    &self,
    identifier: &str,
    _mc_versions: &[String],
    _loaders: &[String],
  ) -> Result<Project> {
    let mut project = self
      .request_project(identifier, _mc_versions, _loaders)
      .await?;
    let project_id = project
      .get_platform_id("github")
      .ok_or_else(|| {
        PakkerError::InternalError("Missing github ID".to_string())
      })?
      .clone();
    let files = self
      .request_project_files(&project_id, _mc_versions, _loaders)
      .await?;
    project.files = files;
    Ok(project)
  }
  /// Best-effort lookup via the GitHub code-search API: every failure
  /// mode (network error, rate limit, parse error, no match) degrades
  /// to `Ok(None)` rather than an error.
  async fn lookup_by_hash(&self, hash: &str) -> Result<Option<Project>> {
    log::debug!("GitHub lookup_by_hash: searching for hash={hash}");
    // GitHub Code Search API: search for files containing the hash
    // Note: This is rate-limited (10 req/min without auth, 30 req/min with
    // auth)
    let url = format!("{GITHUB_API_BASE}/search/code?q={hash}+in:file");
    log::debug!("GitHub search URL: {url}");
    let response = match self
      .client
      .get(&url)
      .headers(self.get_headers()?)
      .send()
      .await
    {
      Ok(resp) => {
        log::debug!("GitHub search response status: {}", resp.status());
        resp
      },
      Err(e) => {
        log::warn!("GitHub hash lookup failed: {e}");
        return Ok(None);
      },
    };
    // Handle rate limiting gracefully
    if response.status().as_u16() == 403 {
      log::warn!("GitHub API rate limit exceeded for hash lookup");
      return Ok(None);
    }
    if !response.status().is_success() {
      log::debug!(
        "GitHub search returned non-success status: {}",
        response.status()
      );
      return Ok(None);
    }
    let search_result: GitHubCodeSearchResult = match response.json().await {
      Ok(result) => result,
      Err(e) => {
        log::warn!("Failed to parse GitHub search result: {e}");
        return Ok(None);
      },
    };
    log::debug!("GitHub search found {} items", search_result.items.len());
    // If we found matches, try to extract repo info from first result
    if let Some(item) = search_result.items.first() {
      let repo_full = item.repository.full_name.clone();
      log::info!("GitHub hash lookup found match in repo: {repo_full}");
      // Try to get the latest release for this repo
      match self.request_project(&repo_full, &[], &[]).await {
        Ok(project) => {
          log::info!("GitHub hash lookup succeeded for {repo_full}");
          Ok(Some(project))
        },
        Err(e) => {
          log::warn!("Failed to fetch project for {repo_full}: {e}");
          Ok(None)
        },
      }
    } else {
      log::debug!("GitHub hash lookup found no matches");
      Ok(None)
    }
  }
}
// GitHub API models
/// Subset of the GitHub release object this client consumes; field
/// names match the API's snake_case wire format directly.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct GitHubRelease {
  id: u64,
  tag_name: String,
  name: Option<String>,
  prerelease: bool,
  published_at: Option<String>,
  assets: Vec<GitHubAsset>,
}
/// Downloadable file attached to a release.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct GitHubAsset {
  id: u64,
  name: String,
  browser_download_url: String,
  /// Size in bytes.
  size: u64,
}
/// Top-level code-search response; only the item list is consumed.
#[derive(Debug, Deserialize)]
struct GitHubCodeSearchResult {
  items: Vec<GitHubCodeSearchItem>,
}
/// Single code-search hit; only the owning repository is consumed.
#[derive(Debug, Deserialize)]
struct GitHubCodeSearchItem {
  repository: GitHubRepository,
}
/// Repository reference from a search hit ("owner/name").
#[derive(Debug, Deserialize)]
struct GitHubRepository {
  full_name: String,
}
#[cfg(test)]
mod tests {
  use super::*;
  // Identifier normalization: bare, prefixed, URL and .git forms all
  // reduce to (owner, repo).
  #[test]
  fn test_parse_repo_identifier() {
    let cases = vec![
      ("owner/repo", ("owner", "repo")),
      ("github:owner/repo", ("owner", "repo")),
      ("https://github.com/owner/repo", ("owner", "repo")),
      ("https://github.com/owner/repo.git", ("owner", "repo")),
    ];
    for (input, (expected_owner, expected_repo)) in cases {
      let (owner, repo) = GitHubPlatform::parse_repo_identifier(input).unwrap();
      assert_eq!(owner, expected_owner);
      assert_eq!(repo, expected_repo);
    }
  }
  #[test]
  fn test_parse_repo_identifier_invalid() {
    let result = GitHubPlatform::parse_repo_identifier("invalid");
    assert!(result.is_err());
  }
  // These expectations intentionally include non-MC semver strings
  // (e.g. "1.0.0"): the extractor cannot distinguish mod versions from
  // Minecraft versions.
  #[test]
  fn test_extract_mc_versions() {
    let cases = vec![
      ("1.20.4-forge-1.0.0", "", vec!["1.20.4", "1.0.0"]),
      ("fabric-1.21-1.0.0", "", vec!["1.21"]),
      ("mc1.20.4", "", vec!["1.20.4"]),
      // Adjacent versions: the regex consumes the separator, so only
      // the first of "1.20.1-1.20.2" matches.
      ("1.20.1-1.20.2", "", vec!["1.20.1"]),
      ("mymod-1.0.0", "", vec!["1.0.0"]),
      ("mc1.20.4-v1.0.0", "", vec!["1.20.4", "1.0.0"]),
      ("v1.0.0", "mymod-1.20.4.jar", vec!["1.0.0", "1.20.4"]),
      ("1.20.1-47.1.0", "", vec!["1.20.1"]),
      ("v0.5.1+1.20.1", "", vec!["0.5.1"]),
      ("1.20.4-1.0.0+fabric", "", vec!["1.20.4"]),
      ("mc1.19.2-v2.1.3", "", vec!["1.19.2", "2.1.3"]),
      ("1.20-Snapshot", "", vec!["1.20"]),
      ("v3.0.0-beta.2+mc1.20.4", "", vec!["3.0.0", "1.20.4"]),
      ("1.16.5-1.0", "", vec!["1.16.5"]),
      ("forge-1.20.1-47.2.0", "", vec!["1.20.1"]),
      ("1.20.2-neoforge-20.2.59", "", vec!["1.20.2", "20.2.59"]),
      ("release-1.20.1", "", vec!["1.20.1"]),
      ("1.19.4_v2.5.0", "", vec!["1.19.4", "2.5.0"]),
      ("MC1.18.2-v1.0.0", "", vec!["1.18.2", "1.0.0"]),
      ("1.20.1-forge-v1.2.3", "", vec!["1.20.1", "1.2.3"]),
      ("Minecraft_1.19.2-v0.8.1", "", vec!["1.19.2", "0.8.1"]),
      ("build-1.20.4-2.1.0", "", vec!["1.20.4"]),
      ("1.20.x-1.5.0", "", vec!["1.20", "1.5.0"]),
      ("1.12.2-14.23.5.2859", "", vec!["1.12.2"]),
    ];
    for (tag, asset, expected) in cases {
      let result = extract_mc_versions(tag, asset);
      assert_eq!(
        result, expected,
        "Failed for tag: {}, asset: {}",
        tag, asset
      );
    }
  }
  #[test]
  fn test_extract_loaders() {
    let cases = vec![
      ("1.20.4-forge-1.0.0", "", vec!["forge"]),
      ("fabric-1.21-1.0.0", "", vec!["fabric"]),
      ("1.20.1-neoforge", "", vec!["neoforge"]),
      ("quilt-1.20.4", "", vec!["quilt"]),
      ("mymod-1.0.0", "", vec![]),
      // Push order is fabric, forge, neoforge, quilt (alphabetical).
      ("1.20.4-forge-fabric", "", vec!["fabric", "forge"]),
      ("v1.0.0", "mymod-fabric-1.20.4.jar", vec!["fabric"]),
      // Real-world patterns
      ("1.20.1-forge-47.1.0", "", vec!["forge"]),
      ("fabric-api-0.92.0+1.20.4", "", vec!["fabric"]),
      ("1.19.2-neoforge-20.2.59", "", vec!["neoforge"]),
      ("quilt-loader-0.23.0", "", vec!["quilt"]),
      ("1.20.4-Fabric-1.0.0", "", vec!["fabric"]), // Capitalized
      ("forge-1.20.1", "", vec!["forge"]),
      ("v1.0.0-fabric", "", vec!["fabric"]),
      ("1.18.2-forge+fabric", "", vec!["fabric", "forge"]), // Both loaders
      // Capitalized NeoForge
      ("NeoForge-1.20.2", "", vec!["neoforge"]),
      ("1.12.2-forge-14.23.5.2859", "", vec!["forge"]), // Old format
    ];
    for (tag, asset, expected) in cases {
      let result = extract_loaders(tag, asset);
      assert_eq!(
        result, expected,
        "Failed for tag: {}, asset: {}",
        tag, asset
      );
    }
  }
  #[test]
  fn test_detect_project_type() {
    let cases = vec![
      ("mymod.jar", "mymod", crate::model::ProjectType::Mod),
      (
        "texture-pack.zip",
        "texture",
        crate::model::ProjectType::ResourcePack,
      ),
      (
        "resourcepack.zip",
        "resources",
        crate::model::ProjectType::ResourcePack,
      ),
      (
        "datapack.zip",
        "data-stuff",
        crate::model::ProjectType::DataPack,
      ),
      (
        "shader.zip",
        "awesome-shaders",
        crate::model::ProjectType::Shader,
      ),
      ("world.zip", "my-world", crate::model::ProjectType::World),
      ("save.zip", "survival", crate::model::ProjectType::World),
      ("unknown.zip", "stuff", crate::model::ProjectType::Mod),
    ];
    for (filename, repo_name, expected) in cases {
      let result = detect_project_type(filename, repo_name);
      assert_eq!(
        result, expected,
        "Failed for filename: {}, repo: {}",
        filename, repo_name
      );
    }
  }
}

282
src/platform/modrinth.rs Normal file
View file

@ -0,0 +1,282 @@
use std::collections::HashMap;
use async_trait::async_trait;
use reqwest::Client;
use serde::{Deserialize, Serialize};
use super::traits::PlatformClient;
use crate::{
error::{PakkerError, Result},
model::{Project, ProjectFile, ProjectSide, ProjectType, ReleaseType},
utils::generate_pakku_id,
};
const MODRINTH_API_BASE: &str = "https://api.modrinth.com/v2";
/// Modrinth API client; no authentication is required for the
/// endpoints used here.
pub struct ModrinthPlatform {
  /// Shared HTTP client (connection pooling).
  client: Client,
}
impl ModrinthPlatform {
  /// Create a client with a fresh HTTP connection pool.
  pub fn new() -> Self {
    Self {
      client: Client::new(),
    }
  }
  /// Map Modrinth's `project_type` string; unknown values fall back to
  /// `Mod`.
  ///
  /// NOTE(review): whether "datapack" actually occurs as a Modrinth
  /// project_type (rather than as a loader) should be confirmed
  /// against the API docs.
  fn map_project_type(type_str: &str) -> ProjectType {
    match type_str {
      "mod" => ProjectType::Mod,
      "resourcepack" => ProjectType::ResourcePack,
      "datapack" => ProjectType::DataPack,
      "shader" => ProjectType::Shader,
      _ => ProjectType::Mod,
    }
  }
  /// Map side-support flags to a [`ProjectSide`]; the impossible
  /// (false, false) combination defaults to `Both`.
  const fn map_side(client: bool, server: bool) -> ProjectSide {
    match (client, server) {
      (true, true) => ProjectSide::Both,
      (true, false) => ProjectSide::Client,
      (false, true) => ProjectSide::Server,
      _ => ProjectSide::Both,
    }
  }
  /// Map Modrinth's `version_type` string; unknown values fall back to
  /// `Release`.
  fn map_release_type(type_str: &str) -> ReleaseType {
    match type_str {
      "release" => ReleaseType::Release,
      "beta" => ReleaseType::Beta,
      "alpha" => ReleaseType::Alpha,
      _ => ReleaseType::Release,
    }
  }
  /// Convert an API project into our model, registering the "modrinth"
  /// platform entry. A side is considered supported unless it is
  /// explicitly "unsupported".
  fn convert_project(&self, mr_project: ModrinthProject) -> Project {
    let pakku_id = generate_pakku_id();
    let mut project = Project::new(
      pakku_id,
      Self::map_project_type(&mr_project.project_type),
      Self::map_side(
        mr_project.client_side != "unsupported",
        mr_project.server_side != "unsupported",
      ),
    );
    project.add_platform(
      "modrinth".to_string(),
      mr_project.id.clone(),
      mr_project.slug.clone(),
      mr_project.title,
    );
    project
  }
  /// Convert a version into a [`ProjectFile`] using its primary file
  /// (or the first file when none is marked primary).
  ///
  /// # Panics
  /// Panics when the version has no files at all.
  /// NOTE(review): consider filtering such versions out upstream
  /// instead of panicking on API data.
  fn convert_version(
    &self,
    mr_version: ModrinthVersion,
    project_id: &str,
  ) -> ProjectFile {
    let mut hashes = HashMap::new();
    // Get primary file
    let primary_file = mr_version
      .files
      .iter()
      .find(|f| f.primary)
      .or_else(|| mr_version.files.first())
      .expect("Version must have at least one file");
    for (algo, hash) in &primary_file.hashes {
      hashes.insert(algo.clone(), hash.clone());
    }
    ProjectFile {
      file_type: "mod".to_string(),
      file_name: primary_file.filename.clone(),
      mc_versions: mr_version.game_versions.clone(),
      loaders: mr_version.loaders.clone(),
      release_type: Self::map_release_type(&mr_version.version_type),
      url: primary_file.url.clone(),
      id: mr_version.id.clone(),
      parent_id: project_id.to_string(),
      hashes,
      // Only hard ("required") dependencies with a project id are kept.
      required_dependencies: mr_version
        .dependencies
        .iter()
        .filter(|d| d.dependency_type == "required")
        .filter_map(|d| d.project_id.clone())
        .collect(),
      size: primary_file.size,
      date_published: mr_version.date_published.clone(),
    }
  }
}
#[async_trait]
impl PlatformClient for ModrinthPlatform {
  /// Fetch a project by id or slug (the endpoint accepts either).
  async fn request_project(
    &self,
    identifier: &str,
    _mc_versions: &[String],
    _loaders: &[String],
  ) -> Result<Project> {
    let url = format!("{MODRINTH_API_BASE}/project/{identifier}");
    let response = self.client.get(&url).send().await?;
    if !response.status().is_success() {
      return Err(PakkerError::ProjectNotFound(identifier.to_string()));
    }
    let mr_project: ModrinthProject = response.json().await?;
    Ok(self.convert_project(mr_project))
  }
  /// List a project's versions, filtered server-side by game version
  /// and loader when provided.
  async fn request_project_files(
    &self,
    project_id: &str,
    mc_versions: &[String],
    loaders: &[String],
  ) -> Result<Vec<ProjectFile>> {
    let mut url = format!("{MODRINTH_API_BASE}/project/{project_id}/version");
    // Add query parameters
    // NOTE(review): values are built as JSON-style arrays
    // (game_versions=["1.20.1"]) and appended without percent-encoding
    // — confirm this matches what the Modrinth API expects.
    let mut params = vec![];
    if !mc_versions.is_empty() {
      params.push(format!(
        "game_versions=[{}]",
        mc_versions
          .iter()
          .map(|v| format!("\"{v}\""))
          .collect::<Vec<_>>()
          .join(",")
      ));
    }
    if !loaders.is_empty() {
      params.push(format!(
        "loaders=[{}]",
        loaders
          .iter()
          .map(|l| format!("\"{l}\""))
          .collect::<Vec<_>>()
          .join(",")
      ));
    }
    if !params.is_empty() {
      url.push('?');
      url.push_str(&params.join("&"));
    }
    let response = self.client.get(&url).send().await?;
    if !response.status().is_success() {
      return Err(PakkerError::ProjectNotFound(project_id.to_string()));
    }
    let mr_versions: Vec<ModrinthVersion> = response.json().await?;
    Ok(
      mr_versions
        .into_iter()
        .map(|v| self.convert_version(v, project_id))
        .collect(),
    )
  }
  /// Fetch a project and populate its `files` in one call.
  async fn request_project_with_files(
    &self,
    identifier: &str,
    mc_versions: &[String],
    loaders: &[String],
  ) -> Result<Project> {
    let mut project = self
      .request_project(identifier, mc_versions, loaders)
      .await?;
    let project_id = project
      .get_platform_id("modrinth")
      .ok_or_else(|| {
        PakkerError::InternalError("Missing modrinth ID".to_string())
      })?
      .clone();
    let files = self
      .request_project_files(&project_id, mc_versions, loaders)
      .await?;
    project.files = files;
    Ok(project)
  }
  /// Resolve a file hash to its project. 404 means "unknown hash" and
  /// maps to `Ok(None)`; other failures are surfaced as errors.
  async fn lookup_by_hash(&self, hash: &str) -> Result<Option<Project>> {
    // Modrinth uses SHA-1 hash for file lookups
    let url = format!("{MODRINTH_API_BASE}/version_file/{hash}");
    let response = self.client.get(&url).send().await?;
    if response.status().as_u16() == 404 {
      return Ok(None);
    }
    if !response.status().is_success() {
      return Err(PakkerError::PlatformApiError(format!(
        "Modrinth API error: {}",
        response.status()
      )));
    }
    let version_data: serde_json::Value = response.json().await?;
    let project_id = version_data["project_id"].as_str().ok_or_else(|| {
      PakkerError::InvalidResponse("Missing project_id".to_string())
    })?;
    self
      .request_project_with_files(project_id, &[], &[])
      .await
      .map(Some)
  }
}
// Modrinth API models
/// Subset of the Modrinth project object this client consumes; the
/// API's wire names are already snake_case, so no serde renames are
/// needed (a former `rename = "project_type"` renamed the field to its
/// own name and was removed).
#[derive(Debug, Clone, Deserialize, Serialize)]
struct ModrinthProject {
  id: String,
  slug: String,
  title: String,
  project_type: String,
  /// "required", "optional" or "unsupported".
  client_side: String,
  server_side: String,
}
/// Subset of the Modrinth version object this client consumes.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct ModrinthVersion {
  id: String,
  project_id: String,
  name: String,
  version_number: String,
  game_versions: Vec<String>,
  /// "release", "beta" or "alpha".
  version_type: String,
  loaders: Vec<String>,
  date_published: String,
  files: Vec<ModrinthFile>,
  dependencies: Vec<ModrinthDependency>,
}
/// Downloadable file belonging to a version.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct ModrinthFile {
  /// Algorithm name -> hex digest (e.g. "sha1", "sha512").
  hashes: HashMap<String, String>,
  url: String,
  filename: String,
  /// True for the version's canonical file.
  primary: bool,
  /// Size in bytes.
  size: u64,
}
/// Version dependency edge; `project_id` may be absent for file-only
/// dependencies.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct ModrinthDependency {
  project_id: Option<String>,
  /// e.g. "required", "optional", "incompatible".
  dependency_type: String,
}

32
src/platform/traits.rs Normal file
View file

@ -0,0 +1,32 @@
use async_trait::async_trait;
use crate::{error::Result, model::Project};
#[async_trait]
pub trait PlatformClient: Send + Sync {
  /// Request a single project by identifier
  async fn request_project(
    &self,
    project_id: &str,
    mc_versions: &[String],
    loaders: &[String],
  ) -> Result<Project>;
  /// Request files for a project
  async fn request_project_files(
    &self,
    project_id: &str,
    mc_versions: &[String],
    loaders: &[String],
  ) -> Result<Vec<crate::model::ProjectFile>>;
  /// Request a project with its files
  async fn request_project_with_files(
    &self,
    project_id: &str,
    mc_versions: &[String],
    loaders: &[String],
  ) -> Result<Project>;
  /// Look a project up by the hash of one of its files; `Ok(None)`
  /// means the platform has no record of the hash.
  async fn lookup_by_hash(&self, hash: &str) -> Result<Option<Project>>;
}

104
src/rate_limiter.rs Normal file
View file

@ -0,0 +1,104 @@
use std::{
collections::HashMap,
sync::Arc,
time::{Duration, Instant},
};
use tokio::sync::Mutex;
use crate::error::Result;
/// Cheaply cloneable sliding-window rate limiter; clones share the
/// same underlying state.
#[derive(Clone)]
pub struct RateLimiter {
  /// State behind an async mutex so waiting tasks don't block threads.
  inner: Arc<Mutex<RateLimiterInner>>,
}
/// State guarded by the limiter's mutex.
struct RateLimiterInner {
  /// Timestamps of recent requests within the window, per platform.
  requests: HashMap<String, Vec<Instant>>,
  config: RateLimitConfig,
}
/// Per-platform requests-per-minute and burst limits.
#[derive(Clone, Debug)]
pub struct RateLimitConfig {
  pub modrinth_requests_per_min: u32,
  pub modrinth_burst: u32,
  pub curseforge_requests_per_min: u32,
  pub curseforge_burst: u32,
  pub github_requests_per_min: u32,
  pub github_burst: u32,
  /// Fallback limits for platforms without a dedicated entry.
  pub default_requests_per_min: u32,
  pub default_burst: u32,
}
impl Default for RateLimitConfig {
  // Conservative per-platform defaults.
  // NOTE(review): these values are not tied to any documented API
  // quota visible here — confirm against each platform's limits.
  fn default() -> Self {
    Self {
      modrinth_requests_per_min: 100,
      modrinth_burst: 10,
      curseforge_requests_per_min: 60,
      curseforge_burst: 5,
      github_requests_per_min: 50,
      github_burst: 5,
      default_requests_per_min: 30,
      default_burst: 3,
    }
  }
}
impl RateLimiter {
  /// Creates a limiter, falling back to [`RateLimitConfig::default`]
  /// when `config` is `None`.
  pub fn new(config: Option<RateLimitConfig>) -> Self {
    Self {
      inner: Arc::new(Mutex::new(RateLimiterInner {
        requests: HashMap::new(),
        config: config.unwrap_or_default(),
      })),
    }
  }
  /// Waits (if necessary) until a request to `platform` is permitted,
  /// then records it. Currently always returns `Ok(())`.
  pub async fn acquire(&self, platform: &str) -> Result<()> {
    // Snapshot the per-platform limits; the guard is released before
    // any other await point.
    let (rate, burst) = {
      let inner = self.inner.lock().await;
      let c = &inner.config;
      match platform.to_lowercase().as_str() {
        "modrinth" => (c.modrinth_requests_per_min, c.modrinth_burst),
        "curseforge" => (c.curseforge_requests_per_min, c.curseforge_burst),
        "github" => (c.github_requests_per_min, c.github_burst),
        _ => (c.default_requests_per_min, c.default_burst),
      }
    };
    // Minimum spacing implied by the sustained rate.
    let interval = Duration::from_secs(60) / rate.max(1);
    // First critical section: prune timestamps older than the 60s
    // window and compute the required backoff. The guard must be
    // dropped before sleeping AND before re-locking below: tokio's
    // Mutex is not re-entrant, and the original code's shadowing
    // (`let mut inner = ...` twice) kept the first guard alive on the
    // non-sleep path, deadlocking every un-throttled call.
    let wait_time = {
      let mut inner = self.inner.lock().await;
      let now = Instant::now();
      let platform_requests =
        inner.requests.entry(platform.to_string()).or_default();
      platform_requests
        .retain(|t| now.duration_since(*t) < Duration::from_secs(60));
      if platform_requests.len() >= burst as usize {
        platform_requests
          .first()
          .map(|oldest| interval.saturating_sub(now.duration_since(*oldest)))
          .unwrap_or(Duration::ZERO)
      } else {
        Duration::ZERO
      }
    };
    if wait_time > Duration::ZERO {
      tokio::time::sleep(wait_time).await;
    }
    // Second critical section: record this request's timestamp.
    let mut inner = self.inner.lock().await;
    inner
      .requests
      .entry(platform.to_string())
      .or_default()
      .push(Instant::now());
    Ok(())
  }
  /// Like [`RateLimiter::acquire`] but discards the (currently
  /// infallible) result.
  pub async fn wait_for(&self, platform: &str) {
    let _ = self.acquire(platform).await;
  }
}

150
src/resolver.rs Normal file
View file

@ -0,0 +1,150 @@
use std::collections::{HashMap, HashSet};
use crate::{
error::{PakkerError, Result},
model::{LockFile, Project},
platform::PlatformClient,
};
/// Depth-first resolver for a project's transitive dependencies.
pub struct DependencyResolver {
  // IDs whose subtrees have been fully resolved.
  visited: HashSet<String>,
  // Current DFS path, used for cycle detection.
  path: Vec<String>,
}
impl DependencyResolver {
  /// Creates a resolver with empty traversal state.
  pub fn new() -> Self {
    Self {
      visited: HashSet::new(),
      path: Vec::new(),
    }
  }
  /// Resolves the transitive dependencies of `project`, returning every
  /// newly fetched dependency project (each dependency precedes the
  /// sub-dependencies discovered through it).
  ///
  /// Projects already present in `lockfile` are skipped; `pakku_links`
  /// are updated on both sides of each dependency edge. Fails with
  /// `PakkerError::CircularDependency` when the current traversal path
  /// revisits a project.
  ///
  /// Returns a boxed future because async fns cannot recurse directly.
  pub fn resolve<'a>(
    &'a mut self,
    project: &'a mut Project,
    lockfile: &'a mut LockFile,
    platforms: &'a HashMap<String, Box<dyn PlatformClient>>,
  ) -> std::pin::Pin<
    Box<dyn std::future::Future<Output = Result<Vec<Project>>> + 'a>,
  > {
    Box::pin(async move {
      let mut resolved = Vec::new();
      if let Some(ref pakku_id) = project.pakku_id {
        // Already locked: nothing to do for this subtree.
        if lockfile.get_project(pakku_id).is_some() {
          log::debug!("Project already in lockfile: {}", project.get_name());
          return Ok(resolved);
        }
        // Cycle detection against the current DFS path.
        if self.path.contains(pakku_id) {
          let cycle_path = self.path.join(" -> ");
          return Err(PakkerError::CircularDependency(format!(
            "{cycle_path} -> {pakku_id}"
          )));
        }
        self.path.push(pakku_id.clone());
      } else {
        // Without a pakku ID we can track neither links nor cycles.
        return Ok(resolved);
      }
      // Union of required dependency IDs across all files, deduplicated.
      let mut dependencies_set: HashSet<String> = HashSet::new();
      for file in &project.files {
        for dep_id in &file.required_dependencies {
          dependencies_set.insert(dep_id.clone());
        }
      }
      let dependencies: Vec<String> = dependencies_set.into_iter().collect();
      for dep_id in dependencies {
        // If any platform already maps this ID in the lockfile, just
        // link the two projects instead of fetching again.
        let existing_pakku_id = lockfile
          .find_project_by_platform_id("modrinth", &dep_id)
          .or_else(|| {
            lockfile.find_project_by_platform_id("curseforge", &dep_id)
          })
          .or_else(|| lockfile.find_project_by_platform_id("github", &dep_id))
          .map(|p| p.pakku_id.clone());
        if let Some(Some(existing_id)) = existing_pakku_id {
          if let Some(ref my_id) = project.pakku_id {
            project.pakku_links.insert(existing_id.clone());
            if let Some(existing_mut) = lockfile.find_project_mut(&existing_id)
            {
              existing_mut.pakku_links.insert(my_id.clone());
            }
          }
          continue;
        }
        let mut dep_project =
          self.fetch_dependency(&dep_id, lockfile, platforms).await?;
        // Link parent and dependency in both directions.
        if let (Some(dep_id), Some(my_id)) =
          (&dep_project.pakku_id, &project.pakku_id)
        {
          project.pakku_links.insert(dep_id.clone());
          dep_project.pakku_links.insert(my_id.clone());
        }
        // NOTE(review): an error propagated from this recursion leaves
        // the current entry on `self.path` (the pop below is skipped);
        // harmless only if the resolver is discarded on error — confirm.
        let mut sub_deps =
          self.resolve(&mut dep_project, lockfile, platforms).await?;
        resolved.push(dep_project);
        resolved.append(&mut sub_deps);
      }
      // NOTE(review): `visited` is written here but never read in this
      // impl; presumably reserved for future memoization.
      if let Some(ref pakku_id) = project.pakku_id {
        self.visited.insert(pakku_id.clone());
      }
      self.path.pop();
      Ok(resolved)
    })
  }
  /// Queries every configured platform for `dep_id`, merging the
  /// per-platform results into a single project.
  ///
  /// # Errors
  /// `PakkerError::ProjectNotFound` when no platform knows the ID.
  async fn fetch_dependency(
    &self,
    dep_id: &str,
    lockfile: &LockFile,
    platforms: &HashMap<String, Box<dyn PlatformClient>>,
  ) -> Result<Project> {
    let mut projects = Vec::new();
    for (platform_name, client) in platforms {
      match client
        .request_project_with_files(
          dep_id,
          &lockfile.mc_versions,
          &lockfile.get_loader_names(),
        )
        .await
      {
        Ok(project) => {
          log::info!("Found dependency {dep_id} on {platform_name}");
          projects.push(project);
        },
        Err(e) => {
          // Per-platform misses are expected; only log at debug level.
          log::debug!("Could not find {dep_id} on {platform_name}: {e}");
        },
      }
    }
    if projects.is_empty() {
      return Err(PakkerError::ProjectNotFound(dep_id.to_string()));
    }
    if projects.len() == 1 {
      Ok(projects.into_iter().next().unwrap())
    } else {
      // Merge platform-specific variants into the first result.
      let mut merged = projects.remove(0);
      for project in projects {
        merged.merge(project);
      }
      Ok(merged)
    }
  }
}
impl Default for DependencyResolver {
fn default() -> Self {
Self::new()
}
}

77
src/ui_utils.rs Normal file
View file

@ -0,0 +1,77 @@
// UI utility functions for terminal formatting and interactive prompts
use std::io;
use dialoguer::{Confirm, MultiSelect, Select, theme::ColorfulTheme};
/// Wraps `text` in an OSC 8 terminal hyperlink pointing at `url`.
/// Produces: `ESC ] 8 ; ; URL ESC \ TEXT ESC ] 8 ; ; ESC \`
pub fn hyperlink(url: &str, text: &str) -> String {
  const OSC8: &str = "\x1b]8;;";
  const ST: &str = "\x1b\\";
  let mut link = String::with_capacity(url.len() + text.len() + 14);
  link.push_str(OSC8);
  link.push_str(url);
  link.push_str(ST);
  link.push_str(text);
  link.push_str(OSC8);
  link.push_str(ST);
  link
}
/// Asks `question` as an interactive yes/no confirmation.
/// `default` is the answer used when the user just presses enter;
/// returns true for yes, false for no.
pub fn prompt_yes_no(question: &str, default: bool) -> io::Result<bool> {
  let theme = ColorfulTheme::default();
  let answer = Confirm::with_theme(&theme)
    .with_prompt(question)
    .default(default)
    .interact();
  answer.map_err(io::Error::other)
}
/// Shows an interactive single-choice menu for `options`.
/// Returns the zero-based index of the entry the user picked
/// (the first entry is pre-selected).
#[allow(dead_code)]
pub fn prompt_select(question: &str, options: &[&str]) -> io::Result<usize> {
  let theme = ColorfulTheme::default();
  let chosen = Select::with_theme(&theme)
    .with_prompt(question)
    .items(options)
    .default(0)
    .interact();
  chosen.map_err(io::Error::other)
}
/// Shows an interactive multi-choice menu for `options`.
/// Returns the zero-based indices of every entry the user selected.
#[allow(dead_code)]
pub fn prompt_multi_select(
  question: &str,
  options: &[&str],
) -> io::Result<Vec<usize>> {
  let theme = ColorfulTheme::default();
  let picked = MultiSelect::with_theme(&theme)
    .with_prompt(question)
    .items(options)
    .interact();
  picked.map_err(io::Error::other)
}
/// Builds the public Modrinth mod page URL for `slug`.
#[allow(dead_code)]
pub fn modrinth_project_url(slug: &str) -> String {
  let mut url = String::from("https://modrinth.com/mod/");
  url.push_str(slug);
  url
}
/// Builds the public `CurseForge` mod page URL for `project_id`.
#[allow(dead_code)]
pub fn curseforge_project_url(project_id: &str) -> String {
  let mut url = String::from("https://www.curseforge.com/minecraft/mc-mods/");
  url.push_str(project_id);
  url
}
#[cfg(test)]
mod tests {
  use super::*;
  // Smoke test: the OSC 8 wrapper must embed both the URL and the
  // visible text (exact escape framing is covered by inspection).
  #[test]
  fn test_hyperlink() {
    let result = hyperlink("https://example.com", "Example");
    assert!(result.contains("https://example.com"));
    assert!(result.contains("Example"));
  }
  // Pins the exact Modrinth URL format.
  #[test]
  fn test_modrinth_url() {
    let url = modrinth_project_url("sodium");
    assert_eq!(url, "https://modrinth.com/mod/sodium");
  }
}

6
src/utils.rs Normal file
View file

@ -0,0 +1,6 @@
pub mod hash;
pub mod id;
pub mod prompt;
pub use hash::verify_hash;
pub use id::generate_pakku_id;

231
src/utils/hash.rs Normal file
View file

@ -0,0 +1,231 @@
use std::{
fs::File,
io::{BufReader, Read},
path::Path,
};
use md5::{Digest as Md5Digest, Md5};
use sha1::Sha1;
use sha2::{Sha256, Sha512};
use crate::error::{PakkerError, Result};
/// Compute the 32-bit MurmurHash2 value used for `CurseForge`
/// fingerprinting (seed fixed at 1).
#[allow(dead_code)]
pub fn compute_murmur2_hash(data: &[u8]) -> u32 {
  murmur2_hash(data, 1)
}
/// Classic 32-bit MurmurHash2 (Austin Appleby), reading the input as
/// little-endian 4-byte words with a 1-3 byte tail.
#[allow(dead_code)]
fn murmur2_hash(data: &[u8], seed: u32) -> u32 {
  const M: u32 = 0x5BD1E995;
  const R: u32 = 24;
  let mut h = seed ^ data.len() as u32;
  // Body: mix each complete 4-byte word.
  let mut words = data.chunks_exact(4);
  for word in words.by_ref() {
    let mut k = u32::from_le_bytes(word.try_into().unwrap());
    k = k.wrapping_mul(M);
    k ^= k >> R;
    k = k.wrapping_mul(M);
    h = h.wrapping_mul(M) ^ k;
  }
  // Tail: fold in the trailing 1-3 bytes, lowest byte first (XOR is
  // order-independent, so this matches the classic switch-case form).
  let tail = words.remainder();
  if !tail.is_empty() {
    for (i, &b) in tail.iter().enumerate() {
      h ^= u32::from(b) << (8 * i as u32);
    }
    h = h.wrapping_mul(M);
  }
  // Finalization: force avalanche of the last few bytes.
  h ^= h >> 13;
  h = h.wrapping_mul(M);
  h ^= h >> 15;
  h
}
/// Compute the lowercase-hex SHA-1 digest of the file at `path`,
/// streaming it through an 8 KiB buffer.
pub fn compute_sha1<P: AsRef<Path>>(path: P) -> Result<String> {
  let mut reader = BufReader::new(File::open(path)?);
  let mut hasher = Sha1::new();
  let mut chunk = [0u8; 8192];
  let mut read = reader.read(&mut chunk)?;
  while read > 0 {
    hasher.update(&chunk[..read]);
    read = reader.read(&mut chunk)?;
  }
  Ok(format!("{:x}", hasher.finalize()))
}
/// Compute the lowercase-hex SHA-256 digest of the file at `path`,
/// streaming it through an 8 KiB buffer.
pub fn compute_sha256<P: AsRef<Path>>(path: P) -> Result<String> {
  let mut reader = BufReader::new(File::open(path)?);
  let mut hasher = Sha256::new();
  let mut chunk = [0u8; 8192];
  let mut read = reader.read(&mut chunk)?;
  while read > 0 {
    hasher.update(&chunk[..read]);
    read = reader.read(&mut chunk)?;
  }
  Ok(format!("{:x}", hasher.finalize()))
}
/// Compute the lowercase-hex SHA-256 digest of an in-memory byte slice.
pub fn compute_sha256_bytes(data: &[u8]) -> String {
  let mut hasher = Sha256::new();
  hasher.update(data);
  let digest = hasher.finalize();
  format!("{digest:x}")
}
/// Compute the lowercase-hex SHA-512 digest of the file at `path`,
/// streaming it through an 8 KiB buffer.
pub fn compute_sha512<P: AsRef<Path>>(path: P) -> Result<String> {
  let mut reader = BufReader::new(File::open(path)?);
  let mut hasher = Sha512::new();
  let mut chunk = [0u8; 8192];
  let mut read = reader.read(&mut chunk)?;
  while read > 0 {
    hasher.update(&chunk[..read]);
    read = reader.read(&mut chunk)?;
  }
  Ok(format!("{:x}", hasher.finalize()))
}
/// Compute the lowercase-hex MD5 digest of the file at `path`,
/// streaming it through an 8 KiB buffer.
pub fn compute_md5<P: AsRef<Path>>(path: P) -> Result<String> {
  let mut reader = BufReader::new(File::open(path)?);
  let mut hasher = Md5::new();
  let mut chunk = [0u8; 8192];
  let mut read = reader.read(&mut chunk)?;
  while read > 0 {
    hasher.update(&chunk[..read]);
    read = reader.read(&mut chunk)?;
  }
  Ok(format!("{:x}", hasher.finalize()))
}
/// Verify a file's digest against `expected` (hex, compared
/// case-insensitively).
///
/// `algorithm` is one of `sha1`, `sha256`, `sha512` or `md5`, matched
/// case-insensitively so mixed-case names from upstream APIs are
/// accepted (previously only exact lowercase matched).
///
/// # Errors
/// Returns `PakkerError::InternalError` for an unknown algorithm, or
/// the underlying I/O error if the file cannot be read.
pub fn verify_hash<P: AsRef<Path>>(
  path: P,
  algorithm: &str,
  expected: &str,
) -> Result<bool> {
  let path = path.as_ref();
  let actual = match algorithm.to_ascii_lowercase().as_str() {
    "sha1" => compute_sha1(path)?,
    "sha256" => compute_sha256(path)?,
    "sha512" => compute_sha512(path)?,
    "md5" => compute_md5(path)?,
    _ => {
      return Err(PakkerError::InternalError(format!(
        "Unknown hash algorithm: {algorithm}"
      )));
    },
  };
  Ok(actual.eq_ignore_ascii_case(expected))
}
#[cfg(test)]
mod tests {
  use super::*;
  // Same input twice must yield the same Murmur2 value.
  #[test]
  fn test_murmur2_hash_deterministic() {
    let data = b"hello world";
    let hash1 = compute_murmur2_hash(data);
    let hash2 = compute_murmur2_hash(data);
    assert_eq!(hash1, hash2, "Murmur2 hash must be deterministic");
  }
  // With seed 1 the empty input still mixes through finalization,
  // so the result is non-zero.
  #[test]
  fn test_murmur2_hash_empty() {
    let data = b"";
    let hash = compute_murmur2_hash(data);
    assert_ne!(hash, 0, "Empty data should produce a non-zero hash");
  }
  #[test]
  fn test_murmur2_hash_different_inputs() {
    let hash1 = compute_murmur2_hash(b"hello");
    let hash2 = compute_murmur2_hash(b"world");
    assert_ne!(
      hash1, hash2,
      "Different inputs should produce different hashes"
    );
  }
  #[test]
  fn test_sha256_bytes_deterministic() {
    let data = b"test data";
    let hash1 = compute_sha256_bytes(data);
    let hash2 = compute_sha256_bytes(data);
    assert_eq!(hash1, hash2, "SHA256 must be deterministic");
  }
  // 32 digest bytes -> 64 lowercase hex characters.
  #[test]
  fn test_sha256_bytes_format() {
    let data = b"hello";
    let hash = compute_sha256_bytes(data);
    assert_eq!(hash.len(), 64, "SHA256 hex should be 64 characters");
    assert!(
      hash.chars().all(|c| c.is_ascii_hexdigit()),
      "SHA256 should only contain hex digits"
    );
  }
  // Well-known SHA-256 digest of the empty string.
  #[test]
  fn test_sha256_bytes_empty() {
    let hash = compute_sha256_bytes(b"");
    assert_eq!(
      hash,
      "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
    );
  }
  #[test]
  fn test_sha256_bytes_known_value() {
    // SHA256 of "hello" in hex
    let expected =
      "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824";
    let hash = compute_sha256_bytes(b"hello");
    assert_eq!(hash, expected);
  }
}

35
src/utils/id.rs Normal file
View file

@ -0,0 +1,35 @@
use rand::Rng;
const CHARSET: &[u8] =
b"0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
const ID_LENGTH: usize = 16;
/// Generate a random 16-character alphanumeric pakku ID drawn
/// uniformly from `CHARSET`.
pub fn generate_pakku_id() -> String {
  let mut rng = rand::rng();
  let mut id = String::with_capacity(ID_LENGTH);
  for _ in 0..ID_LENGTH {
    let idx = rng.random_range(0..CHARSET.len());
    id.push(char::from(CHARSET[idx]));
  }
  id
}
#[cfg(test)]
mod tests {
  use super::*;
  // IDs must have the fixed length and contain only [0-9a-zA-Z].
  #[test]
  fn test_generate_pakku_id() {
    let id = generate_pakku_id();
    assert_eq!(id.len(), ID_LENGTH);
    assert!(id.chars().all(|c| c.is_alphanumeric()));
  }
  // Probabilistic: collision chance is 1 in 62^16, effectively zero.
  #[test]
  fn test_unique_ids() {
    let id1 = generate_pakku_id();
    let id2 = generate_pakku_id();
    assert_ne!(id1, id2);
  }
}

56
src/utils/prompt.rs Normal file
View file

@ -0,0 +1,56 @@
use std::io::{self, Write};
use crate::error::Result;
/// Print `message` (no trailing newline), then read one line from
/// stdin and return it with surrounding whitespace trimmed.
#[allow(dead_code)]
pub fn prompt_user(message: &str) -> Result<String> {
  print!("{message}");
  io::stdout().flush()?;
  let mut line = String::new();
  io::stdin().read_line(&mut line)?;
  Ok(line.trim().to_owned())
}
#[allow(dead_code)]
pub fn prompt_select(message: &str, options: &[String]) -> Result<usize> {
println!("{message}");
for (i, option) in options.iter().enumerate() {
println!(" {}. {}", i + 1, option);
}
loop {
print!("Select (1-{}): ", options.len());
io::stdout().flush()?;
let mut input = String::new();
io::stdin().read_line(&mut input)?;
if let Ok(choice) = input.trim().parse::<usize>()
&& choice > 0
&& choice <= options.len()
{
return Ok(choice - 1);
}
println!("Invalid selection. Please try again.");
}
}
/// Ask a yes/no question on stdout; returns true only when the user
/// answers "y" or "yes" (any case). Anything else counts as no.
#[allow(dead_code)]
pub fn prompt_confirm(message: &str) -> Result<bool> {
  print!("{message} (y/n): ");
  io::stdout().flush()?;
  let mut line = String::new();
  io::stdin().read_line(&mut line)?;
  Ok(matches!(line.trim().to_lowercase().as_str(), "y" | "yes"))
}
/// Convenience alias for [`prompt_confirm`].
#[allow(dead_code)]
pub fn confirm(message: &str) -> Result<bool> {
  prompt_confirm(message)
}

65
tests/common/mod.rs Normal file
View file

@ -0,0 +1,65 @@
use std::{env, error::Error, fs, path::PathBuf};
use git2::{Repository, Signature};
/// Absolute path to the compiled `pakker` binary under test.
///
/// Prefers `CARGO_BIN_EXE_pakker`, which Cargo sets when compiling
/// integration tests and which is correct for any profile and for a
/// custom `CARGO_TARGET_DIR`; falls back to the previous hard-coded
/// `target/debug/pakker` location when the variable is absent.
pub fn pakker_bin_path() -> PathBuf {
  match option_env!("CARGO_BIN_EXE_pakker") {
    Some(exe) => PathBuf::from(exe),
    None => {
      let manifest = env!("CARGO_MANIFEST_DIR");
      PathBuf::from(manifest).join("target/debug/pakker")
    },
  }
}
/// Initializes a bare git repository at `path` (used as a push target
/// standing in for a remote).
pub fn init_bare_repo(path: &PathBuf) -> Result<Repository, git2::Error> {
  Repository::init_bare(path)
}
/// Initializes a non-bare repository at `path`, writes `file` with
/// `content`, stages it, and creates an initial commit that the
/// `master` branch (and HEAD) point to.
pub fn init_repo_with_commit(
  path: &PathBuf,
  file: &str,
  content: &str,
) -> Result<Repository, Box<dyn Error>> {
  let repo = Repository::init(path)?;
  let sig = Signature::now("Test User", "test@example.com")?;
  let workdir = repo.workdir().ok_or("no workdir")?;
  let file_path = workdir.join(file);
  fs::write(&file_path, content)?;
  // Stage the file and build a tree from the index.
  let mut index = repo.index()?;
  index.add_path(std::path::Path::new(file))?;
  index.write()?;
  let tree_oid = index.write_tree()?;
  let tree = repo.find_tree(tree_oid)?;
  // initial commit
  let commit_oid =
    repo.commit(Some("HEAD"), &sig, &sig, "initial", &tree, &[])?;
  // create/ensure master branch points to this commit and set HEAD
  repo.reference("refs/heads/master", commit_oid, true, "create master")?;
  repo.set_head("refs/heads/master")?;
  // drop tree to avoid holding a borrow of `repo` when returning it
  drop(tree);
  Ok(repo)
}
/// Pushes the repository's current HEAD to `refs/heads/master` on the
/// named remote, creating the remote if it does not exist yet. For
/// local filesystem remotes, also points the bare repo's HEAD at
/// `master` so later clones check out the pushed branch.
pub fn push_to_remote(
  repo: &Repository,
  remote_name: &str,
  remote_url: &str,
) -> Result<(), git2::Error> {
  // Try to create the remote; if it already exists, find it instead
  let mut remote = match repo.remote(remote_name, remote_url) {
    Ok(r) => r,
    Err(_) => repo.find_remote(remote_name)?,
  };
  // Push current HEAD to refs/heads/master on remote
  remote.push(&["HEAD:refs/heads/master"], None)?;
  // If remote is a local filesystem path, ensure its HEAD points to master
  if remote_url.starts_with('/')
    && let Ok(bare) = Repository::open(remote_url)
  {
    // Set bare repo HEAD to refs/heads/master
    let _ = bare.set_head("refs/heads/master");
  }
  Ok(())
}

196
tests/fork_from_path.rs Normal file
View file

@ -0,0 +1,196 @@
use std::{error::Error, fs, process::Command};
use git2::{Repository, Signature};
use tempfile::TempDir;
// shared test helpers
mod common;
use common::{
init_bare_repo,
init_repo_with_commit,
pakker_bin_path,
push_to_remote,
};
// End-to-end happy path: bare upstream -> local clone ->
// `pakker fork init --from-path` succeeds and creates .pakku/parent.
#[test]
fn happy_path_from_path() -> Result<(), Box<dyn Error>> {
  let tmp = TempDir::new()?;
  let tmp_path = tmp.path().to_path_buf();
  let upstream = tmp_path.join("upstream.git");
  init_bare_repo(&upstream)?;
  // Build a work repo with one commit and publish it to upstream.
  let work = tmp_path.join("work_repo");
  fs::create_dir_all(&work)?;
  let work_repo = init_repo_with_commit(&work, "README.md", "hello")?;
  push_to_remote(&work_repo, "origin", upstream.to_str().unwrap())?;
  // Clone upstream to local path
  let local = tmp_path.join("local_clone");
  Repository::clone(upstream.to_str().unwrap(), &local)?;
  // Now create a new project dir where pakker will be initialized
  let project = tmp_path.join("project_dir");
  fs::create_dir_all(&project)?;
  let pakker = pakker_bin_path();
  let status = Command::new(pakker)
    .args([
      "fork",
      "init",
      "--from-path",
      local.to_str().unwrap(),
      "--ref-name",
      "master",
    ])
    .current_dir(&project)
    .status()?;
  assert!(status.success());
  // Fork metadata must exist after a successful init.
  let parent = project.join(".pakku").join("parent");
  assert!(parent.exists());
  Ok(())
}
// `fork init --from-path` must refuse a dirty working tree and say so.
#[test]
fn fails_with_uncommitted_changes() -> Result<(), Box<dyn Error>> {
  let tmp = TempDir::new()?;
  let tmp_path = tmp.path().to_path_buf();
  let upstream = tmp_path.join("upstream2.git");
  init_bare_repo(&upstream)?;
  let work = tmp_path.join("work2");
  fs::create_dir_all(&work)?;
  let work_repo = init_repo_with_commit(&work, "file.txt", "a")?;
  work_repo.remote("origin", upstream.to_str().unwrap())?;
  push_to_remote(&work_repo, "origin", upstream.to_str().unwrap())?;
  let local = tmp_path.join("local2");
  Repository::clone(upstream.to_str().unwrap(), &local)?;
  // Make an uncommitted change
  fs::write(local.join("UNCOMMITTED.md"), "oops")?;
  let project = tmp_path.join("project2");
  fs::create_dir_all(&project)?;
  let pakker = pakker_bin_path();
  let output = Command::new(pakker)
    .args([
      "fork",
      "init",
      "--from-path",
      local.to_str().unwrap(),
      "--ref-name",
      "master",
    ])
    .current_dir(&project)
    .output()?;
  assert!(!output.status.success());
  // The failure message must mention the dirty state (exact wording
  // is left flexible).
  let stderr = String::from_utf8_lossy(&output.stderr);
  assert!(
    stderr.to_lowercase().contains("uncommitted")
      || stderr.to_lowercase().contains("dirty")
  );
  Ok(())
}
// `fork init --from-path` must refuse when the local clone has commits
// that are not present on upstream.
#[test]
fn fails_when_local_ahead() -> Result<(), Box<dyn Error>> {
  let tmp = TempDir::new()?;
  let tmp_path = tmp.path().to_path_buf();
  let upstream = tmp_path.join("upstream3.git");
  init_bare_repo(&upstream)?;
  let work = tmp_path.join("work3");
  fs::create_dir_all(&work)?;
  let work_repo = init_repo_with_commit(&work, "f.txt", "1")?;
  work_repo.remote("origin", upstream.to_str().unwrap())?;
  push_to_remote(&work_repo, "origin", upstream.to_str().unwrap())?;
  let local = tmp_path.join("local3");
  Repository::clone(upstream.to_str().unwrap(), &local)?;
  // Create a new commit locally that is not pushed
  {
    let repo = Repository::open(&local)?;
    let workdir = repo.workdir().ok_or("no workdir")?;
    fs::write(workdir.join("f.txt"), "2")?;
    let mut index = repo.index()?;
    index.add_path(std::path::Path::new("f.txt"))?;
    index.write()?;
    let tree_oid = index.write_tree()?;
    let tree = repo.find_tree(tree_oid)?;
    let sig = Signature::now("Test User", "test@example.com")?;
    let head = repo.head()?;
    let parent = head.peel_to_commit()?;
    repo.commit(Some("HEAD"), &sig, &sig, "local change", &tree, &[&parent])?;
  }
  let project = tmp_path.join("project3");
  fs::create_dir_all(&project)?;
  let pakker = pakker_bin_path();
  let output = Command::new(pakker)
    .args([
      "fork",
      "init",
      "--from-path",
      local.to_str().unwrap(),
      "--ref-name",
      "master",
    ])
    .current_dir(&project)
    .output()?;
  assert!(!output.status.success());
  // The error must explain that local commits are missing upstream.
  let stderr = String::from_utf8_lossy(&output.stderr);
  assert!(
    stderr.to_lowercase().contains("commits not present")
      || stderr.to_lowercase().contains("not present on upstream")
  );
  Ok(())
}
// A fetch failure (unreachable origin URL) should not abort init:
// pakker is expected to warn and still create the fork metadata.
#[test]
fn warns_on_fetch_failure_and_proceeds() -> Result<(), Box<dyn Error>> {
  let tmp = TempDir::new()?;
  let tmp_path = tmp.path().to_path_buf();
  let upstream = tmp_path.join("upstream4.git");
  init_bare_repo(&upstream)?;
  let work = tmp_path.join("work4");
  fs::create_dir_all(&work)?;
  let work_repo = init_repo_with_commit(&work, "a.txt", "1")?;
  work_repo.remote("origin", upstream.to_str().unwrap())?;
  push_to_remote(&work_repo, "origin", upstream.to_str().unwrap())?;
  let local = tmp_path.join("local4");
  Repository::clone(upstream.to_str().unwrap(), &local)?;
  // Set an invalid URL for origin to force fetch failure
  let repo = Repository::open(&local)?;
  repo.remote_set_url("origin", "git@invalid:nonexistent/repo.git")?;
  let project = tmp_path.join("project4");
  fs::create_dir_all(&project)?;
  let pakker = pakker_bin_path();
  let status = Command::new(pakker)
    .args([
      "fork",
      "init",
      "--from-path",
      local.to_str().unwrap(),
      "--ref-name",
      "master",
    ])
    .current_dir(&project)
    .status()?;
  // Init must still succeed and produce the metadata directory.
  assert!(status.success());
  let parent = project.join(".pakku").join("parent");
  assert!(parent.exists());
  Ok(())
}
}